body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
601e1d57e4e7881f0624b2db4a08626b38caabbb84881b3a0993e0e32b20d7d4
def Get(self, request, context): 'Returns the specified API key.\n\n To get the list of available API keys, make a [List] request.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Returns the specified API key. To get the list of available API keys, make a [List] request.
yandex/cloud/iam/v1/api_key_service_pb2_grpc.py
Get
kbespalov/python-sdk
0
python
def Get(self, request, context): 'Returns the specified API key.\n\n To get the list of available API keys, make a [List] request.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Get(self, request, context): 'Returns the specified API key.\n\n To get the list of available API keys, make a [List] request.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Returns the specified API key. To get the list of available API keys, make a [List] request.<|endoftext|>
9f8b8212ea62638e5945d508e04362b9dc6a02e02ad5ca581f745fca82668e13
def Create(self, request, context): 'Creates an API key for the specified service account.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Creates an API key for the specified service account.
yandex/cloud/iam/v1/api_key_service_pb2_grpc.py
Create
kbespalov/python-sdk
0
python
def Create(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Create(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Creates an API key for the specified service account.<|endoftext|>
d43917c4f0a3b1fb8294c3d1669b2888228e31f69d67dac4c924101ec292d650
def Update(self, request, context): 'Updates the specified API key.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Updates the specified API key.
yandex/cloud/iam/v1/api_key_service_pb2_grpc.py
Update
kbespalov/python-sdk
0
python
def Update(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Update(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Updates the specified API key.<|endoftext|>
94a9ebd40d58c679549ad46d44ea2f3168a3664b21f33fb61527d2b9b0ef971e
def Delete(self, request, context): 'Deletes the specified API key.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Deletes the specified API key.
yandex/cloud/iam/v1/api_key_service_pb2_grpc.py
Delete
kbespalov/python-sdk
0
python
def Delete(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Delete(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Deletes the specified API key.<|endoftext|>
7ccf4114b631dc337bc7c75655b93ed8d4beaa761a857b8c754cac36cc3254d9
def ListOperations(self, request, context): 'Retrieves the list of operations for the specified API key.\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Retrieves the list of operations for the specified API key.
yandex/cloud/iam/v1/api_key_service_pb2_grpc.py
ListOperations
kbespalov/python-sdk
0
python
def ListOperations(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def ListOperations(self, request, context): '\n ' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Retrieves the list of operations for the specified API key.<|endoftext|>
9bf4d355ba3f3fc2791e8368220ba318ac71325ed5d4a63d148b228b8545d5d1
def read_file(PDB_files, variables): "\n\tPDB_files actually means log_files...lol sorry\n\t\n\tvariables is a list of variables that you want to read from the log files, but they need to be called\n\texactly what they are called in the first line of the log file\n\t\n\tfor instance, 'energy', 'natives', etc...\n\t\n\tReturns a 3D array data where data[i,j,k] corresponds to log_file i, time j within that log file, and variable k (in case you care about multiple variables like energies, natives, etc)\n\t\n\t" data = [] lens = [] variable_indices = [] times = [] temperatures = [] setpoints = [] for (filecounter, filename) in enumerate(PDB_files): step_index = 0 print('Reading file {}'.format(filename)) openfile = open(filename) data.append([]) for line in openfile.readlines(): line = line.rstrip('\n') if (len(line) > 0): entries = line.split() if ('step #' in line): fields = (['step'] + line.split()[2:]) temperature_index = fields.index('temp') if ('setpoint' in fields): setpoint_index = fields.index('setpoint') else: setpoint_index = np.nan for variable in variables: variable_indices.append(fields.index(variable)) data[filecounter].append([]) if (entries[0] == 'STEP'): if ((np.mod(int(entries[1]), step_multiples_to_read) == 0) and (int(entries[1]) >= min_step) and (int(entries[1]) < max_step)): step_index += 1 if (filecounter == 0): times.append(int(entries[1])) if (step_index == 1): temperatures.append(float(entries[(temperature_index + 1)][0:5])) if ('setpoint' in fields): setpoints.append(float(entries[(setpoint_index + 1)])) else: setpoints.append(0) for (v, variable) in enumerate(variables): data[filecounter][v].append(float(entries[(variable_indices[v] + 1)])) lens.append(len(data[filecounter][0])) data[filecounter] = np.array(data[filecounter]) x = np.zeros((1, len(data[filecounter][0]), len(data[filecounter]))) for v in range(len(variables)): x[(0, :, v)] = data[filecounter][(v, :)] data[filecounter] = x nonzero_lengths = [i for i in range(len(lens)) if 
(lens[i] > 0)] data = [x for (i, x) in enumerate(data) if (i in nonzero_lengths)] lens = [l for (i, l) in enumerate(lens) if (i in nonzero_lengths)] data = np.vstack((x[(:, 0:min(lens), :)] for x in data)) return (data, temperatures, setpoints, np.array(times))
PDB_files actually means log_files...lol sorry variables is a list of variables that you want to read from the log files, but they need to be called exactly what they are called in the first line of the log file for instance, 'energy', 'natives', etc... Returns a 3D array data where data[i,j,k] corresponds to log_file i, time j within that log file, and variable k (in case you care about multiple variables like energies, natives, etc)
read_log_files.py
read_file
amirbitran/dbfold
0
python
def read_file(PDB_files, variables): "\n\tPDB_files actually means log_files...lol sorry\n\t\n\tvariables is a list of variables that you want to read from the log files, but they need to be called\n\texactly what they are called in the first line of the log file\n\t\n\tfor instance, 'energy', 'natives', etc...\n\t\n\tReturns a 3D array data where data[i,j,k] corresponds to log_file i, time j within that log file, and variable k (in case you care about multiple variables like energies, natives, etc)\n\t\n\t" data = [] lens = [] variable_indices = [] times = [] temperatures = [] setpoints = [] for (filecounter, filename) in enumerate(PDB_files): step_index = 0 print('Reading file {}'.format(filename)) openfile = open(filename) data.append([]) for line in openfile.readlines(): line = line.rstrip('\n') if (len(line) > 0): entries = line.split() if ('step #' in line): fields = (['step'] + line.split()[2:]) temperature_index = fields.index('temp') if ('setpoint' in fields): setpoint_index = fields.index('setpoint') else: setpoint_index = np.nan for variable in variables: variable_indices.append(fields.index(variable)) data[filecounter].append([]) if (entries[0] == 'STEP'): if ((np.mod(int(entries[1]), step_multiples_to_read) == 0) and (int(entries[1]) >= min_step) and (int(entries[1]) < max_step)): step_index += 1 if (filecounter == 0): times.append(int(entries[1])) if (step_index == 1): temperatures.append(float(entries[(temperature_index + 1)][0:5])) if ('setpoint' in fields): setpoints.append(float(entries[(setpoint_index + 1)])) else: setpoints.append(0) for (v, variable) in enumerate(variables): data[filecounter][v].append(float(entries[(variable_indices[v] + 1)])) lens.append(len(data[filecounter][0])) data[filecounter] = np.array(data[filecounter]) x = np.zeros((1, len(data[filecounter][0]), len(data[filecounter]))) for v in range(len(variables)): x[(0, :, v)] = data[filecounter][(v, :)] data[filecounter] = x nonzero_lengths = [i for i in range(len(lens)) if 
(lens[i] > 0)] data = [x for (i, x) in enumerate(data) if (i in nonzero_lengths)] lens = [l for (i, l) in enumerate(lens) if (i in nonzero_lengths)] data = np.vstack((x[(:, 0:min(lens), :)] for x in data)) return (data, temperatures, setpoints, np.array(times))
def read_file(PDB_files, variables): "\n\tPDB_files actually means log_files...lol sorry\n\t\n\tvariables is a list of variables that you want to read from the log files, but they need to be called\n\texactly what they are called in the first line of the log file\n\t\n\tfor instance, 'energy', 'natives', etc...\n\t\n\tReturns a 3D array data where data[i,j,k] corresponds to log_file i, time j within that log file, and variable k (in case you care about multiple variables like energies, natives, etc)\n\t\n\t" data = [] lens = [] variable_indices = [] times = [] temperatures = [] setpoints = [] for (filecounter, filename) in enumerate(PDB_files): step_index = 0 print('Reading file {}'.format(filename)) openfile = open(filename) data.append([]) for line in openfile.readlines(): line = line.rstrip('\n') if (len(line) > 0): entries = line.split() if ('step #' in line): fields = (['step'] + line.split()[2:]) temperature_index = fields.index('temp') if ('setpoint' in fields): setpoint_index = fields.index('setpoint') else: setpoint_index = np.nan for variable in variables: variable_indices.append(fields.index(variable)) data[filecounter].append([]) if (entries[0] == 'STEP'): if ((np.mod(int(entries[1]), step_multiples_to_read) == 0) and (int(entries[1]) >= min_step) and (int(entries[1]) < max_step)): step_index += 1 if (filecounter == 0): times.append(int(entries[1])) if (step_index == 1): temperatures.append(float(entries[(temperature_index + 1)][0:5])) if ('setpoint' in fields): setpoints.append(float(entries[(setpoint_index + 1)])) else: setpoints.append(0) for (v, variable) in enumerate(variables): data[filecounter][v].append(float(entries[(variable_indices[v] + 1)])) lens.append(len(data[filecounter][0])) data[filecounter] = np.array(data[filecounter]) x = np.zeros((1, len(data[filecounter][0]), len(data[filecounter]))) for v in range(len(variables)): x[(0, :, v)] = data[filecounter][(v, :)] data[filecounter] = x nonzero_lengths = [i for i in range(len(lens)) if 
(lens[i] > 0)] data = [x for (i, x) in enumerate(data) if (i in nonzero_lengths)] lens = [l for (i, l) in enumerate(lens) if (i in nonzero_lengths)] data = np.vstack((x[(:, 0:min(lens), :)] for x in data)) return (data, temperatures, setpoints, np.array(times))<|docstring|>PDB_files actually means log_files...lol sorry variables is a list of variables that you want to read from the log files, but they need to be called exactly what they are called in the first line of the log file for instance, 'energy', 'natives', etc... Returns a 3D array data where data[i,j,k] corresponds to log_file i, time j within that log file, and variable k (in case you care about multiple variables like energies, natives, etc)<|endoftext|>
a308e493a0e7a3d19f7f3b0a0b0b826c36c50ef749520de759131894a4476172
def handle(self, *args, **kwargs): '\n Denormalize statistics for all current Organizations and Positions\n ' [OrganizationStats.objects.denormalize(o) for o in Organization.objects.all()] [PositionStats.objects.denormalize(p) for p in Post.objects.all()]
Denormalize statistics for all current Organizations and Positions
tx_salaries/management/commands/denormalize_salary_data.py
handle
texastribune/tx_salaries
6
python
def handle(self, *args, **kwargs): '\n \n ' [OrganizationStats.objects.denormalize(o) for o in Organization.objects.all()] [PositionStats.objects.denormalize(p) for p in Post.objects.all()]
def handle(self, *args, **kwargs): '\n \n ' [OrganizationStats.objects.denormalize(o) for o in Organization.objects.all()] [PositionStats.objects.denormalize(p) for p in Post.objects.all()]<|docstring|>Denormalize statistics for all current Organizations and Positions<|endoftext|>
bc3214ad693868b8d173e3ec2a0c45542d13220716e7e016cb3e51b360e8024b
def test_function_definition(self): ' Test that the package provides customization_data that defines the function ' func = get_function_definition(PACKAGE_NAME, FUNCTION_NAME) assert (func is not None)
Test that the package provides customization_data that defines the function
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_function_definition
tmack-etg/resilient-community-apps
1
python
def test_function_definition(self): ' ' func = get_function_definition(PACKAGE_NAME, FUNCTION_NAME) assert (func is not None)
def test_function_definition(self): ' ' func = get_function_definition(PACKAGE_NAME, FUNCTION_NAME) assert (func is not None)<|docstring|>Test that the package provides customization_data that defines the function<|endoftext|>
75d4e660f1b0a62016c9d6ab845c55a1a8854f2bd729405b49f8650f2f8da4b2
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_success(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['text']['success'] == True)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_success
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_success(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['text']['success'] == True)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_success(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['text']['success'] == True)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
356777e06e14de87f56f15d92ca3a639cd8d55b0960640c1346b0bb11f8ec01f
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'rand name')]) def test_app_not_found(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['rand name']['success'] == False)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_app_not_found
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'rand name')]) def test_app_not_found(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['rand name']['success'] == False)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'rand name')]) def test_app_not_found(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['rand name']['success'] == False)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
850d4bc571c00d8745b29fec097e1a1dbc911f6ce83367af7715f075d023912f
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_fail_info(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(404, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} with pytest.raises(AssertionError): results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_fail_info
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_fail_info(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(404, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} with pytest.raises(AssertionError): results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_fail_info(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(404, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} with pytest.raises(AssertionError): results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
d56427d12443801f135c61b87a2e2e7bc0910e95c34042461431ed04cfc60cc5
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_fail_instance(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(404, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['text']['success'] == False)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_fail_instance
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_fail_instance(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(404, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['text']['success'] == False)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', 'test1')]) def test_fail_instance(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(404, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['text']['success'] == False)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
70c1fdcb23772a1328dddb922db80b11ddbce813e96e31020274d67eb59f108b
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'inst1, inst2', 'test1')]) def test_fail_and_succeed(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['inst1']['success'] == True) assert (results['test1']['inst2']['success'] == True)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_fail_and_succeed
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'inst1, inst2', 'test1')]) def test_fail_and_succeed(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['inst1']['success'] == True) assert (results['test1']['inst2']['success'] == True)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'inst1, inst2', 'test1')]) def test_fail_and_succeed(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['test1']['inst1']['success'] == True) assert (results['test1']['inst2']['success'] == True)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
22a3538a7ac1c6ed29bccbd68478d514bd42874d0a7cc50d8e34d65657200497
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('unreal command', 'inst1, inst2', 'test1')]) def test_fail_and_succeed(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['success'] == False)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_fail_and_succeed
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('unreal command', 'inst1, inst2', 'test1')]) def test_fail_and_succeed(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['success'] == False)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('unreal command', 'inst1, inst2', 'test1')]) def test_fail_and_succeed(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params) assert (results['success'] == False)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
50d214a5d3583d27fc75ddb4570046bf7a57dd45d55d3d140811d78b29a388c8
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', None), ('delete', None, 'test1'), (None, 'text', 'test1')]) def test_fail_parameters(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' Test calling with sample values for the parameters ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} with pytest.raises(AssertionError): results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params)
Test calling with sample values for the parameters
fn_cloud_foundry/tests/test_fn_cloud_foundry_instance_command.py
test_fail_parameters
tmack-etg/resilient-community-apps
1
python
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', None), ('delete', None, 'test1'), (None, 'text', 'test1')]) def test_fail_parameters(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} with pytest.raises(AssertionError): results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params)
@patch('fn_cloud_foundry.components.fn_cloud_foundry_instance_command.IBMCloudFoundryAuthenticator') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.get') @patch('fn_cloud_foundry.util.cloud_foundry_api.requests.delete') @pytest.mark.parametrize('fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications', [('delete', 'text', None), ('delete', None, 'test1'), (None, 'text', 'test1')]) def test_fail_parameters(self, delete, get, auth, circuits_app, fn_cloud_foundry_instance_action, fn_cloud_foundry_instances, fn_cloud_foundry_applications): ' ' auth.return_value = AuthenticationMock() delete.return_value = give_response(204, {}) get.return_value = give_response(200, GUIDS_MOCK) function_params = {'fn_cloud_foundry_instance_action': fn_cloud_foundry_instance_action, 'fn_cloud_foundry_instances': fn_cloud_foundry_instances, 'fn_cloud_foundry_applications': fn_cloud_foundry_applications, 'fn_cloud_foundry_additional_parameters_json': '{}'} with pytest.raises(AssertionError): results = call_fn_cloud_foundry_instance_command_function(circuits_app, function_params)<|docstring|>Test calling with sample values for the parameters<|endoftext|>
c487a91fe26f4f6de7798f3fab449b87ce29902270bfbb1e88f3b21f9ea123d6
def evaluate(lex, lab, debugmode, unvoiced, voiced, executionStack, currentStack, otherStack, register): '\n Evaluates code.\n Input is a list of lexemes, and a dictionary of labels.\n Most definitions of instructions are located here.\n Returns the current stack and the other stack as a tuple (current, other).\n ' numList = 0 numFun = 0 numLoops = 0 if debugmode: print('Initial Conditions:') print('Unvoiced:', unvoiced) print('Voiced:', voiced) print('Execution:', executionStack) print('Register:', repr(register)) print() executionDepth = 0 ep = 0 while (ep < len(lex)): if debugmode: print(ep, lex[ep].token, repr(lex[ep].lexeme)) otherStack = (voiced if (currentStack == unvoiced) else unvoiced) if (lex[ep].token == T.NUMBER): base = 10 for d in lex[ep].lexeme: if (d in string.ascii_letters): base = 36 break currentStack.append(convert_base(lex[ep].lexeme, base)) if (lex[ep].token == T.STRING): currentStack.append(lex[ep].lexeme[1:(- 1)]) if (lex[ep].token == T.LISTBEGIN): numList = 1 list = '[' while (numList > 0): ep += 1 list += (lex[ep].lexeme if (lex[ep].token != T.LISTSEP) else ',') if (lex[ep].token == T.LISTBEGIN): numList += 1 if (lex[ep].token == T.LISTEND): numList -= 1 currentStack.append(eval(list)) if (lex[ep].token == T.INSTRUCTION): if (lex[ep].lexeme == 'ɔ'): ep = lab[lex[(ep + 1)].lexeme] elif (lex[ep].lexeme == 'ʌ'): truthy = False con = currentStack.pop() if (type(con) in [int, float]): truthy = (con != 0) elif (type(con) in [str]): truthy = (con != '') elif (type(con) in [list]): truthy = (con != []) ep += (1 if truthy else 0) elif ((lex[ep].lexeme == 'e') and (len(executionStack) > 0)): currentStack.append(executionStack[(- 1)]) elif ((lex[ep].lexeme == 'ø') and (len(executionStack) > 0)): executionStack[(- 1)] = currentStack.pop() elif ((lex[ep].lexeme == 'æ') and (len(executionStack) > 1)): currentStack.append(executionStack[(- 2)]) elif ((lex[ep].lexeme == 'œ') and (len(executionStack) > 1)): executionStack[(- 2)] = currentStack.pop() elif 
(lex[ep].lexeme == 'ɸ'): currentStack = unvoiced otherStack = voiced elif (lex[ep].lexeme == 'β'): currentStack = voiced otherStack = unvoiced elif (lex[ep].lexeme == 'ɓ'): currentStack.pop((1 if (currentStack == unvoiced) else 0)) elif (lex[ep].lexeme == 'k'): otherStack.append(currentStack.pop()) elif (lex[ep].lexeme == 'g'): currentStack.append(otherStack.pop()) elif (lex[ep].lexeme == 'w'): register = currentStack.pop() elif (lex[ep].lexeme == 'ʍ'): currentStack.append(register) else: executeInstruction(lex[ep].lexeme, unvoiced, voiced, currentStack) if ((ep + 1) < len(lex)): if ((lex[ep].token == T.FUNNAME) and (lex[(ep + 1)].token != T.FUNDEFSTART)): executionStack.append(ep) ep = lab[lex[ep].lexeme] executionDepth += 1 if ((lex[ep].token == T.FUNNAME) and (lex[(ep + 1)].token == T.FUNDEFSTART)): while (lex[ep].token != T.FUNDEFEND): ep += 1 if (lex[ep].token == T.LOOPSTART): start = currentStack.pop() end = currentStack.pop() executionStack.append(end) executionStack.append(start) numLoops += 1 if (lex[ep].token == T.LOOPEND): if (numLoops > 0): if (executionStack[(- 1)] < executionStack[(- 2)]): ep = (lab[ep] - 1) else: executionStack.pop() executionStack.pop() numLoops -= 1 if (lex[ep].token == T.LOOPEXIT): if (numLoops > 0): for i in range(ep, len(lex)): if (i in lab): ep = lab[i] executionStack.pop() executionStack.pop() numLoops -= 1 break if ((executionDepth > 0) and (lex[ep].token == T.FUNDEFEND)): executionDepth -= 1 ep = executionStack.pop() if debugmode: if (currentStack == unvoiced): print('Unvoiced:', unvoiced, '<-- currentStack') print('Voiced:', voiced) else: print('Unvoiced:', unvoiced) print('Voiced:', voiced, '<-- currentStack') print('Execution:', executionStack) print('Register:', repr(register)) print() ep += 1 return (currentStack, otherStack)
Evaluates code. Input is a list of lexemes, and a dictionary of labels. Most definitions of instructions are located here. Returns the current stack and the other stack as a tuple (current, other).
src/evaluator.py
evaluate
bigyihsuan/International-Phonetic-Esoteric-Language
14
python
def evaluate(lex, lab, debugmode, unvoiced, voiced, executionStack, currentStack, otherStack, register): '\n Evaluates code.\n Input is a list of lexemes, and a dictionary of labels.\n Most definitions of instructions are located here.\n Returns the current stack and the other stack as a tuple (current, other).\n ' numList = 0 numFun = 0 numLoops = 0 if debugmode: print('Initial Conditions:') print('Unvoiced:', unvoiced) print('Voiced:', voiced) print('Execution:', executionStack) print('Register:', repr(register)) print() executionDepth = 0 ep = 0 while (ep < len(lex)): if debugmode: print(ep, lex[ep].token, repr(lex[ep].lexeme)) otherStack = (voiced if (currentStack == unvoiced) else unvoiced) if (lex[ep].token == T.NUMBER): base = 10 for d in lex[ep].lexeme: if (d in string.ascii_letters): base = 36 break currentStack.append(convert_base(lex[ep].lexeme, base)) if (lex[ep].token == T.STRING): currentStack.append(lex[ep].lexeme[1:(- 1)]) if (lex[ep].token == T.LISTBEGIN): numList = 1 list = '[' while (numList > 0): ep += 1 list += (lex[ep].lexeme if (lex[ep].token != T.LISTSEP) else ',') if (lex[ep].token == T.LISTBEGIN): numList += 1 if (lex[ep].token == T.LISTEND): numList -= 1 currentStack.append(eval(list)) if (lex[ep].token == T.INSTRUCTION): if (lex[ep].lexeme == 'ɔ'): ep = lab[lex[(ep + 1)].lexeme] elif (lex[ep].lexeme == 'ʌ'): truthy = False con = currentStack.pop() if (type(con) in [int, float]): truthy = (con != 0) elif (type(con) in [str]): truthy = (con != ) elif (type(con) in [list]): truthy = (con != []) ep += (1 if truthy else 0) elif ((lex[ep].lexeme == 'e') and (len(executionStack) > 0)): currentStack.append(executionStack[(- 1)]) elif ((lex[ep].lexeme == 'ø') and (len(executionStack) > 0)): executionStack[(- 1)] = currentStack.pop() elif ((lex[ep].lexeme == 'æ') and (len(executionStack) > 1)): currentStack.append(executionStack[(- 2)]) elif ((lex[ep].lexeme == 'œ') and (len(executionStack) > 1)): executionStack[(- 2)] = currentStack.pop() elif 
(lex[ep].lexeme == 'ɸ'): currentStack = unvoiced otherStack = voiced elif (lex[ep].lexeme == 'β'): currentStack = voiced otherStack = unvoiced elif (lex[ep].lexeme == 'ɓ'): currentStack.pop((1 if (currentStack == unvoiced) else 0)) elif (lex[ep].lexeme == 'k'): otherStack.append(currentStack.pop()) elif (lex[ep].lexeme == 'g'): currentStack.append(otherStack.pop()) elif (lex[ep].lexeme == 'w'): register = currentStack.pop() elif (lex[ep].lexeme == 'ʍ'): currentStack.append(register) else: executeInstruction(lex[ep].lexeme, unvoiced, voiced, currentStack) if ((ep + 1) < len(lex)): if ((lex[ep].token == T.FUNNAME) and (lex[(ep + 1)].token != T.FUNDEFSTART)): executionStack.append(ep) ep = lab[lex[ep].lexeme] executionDepth += 1 if ((lex[ep].token == T.FUNNAME) and (lex[(ep + 1)].token == T.FUNDEFSTART)): while (lex[ep].token != T.FUNDEFEND): ep += 1 if (lex[ep].token == T.LOOPSTART): start = currentStack.pop() end = currentStack.pop() executionStack.append(end) executionStack.append(start) numLoops += 1 if (lex[ep].token == T.LOOPEND): if (numLoops > 0): if (executionStack[(- 1)] < executionStack[(- 2)]): ep = (lab[ep] - 1) else: executionStack.pop() executionStack.pop() numLoops -= 1 if (lex[ep].token == T.LOOPEXIT): if (numLoops > 0): for i in range(ep, len(lex)): if (i in lab): ep = lab[i] executionStack.pop() executionStack.pop() numLoops -= 1 break if ((executionDepth > 0) and (lex[ep].token == T.FUNDEFEND)): executionDepth -= 1 ep = executionStack.pop() if debugmode: if (currentStack == unvoiced): print('Unvoiced:', unvoiced, '<-- currentStack') print('Voiced:', voiced) else: print('Unvoiced:', unvoiced) print('Voiced:', voiced, '<-- currentStack') print('Execution:', executionStack) print('Register:', repr(register)) print() ep += 1 return (currentStack, otherStack)
def evaluate(lex, lab, debugmode, unvoiced, voiced, executionStack, currentStack, otherStack, register): '\n Evaluates code.\n Input is a list of lexemes, and a dictionary of labels.\n Most definitions of instructions are located here.\n Returns the current stack and the other stack as a tuple (current, other).\n ' numList = 0 numFun = 0 numLoops = 0 if debugmode: print('Initial Conditions:') print('Unvoiced:', unvoiced) print('Voiced:', voiced) print('Execution:', executionStack) print('Register:', repr(register)) print() executionDepth = 0 ep = 0 while (ep < len(lex)): if debugmode: print(ep, lex[ep].token, repr(lex[ep].lexeme)) otherStack = (voiced if (currentStack == unvoiced) else unvoiced) if (lex[ep].token == T.NUMBER): base = 10 for d in lex[ep].lexeme: if (d in string.ascii_letters): base = 36 break currentStack.append(convert_base(lex[ep].lexeme, base)) if (lex[ep].token == T.STRING): currentStack.append(lex[ep].lexeme[1:(- 1)]) if (lex[ep].token == T.LISTBEGIN): numList = 1 list = '[' while (numList > 0): ep += 1 list += (lex[ep].lexeme if (lex[ep].token != T.LISTSEP) else ',') if (lex[ep].token == T.LISTBEGIN): numList += 1 if (lex[ep].token == T.LISTEND): numList -= 1 currentStack.append(eval(list)) if (lex[ep].token == T.INSTRUCTION): if (lex[ep].lexeme == 'ɔ'): ep = lab[lex[(ep + 1)].lexeme] elif (lex[ep].lexeme == 'ʌ'): truthy = False con = currentStack.pop() if (type(con) in [int, float]): truthy = (con != 0) elif (type(con) in [str]): truthy = (con != ) elif (type(con) in [list]): truthy = (con != []) ep += (1 if truthy else 0) elif ((lex[ep].lexeme == 'e') and (len(executionStack) > 0)): currentStack.append(executionStack[(- 1)]) elif ((lex[ep].lexeme == 'ø') and (len(executionStack) > 0)): executionStack[(- 1)] = currentStack.pop() elif ((lex[ep].lexeme == 'æ') and (len(executionStack) > 1)): currentStack.append(executionStack[(- 2)]) elif ((lex[ep].lexeme == 'œ') and (len(executionStack) > 1)): executionStack[(- 2)] = currentStack.pop() elif 
(lex[ep].lexeme == 'ɸ'): currentStack = unvoiced otherStack = voiced elif (lex[ep].lexeme == 'β'): currentStack = voiced otherStack = unvoiced elif (lex[ep].lexeme == 'ɓ'): currentStack.pop((1 if (currentStack == unvoiced) else 0)) elif (lex[ep].lexeme == 'k'): otherStack.append(currentStack.pop()) elif (lex[ep].lexeme == 'g'): currentStack.append(otherStack.pop()) elif (lex[ep].lexeme == 'w'): register = currentStack.pop() elif (lex[ep].lexeme == 'ʍ'): currentStack.append(register) else: executeInstruction(lex[ep].lexeme, unvoiced, voiced, currentStack) if ((ep + 1) < len(lex)): if ((lex[ep].token == T.FUNNAME) and (lex[(ep + 1)].token != T.FUNDEFSTART)): executionStack.append(ep) ep = lab[lex[ep].lexeme] executionDepth += 1 if ((lex[ep].token == T.FUNNAME) and (lex[(ep + 1)].token == T.FUNDEFSTART)): while (lex[ep].token != T.FUNDEFEND): ep += 1 if (lex[ep].token == T.LOOPSTART): start = currentStack.pop() end = currentStack.pop() executionStack.append(end) executionStack.append(start) numLoops += 1 if (lex[ep].token == T.LOOPEND): if (numLoops > 0): if (executionStack[(- 1)] < executionStack[(- 2)]): ep = (lab[ep] - 1) else: executionStack.pop() executionStack.pop() numLoops -= 1 if (lex[ep].token == T.LOOPEXIT): if (numLoops > 0): for i in range(ep, len(lex)): if (i in lab): ep = lab[i] executionStack.pop() executionStack.pop() numLoops -= 1 break if ((executionDepth > 0) and (lex[ep].token == T.FUNDEFEND)): executionDepth -= 1 ep = executionStack.pop() if debugmode: if (currentStack == unvoiced): print('Unvoiced:', unvoiced, '<-- currentStack') print('Voiced:', voiced) else: print('Unvoiced:', unvoiced) print('Voiced:', voiced, '<-- currentStack') print('Execution:', executionStack) print('Register:', repr(register)) print() ep += 1 return (currentStack, otherStack)<|docstring|>Evaluates code. Input is a list of lexemes, and a dictionary of labels. Most definitions of instructions are located here. 
Returns the current stack and the other stack as a tuple (current, other).<|endoftext|>
ce3d6a1fd70f515f716d90c003024e588ca5f7243ff66883e06040ec053231e6
def generate_unit_vectors(n, sigma=(2 * np.pi)): 'Generates matrix NxN of unit length vectors' phi = np.random.uniform(0, sigma, (n, n)) v = np.stack((np.cos(phi), np.sin(phi)), axis=(- 1)) return v
Generates matrix NxN of unit length vectors
Dataset.py
generate_unit_vectors
kirill-pinigin/NeuralPhotoFilter
1
python
def generate_unit_vectors(n, sigma=(2 * np.pi)): phi = np.random.uniform(0, sigma, (n, n)) v = np.stack((np.cos(phi), np.sin(phi)), axis=(- 1)) return v
def generate_unit_vectors(n, sigma=(2 * np.pi)): phi = np.random.uniform(0, sigma, (n, n)) v = np.stack((np.cos(phi), np.sin(phi)), axis=(- 1)) return v<|docstring|>Generates matrix NxN of unit length vectors<|endoftext|>
213cd876031d7aaa92770a97b76340329fb2e16b735fe7242db0f862f1c655f3
def generate_configs(base_config_file, pre_train_model, data_root_, train_name, test_name, num_classes, work_dir, log_interval, checkpoint_interval, total_epoch, lr_config_step): '\n\n\n :param base_config_file: the config file\n :param pre_train_model: pre_train_model\n :param data_root_: the root path of data\n :param train_name: the train json name\n :param test_name: the test json name\n :param num_classes: the number of class/category\n :param work_dir: the directory for saving training results\n :param log_interval: the interval for logging\n :param checkpoint_interval: the interval for saving checkpoint\n :param total_epoch: the total number of training epoch\n :param lr_config_step: the step list for adjusting learning rate\n :return:\n ' f = open(base_config_file, 'r') file_name = (str(time.strftime('%Y%m%d_%H%M%S', time.localtime())) + '.py') file_name = os.path.join(work_dir, file_name) w = open(file_name, 'w') stat = None for line in f.readlines(): prefix_line = line.split('=')[0] if ('pretrained' in prefix_line): line = (((line.split('=')[0] + "='") + pre_train_model) + "',\n") elif ('num_classes' in prefix_line): line = (((line.split('=')[0] + '=') + str((num_classes + 1))) + ',\n') elif ('data_root' in prefix_line): line = (((line.split('=')[0] + "= '") + str(data_root_)) + "'\n") elif (('interval' in line) and ('dict(' not in line)): line = (((line.split('=')[0] + '=') + str(log_interval)) + ',\n') elif ('checkpoint_config' in prefix_line): line = (((line.split('=')[0] + '= dict(interval=') + str(checkpoint_interval)) + ')\n') elif ('work_dir' in prefix_line): line = (((line.split('=')[0] + "= '") + work_dir) + "'\n") elif ('total_epochs' in prefix_line): line = (((line.split('=')[0] + '= ') + str(total_epoch)) + '\n') elif ('step=' in line): line = (((line.split('=')[0] + '=') + str(lr_config_step)) + ')\n') if ('train=dict' in line): stat = 'train' elif ('val=dict' in line): stat = 'val' elif ('test=dict' in line): stat = 'test' if ('img_prefix' 
in line): line = (line.split('=')[0] + '=data_root,\n') if ((stat == 'train') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + train_name) + "',\n") elif ((stat == 'val') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + test_name) + "',\n") elif ((stat == 'test') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + test_name) + "',\n") w.write(line) f.close() w.close() return file_name
:param base_config_file: the config file :param pre_train_model: pre_train_model :param data_root_: the root path of data :param train_name: the train json name :param test_name: the test json name :param num_classes: the number of class/category :param work_dir: the directory for saving training results :param log_interval: the interval for logging :param checkpoint_interval: the interval for saving checkpoint :param total_epoch: the total number of training epoch :param lr_config_step: the step list for adjusting learning rate :return:
tools_2/configs/auto_gen_config_file.py
generate_configs
hukefei/chongqing_contest
1
python
def generate_configs(base_config_file, pre_train_model, data_root_, train_name, test_name, num_classes, work_dir, log_interval, checkpoint_interval, total_epoch, lr_config_step): '\n\n\n :param base_config_file: the config file\n :param pre_train_model: pre_train_model\n :param data_root_: the root path of data\n :param train_name: the train json name\n :param test_name: the test json name\n :param num_classes: the number of class/category\n :param work_dir: the directory for saving training results\n :param log_interval: the interval for logging\n :param checkpoint_interval: the interval for saving checkpoint\n :param total_epoch: the total number of training epoch\n :param lr_config_step: the step list for adjusting learning rate\n :return:\n ' f = open(base_config_file, 'r') file_name = (str(time.strftime('%Y%m%d_%H%M%S', time.localtime())) + '.py') file_name = os.path.join(work_dir, file_name) w = open(file_name, 'w') stat = None for line in f.readlines(): prefix_line = line.split('=')[0] if ('pretrained' in prefix_line): line = (((line.split('=')[0] + "='") + pre_train_model) + "',\n") elif ('num_classes' in prefix_line): line = (((line.split('=')[0] + '=') + str((num_classes + 1))) + ',\n') elif ('data_root' in prefix_line): line = (((line.split('=')[0] + "= '") + str(data_root_)) + "'\n") elif (('interval' in line) and ('dict(' not in line)): line = (((line.split('=')[0] + '=') + str(log_interval)) + ',\n') elif ('checkpoint_config' in prefix_line): line = (((line.split('=')[0] + '= dict(interval=') + str(checkpoint_interval)) + ')\n') elif ('work_dir' in prefix_line): line = (((line.split('=')[0] + "= '") + work_dir) + "'\n") elif ('total_epochs' in prefix_line): line = (((line.split('=')[0] + '= ') + str(total_epoch)) + '\n') elif ('step=' in line): line = (((line.split('=')[0] + '=') + str(lr_config_step)) + ')\n') if ('train=dict' in line): stat = 'train' elif ('val=dict' in line): stat = 'val' elif ('test=dict' in line): stat = 'test' if ('img_prefix' 
in line): line = (line.split('=')[0] + '=data_root,\n') if ((stat == 'train') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + train_name) + "',\n") elif ((stat == 'val') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + test_name) + "',\n") elif ((stat == 'test') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + test_name) + "',\n") w.write(line) f.close() w.close() return file_name
def generate_configs(base_config_file, pre_train_model, data_root_, train_name, test_name, num_classes, work_dir, log_interval, checkpoint_interval, total_epoch, lr_config_step): '\n\n\n :param base_config_file: the config file\n :param pre_train_model: pre_train_model\n :param data_root_: the root path of data\n :param train_name: the train json name\n :param test_name: the test json name\n :param num_classes: the number of class/category\n :param work_dir: the directory for saving training results\n :param log_interval: the interval for logging\n :param checkpoint_interval: the interval for saving checkpoint\n :param total_epoch: the total number of training epoch\n :param lr_config_step: the step list for adjusting learning rate\n :return:\n ' f = open(base_config_file, 'r') file_name = (str(time.strftime('%Y%m%d_%H%M%S', time.localtime())) + '.py') file_name = os.path.join(work_dir, file_name) w = open(file_name, 'w') stat = None for line in f.readlines(): prefix_line = line.split('=')[0] if ('pretrained' in prefix_line): line = (((line.split('=')[0] + "='") + pre_train_model) + "',\n") elif ('num_classes' in prefix_line): line = (((line.split('=')[0] + '=') + str((num_classes + 1))) + ',\n') elif ('data_root' in prefix_line): line = (((line.split('=')[0] + "= '") + str(data_root_)) + "'\n") elif (('interval' in line) and ('dict(' not in line)): line = (((line.split('=')[0] + '=') + str(log_interval)) + ',\n') elif ('checkpoint_config' in prefix_line): line = (((line.split('=')[0] + '= dict(interval=') + str(checkpoint_interval)) + ')\n') elif ('work_dir' in prefix_line): line = (((line.split('=')[0] + "= '") + work_dir) + "'\n") elif ('total_epochs' in prefix_line): line = (((line.split('=')[0] + '= ') + str(total_epoch)) + '\n') elif ('step=' in line): line = (((line.split('=')[0] + '=') + str(lr_config_step)) + ')\n') if ('train=dict' in line): stat = 'train' elif ('val=dict' in line): stat = 'val' elif ('test=dict' in line): stat = 'test' if ('img_prefix' 
in line): line = (line.split('=')[0] + '=data_root,\n') if ((stat == 'train') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + train_name) + "',\n") elif ((stat == 'val') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + test_name) + "',\n") elif ((stat == 'test') and ('ann_file' in line)): line = (((line.split('=')[0] + "=data_root + '") + test_name) + "',\n") w.write(line) f.close() w.close() return file_name<|docstring|>:param base_config_file: the config file :param pre_train_model: pre_train_model :param data_root_: the root path of data :param train_name: the train json name :param test_name: the test json name :param num_classes: the number of class/category :param work_dir: the directory for saving training results :param log_interval: the interval for logging :param checkpoint_interval: the interval for saving checkpoint :param total_epoch: the total number of training epoch :param lr_config_step: the step list for adjusting learning rate :return:<|endoftext|>
ea4808996cf60b09d14d4cdbedaff82d5109eb122122349573640043a2e0d13d
def solve_linear_system(self): 'Convert technosphere matrix from sparse to dense before solving linear system.' return solve(self.technosphere_matrix.todense(), self.demand_array)
Convert technosphere matrix from sparse to dense before solving linear system.
bw_calc/dense_lca.py
solve_linear_system
brightway-lca/bw_calc
0
python
def solve_linear_system(self): return solve(self.technosphere_matrix.todense(), self.demand_array)
def solve_linear_system(self): return solve(self.technosphere_matrix.todense(), self.demand_array)<|docstring|>Convert technosphere matrix from sparse to dense before solving linear system.<|endoftext|>
e76223dcb2ce5c55181360aac33daea795c64193ec342a8755e228fa68f6264f
def resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo: 'Builds a SymbolInfo object based off of a designated operation\n and list of resolvers. The latest resolver takes precendent.\n\n Args:\n operation: the cirq.Operation object to resolve\n resolvers: a list of SymbolResolvers which provides instructions\n on how to build SymbolInfo objects.\n\n Raises:\n ValueError: if the operation cannot be resolved into a symbol.\n ' symbol_info = None for resolver in resolvers: info = resolver(operation) if (info is not None): symbol_info = info if (symbol_info is None): raise ValueError(f'Cannot resolve operation: {operation}') return symbol_info
Builds a SymbolInfo object based off of a designated operation and list of resolvers. The latest resolver takes precendent. Args: operation: the cirq.Operation object to resolve resolvers: a list of SymbolResolvers which provides instructions on how to build SymbolInfo objects. Raises: ValueError: if the operation cannot be resolved into a symbol.
cirq-web/cirq_web/circuits/symbols.py
resolve_operation
Tinkidinki/Cirq
3,326
python
def resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo: 'Builds a SymbolInfo object based off of a designated operation\n and list of resolvers. The latest resolver takes precendent.\n\n Args:\n operation: the cirq.Operation object to resolve\n resolvers: a list of SymbolResolvers which provides instructions\n on how to build SymbolInfo objects.\n\n Raises:\n ValueError: if the operation cannot be resolved into a symbol.\n ' symbol_info = None for resolver in resolvers: info = resolver(operation) if (info is not None): symbol_info = info if (symbol_info is None): raise ValueError(f'Cannot resolve operation: {operation}') return symbol_info
def resolve_operation(operation: cirq.Operation, resolvers: Iterable[SymbolResolver]) -> SymbolInfo: 'Builds a SymbolInfo object based off of a designated operation\n and list of resolvers. The latest resolver takes precendent.\n\n Args:\n operation: the cirq.Operation object to resolve\n resolvers: a list of SymbolResolvers which provides instructions\n on how to build SymbolInfo objects.\n\n Raises:\n ValueError: if the operation cannot be resolved into a symbol.\n ' symbol_info = None for resolver in resolvers: info = resolver(operation) if (info is not None): symbol_info = info if (symbol_info is None): raise ValueError(f'Cannot resolve operation: {operation}') return symbol_info<|docstring|>Builds a SymbolInfo object based off of a designated operation and list of resolvers. The latest resolver takes precendent. Args: operation: the cirq.Operation object to resolve resolvers: a list of SymbolResolvers which provides instructions on how to build SymbolInfo objects. Raises: ValueError: if the operation cannot be resolved into a symbol.<|endoftext|>
51345713c59a06f36a321d9983dee0d26cf565c74787828ffcac3be0451c95ee
@staticmethod def unknown_operation(num_qubits: int) -> 'SymbolInfo': 'Generates a SymbolInfo object for an unknown operation.\n\n Args:\n num_qubits: the number of qubits in the operation\n ' symbol_info = SymbolInfo([], []) for _ in range(num_qubits): symbol_info.colors.append('gray') symbol_info.labels.append('?') return symbol_info
Generates a SymbolInfo object for an unknown operation. Args: num_qubits: the number of qubits in the operation
cirq-web/cirq_web/circuits/symbols.py
unknown_operation
Tinkidinki/Cirq
3,326
python
@staticmethod def unknown_operation(num_qubits: int) -> 'SymbolInfo': 'Generates a SymbolInfo object for an unknown operation.\n\n Args:\n num_qubits: the number of qubits in the operation\n ' symbol_info = SymbolInfo([], []) for _ in range(num_qubits): symbol_info.colors.append('gray') symbol_info.labels.append('?') return symbol_info
@staticmethod def unknown_operation(num_qubits: int) -> 'SymbolInfo': 'Generates a SymbolInfo object for an unknown operation.\n\n Args:\n num_qubits: the number of qubits in the operation\n ' symbol_info = SymbolInfo([], []) for _ in range(num_qubits): symbol_info.colors.append('gray') symbol_info.labels.append('?') return symbol_info<|docstring|>Generates a SymbolInfo object for an unknown operation. Args: num_qubits: the number of qubits in the operation<|endoftext|>
b0b49400b6b0c3b8c5217727acba3ea929a11eee2aecd0990584212e240bdbcf
@abc.abstractmethod def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]: 'Converts cirq.Operation objects into SymbolInfo objects for serialization.'
Converts cirq.Operation objects into SymbolInfo objects for serialization.
cirq-web/cirq_web/circuits/symbols.py
resolve
Tinkidinki/Cirq
3,326
python
@abc.abstractmethod def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]:
@abc.abstractmethod def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]: <|docstring|>Converts cirq.Operation objects into SymbolInfo objects for serialization.<|endoftext|>
20dc4ef7599b996563afcc33c36e5514d80359a5ae9c528957987870597fdc66
def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]: 'Checks for the _circuit_diagram_info attribute of the operation,\n and if it exists, build the symbol information from it. Otherwise,\n builds symbol info for an unknown operation.\n\n Args:\n operation: the cirq.Operation object to resolve\n ' try: wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols except TypeError: return SymbolInfo.unknown_operation(cirq.num_qubits(operation)) symbol_info = SymbolInfo(list(wire_symbols), []) for symbol in wire_symbols: symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray')) return symbol_info
Checks for the _circuit_diagram_info attribute of the operation, and if it exists, build the symbol information from it. Otherwise, builds symbol info for an unknown operation. Args: operation: the cirq.Operation object to resolve
cirq-web/cirq_web/circuits/symbols.py
resolve
Tinkidinki/Cirq
3,326
python
def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]: 'Checks for the _circuit_diagram_info attribute of the operation,\n and if it exists, build the symbol information from it. Otherwise,\n builds symbol info for an unknown operation.\n\n Args:\n operation: the cirq.Operation object to resolve\n ' try: wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols except TypeError: return SymbolInfo.unknown_operation(cirq.num_qubits(operation)) symbol_info = SymbolInfo(list(wire_symbols), []) for symbol in wire_symbols: symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray')) return symbol_info
def resolve(self, operation: cirq.Operation) -> Optional[SymbolInfo]: 'Checks for the _circuit_diagram_info attribute of the operation,\n and if it exists, build the symbol information from it. Otherwise,\n builds symbol info for an unknown operation.\n\n Args:\n operation: the cirq.Operation object to resolve\n ' try: wire_symbols = cirq.circuit_diagram_info(operation).wire_symbols except TypeError: return SymbolInfo.unknown_operation(cirq.num_qubits(operation)) symbol_info = SymbolInfo(list(wire_symbols), []) for symbol in wire_symbols: symbol_info.colors.append(DefaultResolver._SYMBOL_COLORS.get(symbol, 'gray')) return symbol_info<|docstring|>Checks for the _circuit_diagram_info attribute of the operation, and if it exists, build the symbol information from it. Otherwise, builds symbol info for an unknown operation. Args: operation: the cirq.Operation object to resolve<|endoftext|>
3164709af420ab3467a07b4b392bf3de72eff4892bbd8e66358d8711210c5a2a
def __init__(self, wire_symbols, location_info, color_info, moment): 'Gathers symbol information from an operation and builds an\n object to represent it in 3D.\n\n Args:\n wire_symbols: a list of symbols taken from circuit_diagram_info()\n that will be used to represent the operation in the 3D circuit.\n\n location_info: A list of coordinates for each wire_symbol. The\n index of the coordinate tuple in the location_info list must\n correspond with the index of the symbol in the wire_symbols list.\n\n color_info: a list representing the desired color of the symbol(s).\n These will also correspond to index of the symbol in the\n wire_symbols list.\n\n moment: the moment where the symbol should be.\n ' self.wire_symbols = wire_symbols self.location_info = location_info self.color_info = color_info self.moment = moment
Gathers symbol information from an operation and builds an object to represent it in 3D. Args: wire_symbols: a list of symbols taken from circuit_diagram_info() that will be used to represent the operation in the 3D circuit. location_info: A list of coordinates for each wire_symbol. The index of the coordinate tuple in the location_info list must correspond with the index of the symbol in the wire_symbols list. color_info: a list representing the desired color of the symbol(s). These will also correspond to index of the symbol in the wire_symbols list. moment: the moment where the symbol should be.
cirq-web/cirq_web/circuits/symbols.py
__init__
Tinkidinki/Cirq
3,326
python
def __init__(self, wire_symbols, location_info, color_info, moment): 'Gathers symbol information from an operation and builds an\n object to represent it in 3D.\n\n Args:\n wire_symbols: a list of symbols taken from circuit_diagram_info()\n that will be used to represent the operation in the 3D circuit.\n\n location_info: A list of coordinates for each wire_symbol. The\n index of the coordinate tuple in the location_info list must\n correspond with the index of the symbol in the wire_symbols list.\n\n color_info: a list representing the desired color of the symbol(s).\n These will also correspond to index of the symbol in the\n wire_symbols list.\n\n moment: the moment where the symbol should be.\n ' self.wire_symbols = wire_symbols self.location_info = location_info self.color_info = color_info self.moment = moment
def __init__(self, wire_symbols, location_info, color_info, moment): 'Gathers symbol information from an operation and builds an\n object to represent it in 3D.\n\n Args:\n wire_symbols: a list of symbols taken from circuit_diagram_info()\n that will be used to represent the operation in the 3D circuit.\n\n location_info: A list of coordinates for each wire_symbol. The\n index of the coordinate tuple in the location_info list must\n correspond with the index of the symbol in the wire_symbols list.\n\n color_info: a list representing the desired color of the symbol(s).\n These will also correspond to index of the symbol in the\n wire_symbols list.\n\n moment: the moment where the symbol should be.\n ' self.wire_symbols = wire_symbols self.location_info = location_info self.color_info = color_info self.moment = moment<|docstring|>Gathers symbol information from an operation and builds an object to represent it in 3D. Args: wire_symbols: a list of symbols taken from circuit_diagram_info() that will be used to represent the operation in the 3D circuit. location_info: A list of coordinates for each wire_symbol. The index of the coordinate tuple in the location_info list must correspond with the index of the symbol in the wire_symbols list. color_info: a list representing the desired color of the symbol(s). These will also correspond to index of the symbol in the wire_symbols list. moment: the moment where the symbol should be.<|endoftext|>
5d4893934d03f5a9684f8d5c7078cefdcf93d1b4ea4cc53d59002156bdf14148
def do(StateRouterInfoList, dial_db): 'Create code that allows to jump to a state based on an integer value.\n ' result = [' __quex_assert_no_passage();\n', (' %s /* prevent unused label */\n' % Lng.GOTO(DoorID.global_state_router(dial_db), dial_db)), ('%s\n' % Lng.LABEL(DoorID.global_state_router(dial_db), dial_db))] if ((not Setup.computed_gotos_f) and StateRouterInfoList): result.extend(__get_code(StateRouterInfoList)) return result
Create code that allows to jump to a state based on an integer value.
quex/output/core/state_router.py
do
smmckay/quex-mirror
0
python
def do(StateRouterInfoList, dial_db): '\n ' result = [' __quex_assert_no_passage();\n', (' %s /* prevent unused label */\n' % Lng.GOTO(DoorID.global_state_router(dial_db), dial_db)), ('%s\n' % Lng.LABEL(DoorID.global_state_router(dial_db), dial_db))] if ((not Setup.computed_gotos_f) and StateRouterInfoList): result.extend(__get_code(StateRouterInfoList)) return result
def do(StateRouterInfoList, dial_db): '\n ' result = [' __quex_assert_no_passage();\n', (' %s /* prevent unused label */\n' % Lng.GOTO(DoorID.global_state_router(dial_db), dial_db)), ('%s\n' % Lng.LABEL(DoorID.global_state_router(dial_db), dial_db))] if ((not Setup.computed_gotos_f) and StateRouterInfoList): result.extend(__get_code(StateRouterInfoList)) return result<|docstring|>Create code that allows to jump to a state based on an integer value.<|endoftext|>
cf9172998c2dad786cfd4303ca7112e9a4e43d7d42465685a18232206159e09e
@typed(dial_db=DialDB) def get_info(StateIndexList, dial_db): '\n NOTE: At least a \'dummy\' state router is always equired so that \'goto\n QUEX_STATE_ROUTER;\' does not reference a non-existing label. Then, we\n return an empty text array.\n\n <fschaef9/13y10m15d: "Still true with current dial_db implementation?">\n ' if (len(StateIndexList) == 0): return [] result = ([None] * len(StateIndexList)) for (i, index) in enumerate(StateIndexList): assert (type(index) != str) if (index >= 0): adr = index else: assert False, 'Is this still an issue?' adr = DoorID.drop_out((- index), dial_db).related_address result[i] = (index, Lng.GOTO_ADDRESS(adr, dial_db)) return result
NOTE: At least a 'dummy' state router is always equired so that 'goto QUEX_STATE_ROUTER;' does not reference a non-existing label. Then, we return an empty text array. <fschaef9/13y10m15d: "Still true with current dial_db implementation?">
quex/output/core/state_router.py
get_info
smmckay/quex-mirror
0
python
@typed(dial_db=DialDB) def get_info(StateIndexList, dial_db): '\n NOTE: At least a \'dummy\' state router is always equired so that \'goto\n QUEX_STATE_ROUTER;\' does not reference a non-existing label. Then, we\n return an empty text array.\n\n <fschaef9/13y10m15d: "Still true with current dial_db implementation?">\n ' if (len(StateIndexList) == 0): return [] result = ([None] * len(StateIndexList)) for (i, index) in enumerate(StateIndexList): assert (type(index) != str) if (index >= 0): adr = index else: assert False, 'Is this still an issue?' adr = DoorID.drop_out((- index), dial_db).related_address result[i] = (index, Lng.GOTO_ADDRESS(adr, dial_db)) return result
@typed(dial_db=DialDB) def get_info(StateIndexList, dial_db): '\n NOTE: At least a \'dummy\' state router is always equired so that \'goto\n QUEX_STATE_ROUTER;\' does not reference a non-existing label. Then, we\n return an empty text array.\n\n <fschaef9/13y10m15d: "Still true with current dial_db implementation?">\n ' if (len(StateIndexList) == 0): return [] result = ([None] * len(StateIndexList)) for (i, index) in enumerate(StateIndexList): assert (type(index) != str) if (index >= 0): adr = index else: assert False, 'Is this still an issue?' adr = DoorID.drop_out((- index), dial_db).related_address result[i] = (index, Lng.GOTO_ADDRESS(adr, dial_db)) return result<|docstring|>NOTE: At least a 'dummy' state router is always equired so that 'goto QUEX_STATE_ROUTER;' does not reference a non-existing label. Then, we return an empty text array. <fschaef9/13y10m15d: "Still true with current dial_db implementation?"><|endoftext|>
28d5986bf50a04527ab56134cc99dea147db0d8905285305e03d64e6e2d196b0
def _get_framework_args(self): '\n often, a framework provides specific variables that are passed\n into the handler function (e.g. the request object in\n aiohttp). return a dictionary of these arguments, which will be\n added to the function arguments if they appear.\n ' pass
often, a framework provides specific variables that are passed into the handler function (e.g. the request object in aiohttp). return a dictionary of these arguments, which will be added to the function arguments if they appear.
transmute_core/param_extractor.py
_get_framework_args
pawelkoston/transmute-core
42
python
def _get_framework_args(self): '\n often, a framework provides specific variables that are passed\n into the handler function (e.g. the request object in\n aiohttp). return a dictionary of these arguments, which will be\n added to the function arguments if they appear.\n ' pass
def _get_framework_args(self): '\n often, a framework provides specific variables that are passed\n into the handler function (e.g. the request object in\n aiohttp). return a dictionary of these arguments, which will be\n added to the function arguments if they appear.\n ' pass<|docstring|>often, a framework provides specific variables that are passed into the handler function (e.g. the request object in aiohttp). return a dictionary of these arguments, which will be added to the function arguments if they appear.<|endoftext|>
ea33b8dce9685cacfa53d2fede16b910df59cf7fd9a8dbec4d8dda22c39a5690
@property def body(self): ' return the request body. ' raise NotImplementedError()
return the request body.
transmute_core/param_extractor.py
body
pawelkoston/transmute-core
42
python
@property def body(self): ' ' raise NotImplementedError()
@property def body(self): ' ' raise NotImplementedError()<|docstring|>return the request body.<|endoftext|>
44ee87176e23ab7d6def30f80dbee0ee2f9bd61aeb7722b3c7e90f56e52ee51b
def __init__(self, playDomain): '\n :param playDomain: (直播or时移)播放域名\n ' self.playDomain = playDomain self.authStatus = None self.authKey = None
:param playDomain: (直播or时移)播放域名
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/live/apis/SetLivePlayAuthKeyRequest.py
__init__
Ureimu/weather-robot
14
python
def __init__(self, playDomain): '\n \n ' self.playDomain = playDomain self.authStatus = None self.authKey = None
def __init__(self, playDomain): '\n \n ' self.playDomain = playDomain self.authStatus = None self.authKey = None<|docstring|>:param playDomain: (直播or时移)播放域名<|endoftext|>
0c0a64291a1ec2757b0f9020368166e51ae7fcd4adf659490c07976e880c98eb
def setAuthStatus(self, authStatus): '\n :param authStatus: (Optional) (直播or时移)播放鉴权状态\n on: 开启\n off: 关闭\n- 当播放鉴权状态on(开启)时,authKey不能为空\n\n ' self.authStatus = authStatus
:param authStatus: (Optional) (直播or时移)播放鉴权状态 on: 开启 off: 关闭 - 当播放鉴权状态on(开启)时,authKey不能为空
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/live/apis/SetLivePlayAuthKeyRequest.py
setAuthStatus
Ureimu/weather-robot
14
python
def setAuthStatus(self, authStatus): '\n :param authStatus: (Optional) (直播or时移)播放鉴权状态\n on: 开启\n off: 关闭\n- 当播放鉴权状态on(开启)时,authKey不能为空\n\n ' self.authStatus = authStatus
def setAuthStatus(self, authStatus): '\n :param authStatus: (Optional) (直播or时移)播放鉴权状态\n on: 开启\n off: 关闭\n- 当播放鉴权状态on(开启)时,authKey不能为空\n\n ' self.authStatus = authStatus<|docstring|>:param authStatus: (Optional) (直播or时移)播放鉴权状态 on: 开启 off: 关闭 - 当播放鉴权状态on(开启)时,authKey不能为空<|endoftext|>
dd414ad134ea979b8c00a8e1c81c6e24b01a7cdd396f324dc631cdae1a8be8ce
def setAuthKey(self, authKey): '\n :param authKey: (Optional) (直播or时移)播放鉴权key\n- 取值: 支持大小写字母和数字 长度6-32位\n\n ' self.authKey = authKey
:param authKey: (Optional) (直播or时移)播放鉴权key - 取值: 支持大小写字母和数字 长度6-32位
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/live/apis/SetLivePlayAuthKeyRequest.py
setAuthKey
Ureimu/weather-robot
14
python
def setAuthKey(self, authKey): '\n :param authKey: (Optional) (直播or时移)播放鉴权key\n- 取值: 支持大小写字母和数字 长度6-32位\n\n ' self.authKey = authKey
def setAuthKey(self, authKey): '\n :param authKey: (Optional) (直播or时移)播放鉴权key\n- 取值: 支持大小写字母和数字 长度6-32位\n\n ' self.authKey = authKey<|docstring|>:param authKey: (Optional) (直播or时移)播放鉴权key - 取值: 支持大小写字母和数字 长度6-32位<|endoftext|>
afa111ca4efebedfdb565717b5b9f7c1c7911e79e09032b7b0e1d9e57e88a298
def in_docker(): 'Returns: True if running in a docker container, else False' try: with open('/proc/1/cgroup', 'rt') as ifh: contents = ifh.read() return any([(word in contents) for word in ['actions_job', 'docker']]) except OSError: return False
Returns: True if running in a docker container, else False
tools/precommit.py
in_docker
bensampson5/libsv
2
python
def in_docker(): try: with open('/proc/1/cgroup', 'rt') as ifh: contents = ifh.read() return any([(word in contents) for word in ['actions_job', 'docker']]) except OSError: return False
def in_docker(): try: with open('/proc/1/cgroup', 'rt') as ifh: contents = ifh.read() return any([(word in contents) for word in ['actions_job', 'docker']]) except OSError: return False<|docstring|>Returns: True if running in a docker container, else False<|endoftext|>
c427e978013d1b12e3b6a67fb85fd6573aabbf2a3888ad4351cdd00a4b9c8a30
def run_test(): 'Run tests' cmd = ['pytest'] run(cmd)
Run tests
tools/precommit.py
run_test
bensampson5/libsv
2
python
def run_test(): cmd = ['pytest'] run(cmd)
def run_test(): cmd = ['pytest'] run(cmd)<|docstring|>Run tests<|endoftext|>
b7ee543283218d2cdebe83642fe3c58273a3137740369c2d5576738fac0f08b9
def run_check_format(): 'Check formatting in all files' run_check_format_hdl() run_check_format_python()
Check formatting in all files
tools/precommit.py
run_check_format
bensampson5/libsv
2
python
def run_check_format(): run_check_format_hdl() run_check_format_python()
def run_check_format(): run_check_format_hdl() run_check_format_python()<|docstring|>Check formatting in all files<|endoftext|>
0dd0ed80a50e3202ff425972eeea86b2223f7abf9baccc82ae535b2cc8825310
def run_check_format_hdl(): 'Check formatting in HDL files' print('\nChecking HDL formatting...\n', flush=FLUSH) hdl_files = find_sv_files() hdl_file_code_original = [] for hdl_file in hdl_files: with open(hdl_file, 'r') as f: hdl_file_code_original.append(f.readlines()) run_fix_format_hdl(print_output=False) hdl_file_code_formatted = [] for hdl_file in hdl_files: with open(hdl_file, 'r') as f: hdl_file_code_formatted.append(f.readlines()) colorama.init() is_diff = False for i in range(len(hdl_file_code_original)): original = hdl_file_code_original[i] formatted = hdl_file_code_formatted[i] fname = str(hdl_files[i]) diff = list(unified_diff(original, formatted, fromfile=fname, tofile=fname, n=5)) if diff: is_diff = True print_unified_diff_in_color(diff) colorama.deinit() for i in range(len(hdl_files)): with open(hdl_files[i], 'w') as f: f.write(''.join(hdl_file_code_original[i])) if is_diff: raise RuntimeError('HDL format check failed')
Check formatting in HDL files
tools/precommit.py
run_check_format_hdl
bensampson5/libsv
2
python
def run_check_format_hdl(): print('\nChecking HDL formatting...\n', flush=FLUSH) hdl_files = find_sv_files() hdl_file_code_original = [] for hdl_file in hdl_files: with open(hdl_file, 'r') as f: hdl_file_code_original.append(f.readlines()) run_fix_format_hdl(print_output=False) hdl_file_code_formatted = [] for hdl_file in hdl_files: with open(hdl_file, 'r') as f: hdl_file_code_formatted.append(f.readlines()) colorama.init() is_diff = False for i in range(len(hdl_file_code_original)): original = hdl_file_code_original[i] formatted = hdl_file_code_formatted[i] fname = str(hdl_files[i]) diff = list(unified_diff(original, formatted, fromfile=fname, tofile=fname, n=5)) if diff: is_diff = True print_unified_diff_in_color(diff) colorama.deinit() for i in range(len(hdl_files)): with open(hdl_files[i], 'w') as f: f.write(.join(hdl_file_code_original[i])) if is_diff: raise RuntimeError('HDL format check failed')
def run_check_format_hdl(): print('\nChecking HDL formatting...\n', flush=FLUSH) hdl_files = find_sv_files() hdl_file_code_original = [] for hdl_file in hdl_files: with open(hdl_file, 'r') as f: hdl_file_code_original.append(f.readlines()) run_fix_format_hdl(print_output=False) hdl_file_code_formatted = [] for hdl_file in hdl_files: with open(hdl_file, 'r') as f: hdl_file_code_formatted.append(f.readlines()) colorama.init() is_diff = False for i in range(len(hdl_file_code_original)): original = hdl_file_code_original[i] formatted = hdl_file_code_formatted[i] fname = str(hdl_files[i]) diff = list(unified_diff(original, formatted, fromfile=fname, tofile=fname, n=5)) if diff: is_diff = True print_unified_diff_in_color(diff) colorama.deinit() for i in range(len(hdl_files)): with open(hdl_files[i], 'w') as f: f.write(.join(hdl_file_code_original[i])) if is_diff: raise RuntimeError('HDL format check failed')<|docstring|>Check formatting in HDL files<|endoftext|>
96431924f0d6f60f13e5bd51033786f177b87a39b0f913f5a702e6b2ca8a7d67
def run_check_format_python(): 'Check formatting in Python files' print('\nChecking Python formatting...\n', flush=FLUSH) cmd = ['black', '--diff', '--check', '--color', '.'] run(cmd)
Check formatting in Python files
tools/precommit.py
run_check_format_python
bensampson5/libsv
2
python
def run_check_format_python(): print('\nChecking Python formatting...\n', flush=FLUSH) cmd = ['black', '--diff', '--check', '--color', '.'] run(cmd)
def run_check_format_python(): print('\nChecking Python formatting...\n', flush=FLUSH) cmd = ['black', '--diff', '--check', '--color', '.'] run(cmd)<|docstring|>Check formatting in Python files<|endoftext|>
37a6caeb27886dbfe5386612d68f320edf874d1bad4ea152582f339aaffc5f89
def run_fix_format(): 'Fix formatting in all files' print('\nFixing HDL formatting...\n', flush=FLUSH) run_fix_format_hdl() run_fix_format_python()
Fix formatting in all files
tools/precommit.py
run_fix_format
bensampson5/libsv
2
python
def run_fix_format(): print('\nFixing HDL formatting...\n', flush=FLUSH) run_fix_format_hdl() run_fix_format_python()
def run_fix_format(): print('\nFixing HDL formatting...\n', flush=FLUSH) run_fix_format_hdl() run_fix_format_python()<|docstring|>Fix formatting in all files<|endoftext|>
aff76568c330de7f274128dc2cb5863a37dce4cb39e5a9a5b2afddd91618db56
def run_fix_format_hdl(print_output=True): 'Fix formatting in HDL files' cmd = ['verible-verilog-format', '--inplace'] verible_verilog_format_yaml = (PROJECT_ROOT / '.verible-verilog-format.yaml') yaml_data = None if verible_verilog_format_yaml.exists(): with open(verible_verilog_format_yaml, 'r') as f: yaml_data = yaml.safe_load(f.read()) format_args = [] for (k, v) in yaml_data.items(): format_args.append(f'--{k}={v}') cmd += format_args hdl_files = find_sv_files() cmd += [str(f) for f in hdl_files] run(cmd, print_output=print_output)
Fix formatting in HDL files
tools/precommit.py
run_fix_format_hdl
bensampson5/libsv
2
python
def run_fix_format_hdl(print_output=True): cmd = ['verible-verilog-format', '--inplace'] verible_verilog_format_yaml = (PROJECT_ROOT / '.verible-verilog-format.yaml') yaml_data = None if verible_verilog_format_yaml.exists(): with open(verible_verilog_format_yaml, 'r') as f: yaml_data = yaml.safe_load(f.read()) format_args = [] for (k, v) in yaml_data.items(): format_args.append(f'--{k}={v}') cmd += format_args hdl_files = find_sv_files() cmd += [str(f) for f in hdl_files] run(cmd, print_output=print_output)
def run_fix_format_hdl(print_output=True): cmd = ['verible-verilog-format', '--inplace'] verible_verilog_format_yaml = (PROJECT_ROOT / '.verible-verilog-format.yaml') yaml_data = None if verible_verilog_format_yaml.exists(): with open(verible_verilog_format_yaml, 'r') as f: yaml_data = yaml.safe_load(f.read()) format_args = [] for (k, v) in yaml_data.items(): format_args.append(f'--{k}={v}') cmd += format_args hdl_files = find_sv_files() cmd += [str(f) for f in hdl_files] run(cmd, print_output=print_output)<|docstring|>Fix formatting in HDL files<|endoftext|>
465bf0df302b1f0e82a554e2d2cc1d6eaa4e7c714c1a8a43b9f60ff88c65624b
def run_fix_format_python(): 'Fix formatting in Python files' print('\nFixing Python formatting...\n', flush=FLUSH) cmd = ['black', '.'] run(cmd)
Fix formatting in Python files
tools/precommit.py
run_fix_format_python
bensampson5/libsv
2
python
def run_fix_format_python(): print('\nFixing Python formatting...\n', flush=FLUSH) cmd = ['black', '.'] run(cmd)
def run_fix_format_python(): print('\nFixing Python formatting...\n', flush=FLUSH) cmd = ['black', '.'] run(cmd)<|docstring|>Fix formatting in Python files<|endoftext|>
08d0ef29e393b0ccab16b6f6f806d72726e62dc9149f717824d4d6d70be34eeb
def run_lint_hdl(): 'Run HDL linter' print('\nLinting HDL...\n', flush=FLUSH) cmd = ['verible-verilog-lint'] hdl_files = find_sv_files() cmd += [str(f) for f in hdl_files] run(cmd)
Run HDL linter
tools/precommit.py
run_lint_hdl
bensampson5/libsv
2
python
def run_lint_hdl(): print('\nLinting HDL...\n', flush=FLUSH) cmd = ['verible-verilog-lint'] hdl_files = find_sv_files() cmd += [str(f) for f in hdl_files] run(cmd)
def run_lint_hdl(): print('\nLinting HDL...\n', flush=FLUSH) cmd = ['verible-verilog-lint'] hdl_files = find_sv_files() cmd += [str(f) for f in hdl_files] run(cmd)<|docstring|>Run HDL linter<|endoftext|>
470e4cf7ec8e9deca4552eff900e633b9e6f4f3a0a5434048bd75dd9510ab785
def run_lint_python(): 'Run Python linter' print('\nLinting Python...\n', flush=FLUSH) cmd = ['flake8', '.'] run(cmd)
Run Python linter
tools/precommit.py
run_lint_python
bensampson5/libsv
2
python
def run_lint_python(): print('\nLinting Python...\n', flush=FLUSH) cmd = ['flake8', '.'] run(cmd)
def run_lint_python(): print('\nLinting Python...\n', flush=FLUSH) cmd = ['flake8', '.'] run(cmd)<|docstring|>Run Python linter<|endoftext|>
8fe329a0f4535af94a8db3961a3dc6f4913c868f5aada41808f6eb482a8bb91f
def run_docs(): 'Make documentation' DOCS_BUILD_DIR = (DOCS_DIR / 'build') if DOCS_BUILD_DIR.exists(): shutil.rmtree(DOCS_BUILD_DIR) DOCS_BUILD_DIR.mkdir() cmd = ['make', 'html'] run(cmd, cwd=DOCS_DIR)
Make documentation
tools/precommit.py
run_docs
bensampson5/libsv
2
python
def run_docs(): DOCS_BUILD_DIR = (DOCS_DIR / 'build') if DOCS_BUILD_DIR.exists(): shutil.rmtree(DOCS_BUILD_DIR) DOCS_BUILD_DIR.mkdir() cmd = ['make', 'html'] run(cmd, cwd=DOCS_DIR)
def run_docs(): DOCS_BUILD_DIR = (DOCS_DIR / 'build') if DOCS_BUILD_DIR.exists(): shutil.rmtree(DOCS_BUILD_DIR) DOCS_BUILD_DIR.mkdir() cmd = ['make', 'html'] run(cmd, cwd=DOCS_DIR)<|docstring|>Make documentation<|endoftext|>
6bacd8fbca16f3212bff7276252d13b1839e89231e9e9f475adcc3aa15d690e8
def run_build_package(): 'Builds PyPI package' cmd = ['poetry', 'build'] run(cmd)
Builds PyPI package
tools/precommit.py
run_build_package
bensampson5/libsv
2
python
def run_build_package(): cmd = ['poetry', 'build'] run(cmd)
def run_build_package(): cmd = ['poetry', 'build'] run(cmd)<|docstring|>Builds PyPI package<|endoftext|>
3ef4aff8cb9cb8fe0bbeb34643ab674d7803323df48a1476e585c110b0fade65
@click.command() @click.option('--test', is_flag=True, help='Run tests') @click.option('--check-format', is_flag=True, help='Check formatting') @click.option('--fix-format', is_flag=True, help='Fix formatting') @click.option('--lint', is_flag=True, help='Run linting') @click.option('--docs', is_flag=True, help='Build documentation') @click.option('--build-package', is_flag=True, help='Build package') def precommit(test, check_format, fix_format, lint, docs, build_package): 'Precommit tool for LibSV. If no options are provided, this\n tool will run all precommit steps except for --fix-format. If one or more\n options are specified then only those precommit steps will be run.' if (not any([test, check_format, fix_format, lint, docs, build_package])): test = True check_format = True fix_format = False lint = True docs = True build_package = True if (not in_docker()): raise OSError('Not in a docker container. This script must be run from within a docker container. See README.md for instructions.') else: if (not PROJECT_ROOT.is_dir()): raise FileNotFoundError(f'Cannot find project root directory: {PROJECT_ROOT}') if test: print('\nRunning tests...\n', flush=FLUSH) run_test() if check_format: print('\nChecking formatting...\n', flush=FLUSH) run_check_format() if fix_format: print('\nFixing formatting...\n', flush=FLUSH) run_fix_format() if lint: print('\nLinting...\n', flush=FLUSH) run_lint() if docs: print('\nBuilding documentation...\n', flush=FLUSH) run_docs() if build_package: print('\nBuilding package...\n') run_build_package()
Precommit tool for LibSV. If no options are provided, this tool will run all precommit steps except for --fix-format. If one or more options are specified then only those precommit steps will be run.
tools/precommit.py
precommit
bensampson5/libsv
2
python
@click.command() @click.option('--test', is_flag=True, help='Run tests') @click.option('--check-format', is_flag=True, help='Check formatting') @click.option('--fix-format', is_flag=True, help='Fix formatting') @click.option('--lint', is_flag=True, help='Run linting') @click.option('--docs', is_flag=True, help='Build documentation') @click.option('--build-package', is_flag=True, help='Build package') def precommit(test, check_format, fix_format, lint, docs, build_package): 'Precommit tool for LibSV. If no options are provided, this\n tool will run all precommit steps except for --fix-format. If one or more\n options are specified then only those precommit steps will be run.' if (not any([test, check_format, fix_format, lint, docs, build_package])): test = True check_format = True fix_format = False lint = True docs = True build_package = True if (not in_docker()): raise OSError('Not in a docker container. This script must be run from within a docker container. See README.md for instructions.') else: if (not PROJECT_ROOT.is_dir()): raise FileNotFoundError(f'Cannot find project root directory: {PROJECT_ROOT}') if test: print('\nRunning tests...\n', flush=FLUSH) run_test() if check_format: print('\nChecking formatting...\n', flush=FLUSH) run_check_format() if fix_format: print('\nFixing formatting...\n', flush=FLUSH) run_fix_format() if lint: print('\nLinting...\n', flush=FLUSH) run_lint() if docs: print('\nBuilding documentation...\n', flush=FLUSH) run_docs() if build_package: print('\nBuilding package...\n') run_build_package()
@click.command() @click.option('--test', is_flag=True, help='Run tests') @click.option('--check-format', is_flag=True, help='Check formatting') @click.option('--fix-format', is_flag=True, help='Fix formatting') @click.option('--lint', is_flag=True, help='Run linting') @click.option('--docs', is_flag=True, help='Build documentation') @click.option('--build-package', is_flag=True, help='Build package') def precommit(test, check_format, fix_format, lint, docs, build_package): 'Precommit tool for LibSV. If no options are provided, this\n tool will run all precommit steps except for --fix-format. If one or more\n options are specified then only those precommit steps will be run.' if (not any([test, check_format, fix_format, lint, docs, build_package])): test = True check_format = True fix_format = False lint = True docs = True build_package = True if (not in_docker()): raise OSError('Not in a docker container. This script must be run from within a docker container. See README.md for instructions.') else: if (not PROJECT_ROOT.is_dir()): raise FileNotFoundError(f'Cannot find project root directory: {PROJECT_ROOT}') if test: print('\nRunning tests...\n', flush=FLUSH) run_test() if check_format: print('\nChecking formatting...\n', flush=FLUSH) run_check_format() if fix_format: print('\nFixing formatting...\n', flush=FLUSH) run_fix_format() if lint: print('\nLinting...\n', flush=FLUSH) run_lint() if docs: print('\nBuilding documentation...\n', flush=FLUSH) run_docs() if build_package: print('\nBuilding package...\n') run_build_package()<|docstring|>Precommit tool for LibSV. If no options are provided, this tool will run all precommit steps except for --fix-format. If one or more options are specified then only those precommit steps will be run.<|endoftext|>
aac1b29c1366106708bea31603cfea8efe4c3dfea2b5272abf3c3afa585d02e2
def detect_sys(): "Tries to identify your python platform\n\n :returns: a dict with the gathered information\n :rtype: dict\n :raises: None\n\n the returned dict has these keys: 'system', 'bit', 'compiler', 'python_version_tuple'\n\n eg.::\n\n {'system':'Windows', 'bit':'32bit', 'compiler':'MSC v.1500 32bit (Intel)', 'python_version_tuple':('2', '7', '6')}\n\n " system = platform.system() bit = platform.architecture()[0] compiler = platform.python_compiler() ver = platform.python_version_tuple() return {'system': system, 'bit': bit, 'compiler': compiler, 'python_version_tuple': ver}
Tries to identify your python platform :returns: a dict with the gathered information :rtype: dict :raises: None the returned dict has these keys: 'system', 'bit', 'compiler', 'python_version_tuple' eg.:: {'system':'Windows', 'bit':'32bit', 'compiler':'MSC v.1500 32bit (Intel)', 'python_version_tuple':('2', '7', '6')}
src/jukeboxcore/ostool.py
detect_sys
JukeboxPipeline/jukebox-core
2
python
def detect_sys(): "Tries to identify your python platform\n\n :returns: a dict with the gathered information\n :rtype: dict\n :raises: None\n\n the returned dict has these keys: 'system', 'bit', 'compiler', 'python_version_tuple'\n\n eg.::\n\n {'system':'Windows', 'bit':'32bit', 'compiler':'MSC v.1500 32bit (Intel)', 'python_version_tuple':('2', '7', '6')}\n\n " system = platform.system() bit = platform.architecture()[0] compiler = platform.python_compiler() ver = platform.python_version_tuple() return {'system': system, 'bit': bit, 'compiler': compiler, 'python_version_tuple': ver}
def detect_sys(): "Tries to identify your python platform\n\n :returns: a dict with the gathered information\n :rtype: dict\n :raises: None\n\n the returned dict has these keys: 'system', 'bit', 'compiler', 'python_version_tuple'\n\n eg.::\n\n {'system':'Windows', 'bit':'32bit', 'compiler':'MSC v.1500 32bit (Intel)', 'python_version_tuple':('2', '7', '6')}\n\n " system = platform.system() bit = platform.architecture()[0] compiler = platform.python_compiler() ver = platform.python_version_tuple() return {'system': system, 'bit': bit, 'compiler': compiler, 'python_version_tuple': ver}<|docstring|>Tries to identify your python platform :returns: a dict with the gathered information :rtype: dict :raises: None the returned dict has these keys: 'system', 'bit', 'compiler', 'python_version_tuple' eg.:: {'system':'Windows', 'bit':'32bit', 'compiler':'MSC v.1500 32bit (Intel)', 'python_version_tuple':('2', '7', '6')}<|endoftext|>
0dc7f70ae6ebd202dadf4da5f674a03eefd8481ed199f102993d13009391f558
def get_interface(): 'Return the appropriate PlatformInterface implementation for your platform\n\n :returns: the appropriate platform interface for my platform\n :rtype: :class:`PlatformInterface``\n :raises: errors.UnsupportedPlatformError\n ' plat = detect_sys()['system'] try: return interfaces[plat]() except KeyError: raise errors.UnsupportedPlatformError(('%s is not supported. Implement an interface for it in jukeboxcore.ostool!' % plat))
Return the appropriate PlatformInterface implementation for your platform :returns: the appropriate platform interface for my platform :rtype: :class:`PlatformInterface`` :raises: errors.UnsupportedPlatformError
src/jukeboxcore/ostool.py
get_interface
JukeboxPipeline/jukebox-core
2
python
def get_interface(): 'Return the appropriate PlatformInterface implementation for your platform\n\n :returns: the appropriate platform interface for my platform\n :rtype: :class:`PlatformInterface``\n :raises: errors.UnsupportedPlatformError\n ' plat = detect_sys()['system'] try: return interfaces[plat]() except KeyError: raise errors.UnsupportedPlatformError(('%s is not supported. Implement an interface for it in jukeboxcore.ostool!' % plat))
def get_interface(): 'Return the appropriate PlatformInterface implementation for your platform\n\n :returns: the appropriate platform interface for my platform\n :rtype: :class:`PlatformInterface``\n :raises: errors.UnsupportedPlatformError\n ' plat = detect_sys()['system'] try: return interfaces[plat]() except KeyError: raise errors.UnsupportedPlatformError(('%s is not supported. Implement an interface for it in jukeboxcore.ostool!' % plat))<|docstring|>Return the appropriate PlatformInterface implementation for your platform :returns: the appropriate platform interface for my platform :rtype: :class:`PlatformInterface`` :raises: errors.UnsupportedPlatformError<|endoftext|>
27706c66c4e3cea3472cfcb77e4971b39e04a2a050497879d652b6762f45f9d3
@abc.abstractmethod def get_maya_location(self): 'Return the installation path to maya\n\n :returns: path to maya\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
Return the installation path to maya :returns: path to maya :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_location
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def get_maya_location(self): 'Return the installation path to maya\n\n :returns: path to maya\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
@abc.abstractmethod def get_maya_location(self): 'Return the installation path to maya\n\n :returns: path to maya\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass<|docstring|>Return the installation path to maya :returns: path to maya :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
c38fdee0188465a590f2b319f7e86d6e4f722ba0bc34bbe48c8343c1dbe6cf85
@abc.abstractmethod def get_maya_sitepackage_dir(self): 'Return the sitepackage dir for maya\n\n :returns: path to the maya sitepackages\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
Return the sitepackage dir for maya :returns: path to the maya sitepackages :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_sitepackage_dir
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def get_maya_sitepackage_dir(self): 'Return the sitepackage dir for maya\n\n :returns: path to the maya sitepackages\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
@abc.abstractmethod def get_maya_sitepackage_dir(self): 'Return the sitepackage dir for maya\n\n :returns: path to the maya sitepackages\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass<|docstring|>Return the sitepackage dir for maya :returns: path to the maya sitepackages :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
e4f5324dd40073ccf98727553912f6cc6f0349b445a94d86f49e2518a8cb005d
@abc.abstractmethod def get_maya_bin(self): 'Return the path to the maya bin dir\n\n :returns: path to maya bin dir\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
Return the path to the maya bin dir :returns: path to maya bin dir :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_bin
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def get_maya_bin(self): 'Return the path to the maya bin dir\n\n :returns: path to maya bin dir\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
@abc.abstractmethod def get_maya_bin(self): 'Return the path to the maya bin dir\n\n :returns: path to maya bin dir\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass<|docstring|>Return the path to the maya bin dir :returns: path to maya bin dir :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
22ed840c3fbcdd9f614a2428f1ed5ca4e25f8e388154f28f36e8ea89701529e3
@abc.abstractmethod def get_maya_python(self): 'Return the path to the mayapy executable\n\n :returns: path to the maya python intepreter\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
Return the path to the mayapy executable :returns: path to the maya python intepreter :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_python
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def get_maya_python(self): 'Return the path to the mayapy executable\n\n :returns: path to the maya python intepreter\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
@abc.abstractmethod def get_maya_python(self): 'Return the path to the mayapy executable\n\n :returns: path to the maya python intepreter\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass<|docstring|>Return the path to the mayapy executable :returns: path to the maya python intepreter :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
34058536b515f2683eda813793f9dc1eb552d9d15abd0df65bbdd80dae7f6435
@abc.abstractmethod def get_maya_exe(self): 'Return the path to the maya executable\n\n :returns: path to the maya exe\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
Return the path to the maya executable :returns: path to the maya exe :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_exe
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def get_maya_exe(self): 'Return the path to the maya executable\n\n :returns: path to the maya exe\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass
@abc.abstractmethod def get_maya_exe(self): 'Return the path to the maya executable\n\n :returns: path to the maya exe\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' pass<|docstring|>Return the path to the maya executable :returns: path to the maya exe :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
3a61860623b861682252a2ad3ff686a2dd2200149c87f2f2d3790c55bed10802
@abc.abstractmethod def get_maya_envpath(self): 'Return the PYTHONPATH neccessary for running mayapy\n\n :returns: the PYTHONPATH that is used for running mayapy\n :rtype: str\n :raises: None\n ' pass
Return the PYTHONPATH neccessary for running mayapy :returns: the PYTHONPATH that is used for running mayapy :rtype: str :raises: None
src/jukeboxcore/ostool.py
get_maya_envpath
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def get_maya_envpath(self): 'Return the PYTHONPATH neccessary for running mayapy\n\n :returns: the PYTHONPATH that is used for running mayapy\n :rtype: str\n :raises: None\n ' pass
@abc.abstractmethod def get_maya_envpath(self): 'Return the PYTHONPATH neccessary for running mayapy\n\n :returns: the PYTHONPATH that is used for running mayapy\n :rtype: str\n :raises: None\n ' pass<|docstring|>Return the PYTHONPATH neccessary for running mayapy :returns: the PYTHONPATH that is used for running mayapy :rtype: str :raises: None<|endoftext|>
8597721a257a998bbeae678a113a2d289df5d00aa7edf7ae799ca6d1c70b42c9
@abc.abstractmethod def open_path(self, path): 'Open the given path in the file browser\n\n :param path: the path to open\n :type path: str\n :returns: None\n :rtype: None\n :raises: None\n ' pass
Open the given path in the file browser :param path: the path to open :type path: str :returns: None :rtype: None :raises: None
src/jukeboxcore/ostool.py
open_path
JukeboxPipeline/jukebox-core
2
python
@abc.abstractmethod def open_path(self, path): 'Open the given path in the file browser\n\n :param path: the path to open\n :type path: str\n :returns: None\n :rtype: None\n :raises: None\n ' pass
@abc.abstractmethod def open_path(self, path): 'Open the given path in the file browser\n\n :param path: the path to open\n :type path: str\n :returns: None\n :rtype: None\n :raises: None\n ' pass<|docstring|>Open the given path in the file browser :param path: the path to open :type path: str :returns: None :rtype: None :raises: None<|endoftext|>
5071d118c62426942cdd477a291a172061031774ee18307c33408542c891e451
def get_maya_location(self): ' Return the installation path to maya\n\n :returns: path to maya\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' import _winreg for ver in MAYA_VERSIONS: try: key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, MAYA_REG_KEY.format(mayaversion=ver), 0, (_winreg.KEY_READ | _winreg.KEY_WOW64_64KEY)) value = _winreg.QueryValueEx(key, 'MAYA_INSTALL_LOCATION')[0] except WindowsError: log.debug(('Maya %s installation not found in registry!' % ver)) if (not value): raise errors.SoftwareNotFoundError(('Maya %s installation not found in registry!' % MAYA_VERSIONS)) return value
Return the installation path to maya :returns: path to maya :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_location
JukeboxPipeline/jukebox-core
2
python
def get_maya_location(self): ' Return the installation path to maya\n\n :returns: path to maya\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' import _winreg for ver in MAYA_VERSIONS: try: key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, MAYA_REG_KEY.format(mayaversion=ver), 0, (_winreg.KEY_READ | _winreg.KEY_WOW64_64KEY)) value = _winreg.QueryValueEx(key, 'MAYA_INSTALL_LOCATION')[0] except WindowsError: log.debug(('Maya %s installation not found in registry!' % ver)) if (not value): raise errors.SoftwareNotFoundError(('Maya %s installation not found in registry!' % MAYA_VERSIONS)) return value
def get_maya_location(self): ' Return the installation path to maya\n\n :returns: path to maya\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' import _winreg for ver in MAYA_VERSIONS: try: key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, MAYA_REG_KEY.format(mayaversion=ver), 0, (_winreg.KEY_READ | _winreg.KEY_WOW64_64KEY)) value = _winreg.QueryValueEx(key, 'MAYA_INSTALL_LOCATION')[0] except WindowsError: log.debug(('Maya %s installation not found in registry!' % ver)) if (not value): raise errors.SoftwareNotFoundError(('Maya %s installation not found in registry!' % MAYA_VERSIONS)) return value<|docstring|>Return the installation path to maya :returns: path to maya :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
9bedb82920626965562756f575aec6f99d6b6771a9a6a47aa6443567f0f32300
def get_maya_sitepackage_dir(self): 'Return the sitepackage dir for maya\n\n :returns: path to the maya sitepackages\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayaloc = self.get_maya_location() return os.path.join(mayaloc, 'Python', 'Lib', 'site-packages')
Return the sitepackage dir for maya :returns: path to the maya sitepackages :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_sitepackage_dir
JukeboxPipeline/jukebox-core
2
python
def get_maya_sitepackage_dir(self): 'Return the sitepackage dir for maya\n\n :returns: path to the maya sitepackages\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayaloc = self.get_maya_location() return os.path.join(mayaloc, 'Python', 'Lib', 'site-packages')
def get_maya_sitepackage_dir(self): 'Return the sitepackage dir for maya\n\n :returns: path to the maya sitepackages\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayaloc = self.get_maya_location() return os.path.join(mayaloc, 'Python', 'Lib', 'site-packages')<|docstring|>Return the sitepackage dir for maya :returns: path to the maya sitepackages :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
71c02c0344e4d22673fb43a6fb891d3091ac76b12c0fe27f7664c9cb0eac1025
def get_maya_bin(self): 'Return the path to the maya bin dir\n\n :returns: path to maya bin dir\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayaloc = self.get_maya_location() return os.path.join(mayaloc, 'bin')
Return the path to the maya bin dir :returns: path to maya bin dir :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_bin
JukeboxPipeline/jukebox-core
2
python
def get_maya_bin(self): 'Return the path to the maya bin dir\n\n :returns: path to maya bin dir\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayaloc = self.get_maya_location() return os.path.join(mayaloc, 'bin')
def get_maya_bin(self): 'Return the path to the maya bin dir\n\n :returns: path to maya bin dir\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayaloc = self.get_maya_location() return os.path.join(mayaloc, 'bin')<|docstring|>Return the path to the maya bin dir :returns: path to maya bin dir :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
240360d7169e3fa5640f65cd51f49df29933dec22b5ae9a189d58e549817bb65
def get_maya_python(self): 'Return the path to the mayapy executable\n\n :returns: path to the maya python intepreter\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayabin = self.get_maya_bin() return os.path.join(mayabin, 'mayapy.exe')
Return the path to the mayapy executable :returns: path to the maya python intepreter :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_python
JukeboxPipeline/jukebox-core
2
python
def get_maya_python(self): 'Return the path to the mayapy executable\n\n :returns: path to the maya python intepreter\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayabin = self.get_maya_bin() return os.path.join(mayabin, 'mayapy.exe')
def get_maya_python(self): 'Return the path to the mayapy executable\n\n :returns: path to the maya python intepreter\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayabin = self.get_maya_bin() return os.path.join(mayabin, 'mayapy.exe')<|docstring|>Return the path to the mayapy executable :returns: path to the maya python intepreter :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
f3267add62edc829b5348b7bc228632f88867617d92ea07ee673fd4a56410880
def get_maya_exe(self): 'Return the path to the maya executable\n\n :returns: path to the maya exe\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayabin = self.get_maya_bin() return os.path.join(mayabin, 'maya.exe')
Return the path to the maya executable :returns: path to the maya exe :rtype: str :raises: errors.SoftwareNotFoundError
src/jukeboxcore/ostool.py
get_maya_exe
JukeboxPipeline/jukebox-core
2
python
def get_maya_exe(self): 'Return the path to the maya executable\n\n :returns: path to the maya exe\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayabin = self.get_maya_bin() return os.path.join(mayabin, 'maya.exe')
def get_maya_exe(self): 'Return the path to the maya executable\n\n :returns: path to the maya exe\n :rtype: str\n :raises: errors.SoftwareNotFoundError\n ' mayabin = self.get_maya_bin() return os.path.join(mayabin, 'maya.exe')<|docstring|>Return the path to the maya executable :returns: path to the maya exe :rtype: str :raises: errors.SoftwareNotFoundError<|endoftext|>
b840fe3f7f5682eea3f2427be0b69060fdd912b71b302d7238fd942b775116c4
def get_maya_envpath(self): 'Return the PYTHONPATH neccessary for running mayapy\n\n If you start native mayapy, it will setup these paths.\n You might want to prepend this to your path if running from\n an external intepreter.\n\n :returns: the PYTHONPATH that is used for running mayapy\n :rtype: str\n :raises: None\n ' opj = os.path.join ml = self.get_maya_location() mb = self.get_maya_bin() msp = self.get_maya_sitepackage_dir() pyzip = opj(mb, 'python27.zip') pydir = opj(ml, 'Python') pydll = opj(pydir, 'DLLs') pylib = opj(pydir, 'lib') pyplat = opj(pylib, 'plat-win') pytk = opj(pylib, 'lib-tk') path = os.pathsep.join((pyzip, pydll, pylib, pyplat, pytk, mb, pydir, msp)) return path
Return the PYTHONPATH neccessary for running mayapy If you start native mayapy, it will setup these paths. You might want to prepend this to your path if running from an external intepreter. :returns: the PYTHONPATH that is used for running mayapy :rtype: str :raises: None
src/jukeboxcore/ostool.py
get_maya_envpath
JukeboxPipeline/jukebox-core
2
python
def get_maya_envpath(self): 'Return the PYTHONPATH neccessary for running mayapy\n\n If you start native mayapy, it will setup these paths.\n You might want to prepend this to your path if running from\n an external intepreter.\n\n :returns: the PYTHONPATH that is used for running mayapy\n :rtype: str\n :raises: None\n ' opj = os.path.join ml = self.get_maya_location() mb = self.get_maya_bin() msp = self.get_maya_sitepackage_dir() pyzip = opj(mb, 'python27.zip') pydir = opj(ml, 'Python') pydll = opj(pydir, 'DLLs') pylib = opj(pydir, 'lib') pyplat = opj(pylib, 'plat-win') pytk = opj(pylib, 'lib-tk') path = os.pathsep.join((pyzip, pydll, pylib, pyplat, pytk, mb, pydir, msp)) return path
def get_maya_envpath(self): 'Return the PYTHONPATH neccessary for running mayapy\n\n If you start native mayapy, it will setup these paths.\n You might want to prepend this to your path if running from\n an external intepreter.\n\n :returns: the PYTHONPATH that is used for running mayapy\n :rtype: str\n :raises: None\n ' opj = os.path.join ml = self.get_maya_location() mb = self.get_maya_bin() msp = self.get_maya_sitepackage_dir() pyzip = opj(mb, 'python27.zip') pydir = opj(ml, 'Python') pydll = opj(pydir, 'DLLs') pylib = opj(pydir, 'lib') pyplat = opj(pylib, 'plat-win') pytk = opj(pylib, 'lib-tk') path = os.pathsep.join((pyzip, pydll, pylib, pyplat, pytk, mb, pydir, msp)) return path<|docstring|>Return the PYTHONPATH neccessary for running mayapy If you start native mayapy, it will setup these paths. You might want to prepend this to your path if running from an external intepreter. :returns: the PYTHONPATH that is used for running mayapy :rtype: str :raises: None<|endoftext|>
4af1184cf7ef6917c9e0a72065232bd9ad1d8637cdda7a93307db523f0c648f3
def open_path(self, path): 'Open the given path in the file browser\n\n :param path: the path to open\n :type path: str\n :returns: None\n :rtype: None\n :raises: None\n ' os.startfile(path)
Open the given path in the file browser :param path: the path to open :type path: str :returns: None :rtype: None :raises: None
src/jukeboxcore/ostool.py
open_path
JukeboxPipeline/jukebox-core
2
python
def open_path(self, path): 'Open the given path in the file browser\n\n :param path: the path to open\n :type path: str\n :returns: None\n :rtype: None\n :raises: None\n ' os.startfile(path)
def open_path(self, path): 'Open the given path in the file browser\n\n :param path: the path to open\n :type path: str\n :returns: None\n :rtype: None\n :raises: None\n ' os.startfile(path)<|docstring|>Open the given path in the file browser :param path: the path to open :type path: str :returns: None :rtype: None :raises: None<|endoftext|>
f5c02bf1cca76b0390b521f68fb244eb7db4433052f35a7a59fa94f6540709f6
def isprint(c): 'Return if character is printable ASCII' if ((c >= '@') and (c <= '~')): return True else: return False
Return if character is printable ASCII
disasm/disasm65c02.py
isprint
napobear/6502
188
python
def isprint(c): if ((c >= '@') and (c <= '~')): return True else: return False
def isprint(c): if ((c >= '@') and (c <= '~')): return True else: return False<|docstring|>Return if character is printable ASCII<|endoftext|>
ebb544cc71ed2f610cb0a1af4c395e0d361b09fc011871b4f052d2e21978b29e
def case(s): 'Return string or uppercase version of string if option is set.' global upperOption if upperOption: return s.upper() else: return s
Return string or uppercase version of string if option is set.
disasm/disasm65c02.py
case
napobear/6502
188
python
def case(s): global upperOption if upperOption: return s.upper() else: return s
def case(s): global upperOption if upperOption: return s.upper() else: return s<|docstring|>Return string or uppercase version of string if option is set.<|endoftext|>
f8c3cd643c634035d693086c9e7fbf0680d9e346adcbe19acb5ae0f312c8101c
def formatByte(data): 'Format an 8-bit byte using the current display format (e.g. hex or octal)' global args if (args.format == 4): return ('%03o' % data) else: return ('%02X' % data)
Format an 8-bit byte using the current display format (e.g. hex or octal)
disasm/disasm65c02.py
formatByte
napobear/6502
188
python
def formatByte(data): global args if (args.format == 4): return ('%03o' % data) else: return ('%02X' % data)
def formatByte(data): global args if (args.format == 4): return ('%03o' % data) else: return ('%02X' % data)<|docstring|>Format an 8-bit byte using the current display format (e.g. hex or octal)<|endoftext|>
95b969f0aeff1239fa8f747c8a2f0250e9a2abe9b4b6e6923e68fe9daf077745
def formatAddress(data): 'Format a 16-bit address using the current display format (e.g. hex or octal)' global args if (args.format == 4): return ('%06o' % data) else: return ('%04X' % data)
Format a 16-bit address using the current display format (e.g. hex or octal)
disasm/disasm65c02.py
formatAddress
napobear/6502
188
python
def formatAddress(data): global args if (args.format == 4): return ('%06o' % data) else: return ('%04X' % data)
def formatAddress(data): global args if (args.format == 4): return ('%06o' % data) else: return ('%04X' % data)<|docstring|>Format a 16-bit address using the current display format (e.g. hex or octal)<|endoftext|>
ca651a3477a4e0d8fed749a33456f36c8cc394f8bbcad47e4ce39f49354cf206
@classmethod def encode(cls, value): ' Encode a value in a byte array\n\n Parameters\n -----------\n value: int range from -32768 to 32767\n Value to encode\n\n Return\n -------\n Byte Array -- Encode value in a byte array to send trough a network\n\n ' if isinstance(value, int): buffer = None if cls.validate_range(value): buffer = value.to_bytes(4, 'little', signed=True) return buffer else: raise ValueError('value is not in valid cip range') else: raise TypeError('value must be int')
Encode a value in a byte array Parameters ----------- value: int range from -32768 to 32767 Value to encode Return ------- Byte Array -- Encode value in a byte array to send trough a network
data_type/ftime.py
encode
hsocarras/pycip
0
python
@classmethod def encode(cls, value): ' Encode a value in a byte array\n\n Parameters\n -----------\n value: int range from -32768 to 32767\n Value to encode\n\n Return\n -------\n Byte Array -- Encode value in a byte array to send trough a network\n\n ' if isinstance(value, int): buffer = None if cls.validate_range(value): buffer = value.to_bytes(4, 'little', signed=True) return buffer else: raise ValueError('value is not in valid cip range') else: raise TypeError('value must be int')
@classmethod def encode(cls, value): ' Encode a value in a byte array\n\n Parameters\n -----------\n value: int range from -32768 to 32767\n Value to encode\n\n Return\n -------\n Byte Array -- Encode value in a byte array to send trough a network\n\n ' if isinstance(value, int): buffer = None if cls.validate_range(value): buffer = value.to_bytes(4, 'little', signed=True) return buffer else: raise ValueError('value is not in valid cip range') else: raise TypeError('value must be int')<|docstring|>Encode a value in a byte array Parameters ----------- value: int range from -32768 to 32767 Value to encode Return ------- Byte Array -- Encode value in a byte array to send trough a network<|endoftext|>
ef554915a198d45638e2e0c39cfd34c06770ac71ba2cda7fc4e2c714cdcdf172
@classmethod def decode(cls, buffer): ' Decode a value from a byte array\n\n Parameters\n -----------\n buffer: byte array\n buffer to decode\n\n Return\n -------\n value : int\n Decode value from a byte array received trough a network\n\n ' if isinstance(buffer, bytes): value = None if (len(buffer) == 4): value = int.from_bytes(buffer, 'little', signed=True) return value else: raise ValueError('buffer length mitsmatch with DINT encoding') else: raise TypeError('buffer must be bytes')
Decode a value from a byte array Parameters ----------- buffer: byte array buffer to decode Return ------- value : int Decode value from a byte array received trough a network
data_type/ftime.py
decode
hsocarras/pycip
0
python
@classmethod def decode(cls, buffer): ' Decode a value from a byte array\n\n Parameters\n -----------\n buffer: byte array\n buffer to decode\n\n Return\n -------\n value : int\n Decode value from a byte array received trough a network\n\n ' if isinstance(buffer, bytes): value = None if (len(buffer) == 4): value = int.from_bytes(buffer, 'little', signed=True) return value else: raise ValueError('buffer length mitsmatch with DINT encoding') else: raise TypeError('buffer must be bytes')
@classmethod def decode(cls, buffer): ' Decode a value from a byte array\n\n Parameters\n -----------\n buffer: byte array\n buffer to decode\n\n Return\n -------\n value : int\n Decode value from a byte array received trough a network\n\n ' if isinstance(buffer, bytes): value = None if (len(buffer) == 4): value = int.from_bytes(buffer, 'little', signed=True) return value else: raise ValueError('buffer length mitsmatch with DINT encoding') else: raise TypeError('buffer must be bytes')<|docstring|>Decode a value from a byte array Parameters ----------- buffer: byte array buffer to decode Return ------- value : int Decode value from a byte array received trough a network<|endoftext|>
0df46d503bfd80c3c07b73115b1ac56aaacad46ffce9072d66c14c87e072e771
@classmethod def to_string(cls, value): ' Encode a date string from T#-35m47.483648s to T#35m47.483547s.\n\n Parameters\n -----------\n value: int\n value of amount of microseconds\n Return\n -------\n str: \n String iso format starting with T# identifier\n\n ' if isinstance(value, int): if cls.validate_range(value): _min = int((value / 60000000)) _rest = (value % 60000000) _seconds = int((_rest / 1000000)) _micro_seconds = (_rest % 1000000) str_min = str(_min) if (len(str_min) < 2): str_min = ('0' + str_min) str_seconds = str(_seconds) if (len(str_seconds) < 2): str_seconds = ('0' + str_seconds) str_microseconds = str(_micro_seconds) if (len(str_microseconds) < 6): pad_str = {1: '00000', 2: '0000', 3: '000', 4: '00', 5: '0'} str_microseconds = (pad_str.get(len(str_microseconds)) + str_microseconds) return (((((('T#' + str_min) + 'm') + str_seconds) + '.') + str_microseconds) + 's') else: raise ValueError('value is not valid integer') else: raise TypeError('value must be int')
Encode a date string from T#-35m47.483648s to T#35m47.483547s. Parameters ----------- value: int value of amount of microseconds Return ------- str: String iso format starting with T# identifier
data_type/ftime.py
to_string
hsocarras/pycip
0
python
@classmethod def to_string(cls, value): ' Encode a date string from T#-35m47.483648s to T#35m47.483547s.\n\n Parameters\n -----------\n value: int\n value of amount of microseconds\n Return\n -------\n str: \n String iso format starting with T# identifier\n\n ' if isinstance(value, int): if cls.validate_range(value): _min = int((value / 60000000)) _rest = (value % 60000000) _seconds = int((_rest / 1000000)) _micro_seconds = (_rest % 1000000) str_min = str(_min) if (len(str_min) < 2): str_min = ('0' + str_min) str_seconds = str(_seconds) if (len(str_seconds) < 2): str_seconds = ('0' + str_seconds) str_microseconds = str(_micro_seconds) if (len(str_microseconds) < 6): pad_str = {1: '00000', 2: '0000', 3: '000', 4: '00', 5: '0'} str_microseconds = (pad_str.get(len(str_microseconds)) + str_microseconds) return (((((('T#' + str_min) + 'm') + str_seconds) + '.') + str_microseconds) + 's') else: raise ValueError('value is not valid integer') else: raise TypeError('value must be int')
@classmethod def to_string(cls, value): ' Encode a date string from T#-35m47.483648s to T#35m47.483547s.\n\n Parameters\n -----------\n value: int\n value of amount of microseconds\n Return\n -------\n str: \n String iso format starting with T# identifier\n\n ' if isinstance(value, int): if cls.validate_range(value): _min = int((value / 60000000)) _rest = (value % 60000000) _seconds = int((_rest / 1000000)) _micro_seconds = (_rest % 1000000) str_min = str(_min) if (len(str_min) < 2): str_min = ('0' + str_min) str_seconds = str(_seconds) if (len(str_seconds) < 2): str_seconds = ('0' + str_seconds) str_microseconds = str(_micro_seconds) if (len(str_microseconds) < 6): pad_str = {1: '00000', 2: '0000', 3: '000', 4: '00', 5: '0'} str_microseconds = (pad_str.get(len(str_microseconds)) + str_microseconds) return (((((('T#' + str_min) + 'm') + str_seconds) + '.') + str_microseconds) + 's') else: raise ValueError('value is not valid integer') else: raise TypeError('value must be int')<|docstring|>Encode a date string from T#-35m47.483648s to T#35m47.483547s. Parameters ----------- value: int value of amount of microseconds Return ------- str: String iso format starting with T# identifier<|endoftext|>
8d0f93bc14753836311c9b3a4ec55e38dadbbd1d551c5151ab53069b508a8965
@classmethod def from_string(cls, time_str): 'Decode a time string from T#-35m47.483648s to T#35m47.483547s.\n Parameters\n -----------\n value: string\n String iso format startin with T# identifier\n \n\n Return\n -------\n dint: \n value of amount of miliseconds from midnight\n\n ' format_str = time_str[2:(- 1)] if (time_str[0:2] == 'T#'): index_minutes = format_str.find('m') _minutes = int(format_str[0:index_minutes]) _seconds = float(format_str[(index_minutes + 1):]) if (_minutes < 0): value = ((_minutes * 60000000) - int((_seconds * 1000000))) else: value = ((_minutes * 60000000) + int((_seconds * 1000000))) if cls.validate_range(value): return value else: raise ValueError('value is not valid integer') else: raise TypeError('argument string is not valid TOD type string')
Decode a time string from T#-35m47.483648s to T#35m47.483547s. Parameters ----------- value: string String iso format startin with T# identifier Return ------- dint: value of amount of miliseconds from midnight
data_type/ftime.py
from_string
hsocarras/pycip
0
python
@classmethod def from_string(cls, time_str): 'Decode a time string from T#-35m47.483648s to T#35m47.483547s.\n Parameters\n -----------\n value: string\n String iso format startin with T# identifier\n \n\n Return\n -------\n dint: \n value of amount of miliseconds from midnight\n\n ' format_str = time_str[2:(- 1)] if (time_str[0:2] == 'T#'): index_minutes = format_str.find('m') _minutes = int(format_str[0:index_minutes]) _seconds = float(format_str[(index_minutes + 1):]) if (_minutes < 0): value = ((_minutes * 60000000) - int((_seconds * 1000000))) else: value = ((_minutes * 60000000) + int((_seconds * 1000000))) if cls.validate_range(value): return value else: raise ValueError('value is not valid integer') else: raise TypeError('argument string is not valid TOD type string')
@classmethod def from_string(cls, time_str): 'Decode a time string from T#-35m47.483648s to T#35m47.483547s.\n Parameters\n -----------\n value: string\n String iso format startin with T# identifier\n \n\n Return\n -------\n dint: \n value of amount of miliseconds from midnight\n\n ' format_str = time_str[2:(- 1)] if (time_str[0:2] == 'T#'): index_minutes = format_str.find('m') _minutes = int(format_str[0:index_minutes]) _seconds = float(format_str[(index_minutes + 1):]) if (_minutes < 0): value = ((_minutes * 60000000) - int((_seconds * 1000000))) else: value = ((_minutes * 60000000) + int((_seconds * 1000000))) if cls.validate_range(value): return value else: raise ValueError('value is not valid integer') else: raise TypeError('argument string is not valid TOD type string')<|docstring|>Decode a time string from T#-35m47.483648s to T#35m47.483547s. Parameters ----------- value: string String iso format startin with T# identifier Return ------- dint: value of amount of miliseconds from midnight<|endoftext|>
38e4d30f653b1a5d33c7b59892762fa35c3a93e7f73b014371c94ff9181b2bce
@property def is_cab_compliant(self): '\n Parse the options to find whether authority is CAB Forum Compliant,\n i.e., adhering to the CA/Browser Forum Baseline Requirements.\n Returns None if option is not available\n ' if (not self.options): return None options_array = json.loads(self.options) if isinstance(options_array, list): for option in options_array: if (('name' in option) and (option['name'] == 'cab_compliant')): return option['value'] return None
Parse the options to find whether authority is CAB Forum Compliant, i.e., adhering to the CA/Browser Forum Baseline Requirements. Returns None if option is not available
lemur/authorities/models.py
is_cab_compliant
douglasc-nflx/lemur
0
python
@property def is_cab_compliant(self): '\n Parse the options to find whether authority is CAB Forum Compliant,\n i.e., adhering to the CA/Browser Forum Baseline Requirements.\n Returns None if option is not available\n ' if (not self.options): return None options_array = json.loads(self.options) if isinstance(options_array, list): for option in options_array: if (('name' in option) and (option['name'] == 'cab_compliant')): return option['value'] return None
@property def is_cab_compliant(self): '\n Parse the options to find whether authority is CAB Forum Compliant,\n i.e., adhering to the CA/Browser Forum Baseline Requirements.\n Returns None if option is not available\n ' if (not self.options): return None options_array = json.loads(self.options) if isinstance(options_array, list): for option in options_array: if (('name' in option) and (option['name'] == 'cab_compliant')): return option['value'] return None<|docstring|>Parse the options to find whether authority is CAB Forum Compliant, i.e., adhering to the CA/Browser Forum Baseline Requirements. Returns None if option is not available<|endoftext|>
e2cf03bafc3763d9fd0110ae7fd00c69c8d8bce20fd6fb594284f021ae3e05ba
@property def is_private_authority(self): '\n Tells if authority is private/internal. In other words, it is not publicly trusted.\n If plugin is configured in list LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES, the authority is treated as private\n :return: True if private, False otherwise\n ' return (self.plugin_name in current_app.config.get('LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES', []))
Tells if authority is private/internal. In other words, it is not publicly trusted. If plugin is configured in list LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES, the authority is treated as private :return: True if private, False otherwise
lemur/authorities/models.py
is_private_authority
douglasc-nflx/lemur
0
python
@property def is_private_authority(self): '\n Tells if authority is private/internal. In other words, it is not publicly trusted.\n If plugin is configured in list LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES, the authority is treated as private\n :return: True if private, False otherwise\n ' return (self.plugin_name in current_app.config.get('LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES', []))
@property def is_private_authority(self): '\n Tells if authority is private/internal. In other words, it is not publicly trusted.\n If plugin is configured in list LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES, the authority is treated as private\n :return: True if private, False otherwise\n ' return (self.plugin_name in current_app.config.get('LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES', []))<|docstring|>Tells if authority is private/internal. In other words, it is not publicly trusted. If plugin is configured in list LEMUR_PRIVATE_AUTHORITY_PLUGIN_NAMES, the authority is treated as private :return: True if private, False otherwise<|endoftext|>
57b6867ddf3c6d79a3ab3acacae5b349728400f8e0478789327eb8000940269a
@property def is_cn_optional(self): '\n Parse the options to find whether common name is treated as an optional field.\n Returns False if option is not available\n ' if (not self.options): return False options_array = json.loads(self.options) if isinstance(options_array, list): for option in options_array: if (('name' in option) and (option['name'] == 'cn_optional')): return option['value'] return False
Parse the options to find whether common name is treated as an optional field. Returns False if option is not available
lemur/authorities/models.py
is_cn_optional
douglasc-nflx/lemur
0
python
@property def is_cn_optional(self): '\n Parse the options to find whether common name is treated as an optional field.\n Returns False if option is not available\n ' if (not self.options): return False options_array = json.loads(self.options) if isinstance(options_array, list): for option in options_array: if (('name' in option) and (option['name'] == 'cn_optional')): return option['value'] return False
@property def is_cn_optional(self): '\n Parse the options to find whether common name is treated as an optional field.\n Returns False if option is not available\n ' if (not self.options): return False options_array = json.loads(self.options) if isinstance(options_array, list): for option in options_array: if (('name' in option) and (option['name'] == 'cn_optional')): return option['value'] return False<|docstring|>Parse the options to find whether common name is treated as an optional field. Returns False if option is not available<|endoftext|>
c030d61666ac7d2193b161a815a0068aa2177be630f51a2207ce1f6431e440e1
def __init__(self, file_name=None): '\n Initialisation of the generator, by loading a tensorflow model\n\n Parameters\n ----------\n dir: string\n Path to the tensorflow model to load,\n or a the module name on galsim-hub using the format `hub:module_name`\n ' if ('hub:' in file_name): file_name = ('https://github.com/McWilliamsCenter/galsim_hub/blob/master/hub/%s/model.tar.gz?raw=true' % file_name.split(':')[1]) self.file_name = file_name self.module = None self.quantities = [] self.random_variables = [] self.sample_req_params = {} self.sample_opt_params = {} self.sample_single_params = [] module = hub.Module(self.file_name) self.stamp_size = module.get_attached_message('stamp_size', tf.train.Int64List).value[0] self.pixel_size = module.get_attached_message('pixel_size', tf.train.FloatList).value[0] for k in module.get_input_info_dict(): if ('random_normal' in k): self.random_variables.append(k) continue self.quantities.append(k) self.sample_req_params[k] = float
Initialisation of the generator, by loading a tensorflow model Parameters ---------- dir: string Path to the tensorflow model to load, or a the module name on galsim-hub using the format `hub:module_name`
galsim_hub/generative_model.py
__init__
Hbretonniere/galsim_hub
7
python
def __init__(self, file_name=None): '\n Initialisation of the generator, by loading a tensorflow model\n\n Parameters\n ----------\n dir: string\n Path to the tensorflow model to load,\n or a the module name on galsim-hub using the format `hub:module_name`\n ' if ('hub:' in file_name): file_name = ('https://github.com/McWilliamsCenter/galsim_hub/blob/master/hub/%s/model.tar.gz?raw=true' % file_name.split(':')[1]) self.file_name = file_name self.module = None self.quantities = [] self.random_variables = [] self.sample_req_params = {} self.sample_opt_params = {} self.sample_single_params = [] module = hub.Module(self.file_name) self.stamp_size = module.get_attached_message('stamp_size', tf.train.Int64List).value[0] self.pixel_size = module.get_attached_message('pixel_size', tf.train.FloatList).value[0] for k in module.get_input_info_dict(): if ('random_normal' in k): self.random_variables.append(k) continue self.quantities.append(k) self.sample_req_params[k] = float
def __init__(self, file_name=None): '\n Initialisation of the generator, by loading a tensorflow model\n\n Parameters\n ----------\n dir: string\n Path to the tensorflow model to load,\n or a the module name on galsim-hub using the format `hub:module_name`\n ' if ('hub:' in file_name): file_name = ('https://github.com/McWilliamsCenter/galsim_hub/blob/master/hub/%s/model.tar.gz?raw=true' % file_name.split(':')[1]) self.file_name = file_name self.module = None self.quantities = [] self.random_variables = [] self.sample_req_params = {} self.sample_opt_params = {} self.sample_single_params = [] module = hub.Module(self.file_name) self.stamp_size = module.get_attached_message('stamp_size', tf.train.Int64List).value[0] self.pixel_size = module.get_attached_message('pixel_size', tf.train.FloatList).value[0] for k in module.get_input_info_dict(): if ('random_normal' in k): self.random_variables.append(k) continue self.quantities.append(k) self.sample_req_params[k] = float<|docstring|>Initialisation of the generator, by loading a tensorflow model Parameters ---------- dir: string Path to the tensorflow model to load, or a the module name on galsim-hub using the format `hub:module_name`<|endoftext|>
405da87f1986556f1936161e5b2616393eafdf266b7d4efa3bbb37391ed0a76c
def sample(self, cat, noise=None, rng=None, x_interpolant=None, k_interpolant=None, pad_factor=4, noise_pad_size=0, gsparams=None, session_config=None): '\n Samples galaxy images from the model\n ' if (self.module is None): self.module = hub.Module(self.file_name) self.sess = tf.Session(session_config) self.sess.run(tf.global_variables_initializer()) self.inputs = {} for k in (self.quantities + self.random_variables): tensor_info = self.module.get_input_info_dict()[k] self.inputs[k] = tf.placeholder(tensor_info.dtype, shape=tensor_info.get_shape(), name=k) self.generated_images = self.module(self.inputs) feed_dict = {self.inputs[k]: cat[k] for k in self.quantities} if (rng is None): rng = galsim.BaseDeviate(rng) orig_rng = rng.duplicate() if ('random_normal' in self.random_variables): noise_shape = self.module.get_input_info_dict()['random_normal'].get_shape() noise_shape = ([len(cat)] + [noise_shape[(i + 1)].value for i in range((len(noise_shape) - 1))]) noise_array = np.empty(np.prod(noise_shape), dtype=float) gd = galsim.random.GaussianDeviate(rng, sigma=1) gd.generate(noise_array) feed_dict[self.inputs['random_normal']] = noise_array.reshape(noise_shape).astype('float32') x = self.sess.run(self.generated_images, feed_dict=feed_dict) ims = [] for i in range(len(x)): im = galsim.Image(np.ascontiguousarray(x[i].reshape((self.stamp_size, self.stamp_size)).astype(np.float64)), scale=self.pixel_size) ims.append(galsim.InterpolatedImage(im, x_interpolant=x_interpolant, k_interpolant=k_interpolant, pad_factor=pad_factor, noise_pad_size=noise_pad_size, noise_pad=noise, rng=rng, gsparams=gsparams)) if (len(ims) == 1): ims = ims[0] return ims
Samples galaxy images from the model
galsim_hub/generative_model.py
sample
Hbretonniere/galsim_hub
7
python
def sample(self, cat, noise=None, rng=None, x_interpolant=None, k_interpolant=None, pad_factor=4, noise_pad_size=0, gsparams=None, session_config=None): '\n \n ' if (self.module is None): self.module = hub.Module(self.file_name) self.sess = tf.Session(session_config) self.sess.run(tf.global_variables_initializer()) self.inputs = {} for k in (self.quantities + self.random_variables): tensor_info = self.module.get_input_info_dict()[k] self.inputs[k] = tf.placeholder(tensor_info.dtype, shape=tensor_info.get_shape(), name=k) self.generated_images = self.module(self.inputs) feed_dict = {self.inputs[k]: cat[k] for k in self.quantities} if (rng is None): rng = galsim.BaseDeviate(rng) orig_rng = rng.duplicate() if ('random_normal' in self.random_variables): noise_shape = self.module.get_input_info_dict()['random_normal'].get_shape() noise_shape = ([len(cat)] + [noise_shape[(i + 1)].value for i in range((len(noise_shape) - 1))]) noise_array = np.empty(np.prod(noise_shape), dtype=float) gd = galsim.random.GaussianDeviate(rng, sigma=1) gd.generate(noise_array) feed_dict[self.inputs['random_normal']] = noise_array.reshape(noise_shape).astype('float32') x = self.sess.run(self.generated_images, feed_dict=feed_dict) ims = [] for i in range(len(x)): im = galsim.Image(np.ascontiguousarray(x[i].reshape((self.stamp_size, self.stamp_size)).astype(np.float64)), scale=self.pixel_size) ims.append(galsim.InterpolatedImage(im, x_interpolant=x_interpolant, k_interpolant=k_interpolant, pad_factor=pad_factor, noise_pad_size=noise_pad_size, noise_pad=noise, rng=rng, gsparams=gsparams)) if (len(ims) == 1): ims = ims[0] return ims
def sample(self, cat, noise=None, rng=None, x_interpolant=None, k_interpolant=None, pad_factor=4, noise_pad_size=0, gsparams=None, session_config=None): '\n \n ' if (self.module is None): self.module = hub.Module(self.file_name) self.sess = tf.Session(session_config) self.sess.run(tf.global_variables_initializer()) self.inputs = {} for k in (self.quantities + self.random_variables): tensor_info = self.module.get_input_info_dict()[k] self.inputs[k] = tf.placeholder(tensor_info.dtype, shape=tensor_info.get_shape(), name=k) self.generated_images = self.module(self.inputs) feed_dict = {self.inputs[k]: cat[k] for k in self.quantities} if (rng is None): rng = galsim.BaseDeviate(rng) orig_rng = rng.duplicate() if ('random_normal' in self.random_variables): noise_shape = self.module.get_input_info_dict()['random_normal'].get_shape() noise_shape = ([len(cat)] + [noise_shape[(i + 1)].value for i in range((len(noise_shape) - 1))]) noise_array = np.empty(np.prod(noise_shape), dtype=float) gd = galsim.random.GaussianDeviate(rng, sigma=1) gd.generate(noise_array) feed_dict[self.inputs['random_normal']] = noise_array.reshape(noise_shape).astype('float32') x = self.sess.run(self.generated_images, feed_dict=feed_dict) ims = [] for i in range(len(x)): im = galsim.Image(np.ascontiguousarray(x[i].reshape((self.stamp_size, self.stamp_size)).astype(np.float64)), scale=self.pixel_size) ims.append(galsim.InterpolatedImage(im, x_interpolant=x_interpolant, k_interpolant=k_interpolant, pad_factor=pad_factor, noise_pad_size=noise_pad_size, noise_pad=noise, rng=rng, gsparams=gsparams)) if (len(ims) == 1): ims = ims[0] return ims<|docstring|>Samples galaxy images from the model<|endoftext|>
4fc46b49de612707e4c6ae4758e9b9b85130f41dbd6a153bee5132a6e8eebb4e
def check_path_working(): '\n This function to check\n your working device, in case you transfer this\n application from one to another, in my case;\n I am using laptop and the device .\n Please make sure to change utils.path_utils.py to your\n local directory or as needed\n ' if path.exists(WORKING_PATH_MAC): return WORKING_PATH_MAC else: return WORKING_PATH_PI
This function to check your working device, in case you transfer this application from one to another, in my case; I am using laptop and the device . Please make sure to change utils.path_utils.py to your local directory or as needed
App/utils/path_utils.py
check_path_working
omancommunity/watermelon
0
python
def check_path_working(): '\n This function to check\n your working device, in case you transfer this\n application from one to another, in my case;\n I am using laptop and the device .\n Please make sure to change utils.path_utils.py to your\n local directory or as needed\n ' if path.exists(WORKING_PATH_MAC): return WORKING_PATH_MAC else: return WORKING_PATH_PI
def check_path_working(): '\n This function to check\n your working device, in case you transfer this\n application from one to another, in my case;\n I am using laptop and the device .\n Please make sure to change utils.path_utils.py to your\n local directory or as needed\n ' if path.exists(WORKING_PATH_MAC): return WORKING_PATH_MAC else: return WORKING_PATH_PI<|docstring|>This function to check your working device, in case you transfer this application from one to another, in my case; I am using laptop and the device . Please make sure to change utils.path_utils.py to your local directory or as needed<|endoftext|>
e9a2aac66b04c9ca0295a69c0896f1e01f46afa3c3e813d8b0cc832acebdaeda
def fmrivols2conn(fmri_input, atlas_filename, confounds_fn=None, measure='correlation'): '\n Takes 4D fmri volumes from different and extracts the connectivity matrix\n Parameters\n ----------\n fmri_input: this variable can be:\n path Fullpath to functional images in nifti\n List of Fullpath to functional images in nifti\n nibabel nifti object\n list of nibabel nifti objects\n altas_filename: path\n Fullpath to the parcellation to create the FC matrix of nibabel object containin the atlas.\n Must be in the same space than functional images \n confounds_fn (optional): this variable can be\n Paths to a csv type files with the confound regressors for one dataset.\n List of Fullpath to csv files with confound regressors for the datasets.\n numpy array with regressors where each column is a regressor.\n List of numpy arrays with regressors for each dataset\n measure: str\n {"correlation", "partial correlation", "tangent", "covariance", "precision"}, optional\n Returns\n -------\n FC_matrix: matrix\n Functional connectivy matrix of the image. \n ' masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True) connectome_measure = ConnectivityMeasure(kind=measure) if isinstance(fmri_input, list): timeseries = [] for (i, volume) in enumerate(fmri_input): if (confounds_fn is None): timeseries.append(masker.fit_transform(volume).T) else: timeseries.append(masker.fit_transform(volume, confounds=confounds_fn[i]).T) timeseries = np.array(timeseries) final_ts = np.mean(timeseries, axis=0) elif (isinstance(fmri_input, str) or hasattr(fmri_input, 'affine')): if (confounds_fn is None): final_ts = masker.fit_transform(fmri_input, confounds=confounds_fn).T else: final_ts = masker.fit_transform(fmri_input).T FC_matrix = connectome_measure.fit_transform([final_ts.T])[0] return FC_matrix
Takes 4D fmri volumes from different and extracts the connectivity matrix Parameters ---------- fmri_input: this variable can be: path Fullpath to functional images in nifti List of Fullpath to functional images in nifti nibabel nifti object list of nibabel nifti objects altas_filename: path Fullpath to the parcellation to create the FC matrix of nibabel object containin the atlas. Must be in the same space than functional images confounds_fn (optional): this variable can be Paths to a csv type files with the confound regressors for one dataset. List of Fullpath to csv files with confound regressors for the datasets. numpy array with regressors where each column is a regressor. List of numpy arrays with regressors for each dataset measure: str {"correlation", "partial correlation", "tangent", "covariance", "precision"}, optional Returns ------- FC_matrix: matrix Functional connectivy matrix of the image.
brainspace/utils/volflow.py
fmrivols2conn
ReinderVosDeWael/BrainSpace
11
python
def fmrivols2conn(fmri_input, atlas_filename, confounds_fn=None, measure='correlation'): '\n Takes 4D fmri volumes from different and extracts the connectivity matrix\n Parameters\n ----------\n fmri_input: this variable can be:\n path Fullpath to functional images in nifti\n List of Fullpath to functional images in nifti\n nibabel nifti object\n list of nibabel nifti objects\n altas_filename: path\n Fullpath to the parcellation to create the FC matrix of nibabel object containin the atlas.\n Must be in the same space than functional images \n confounds_fn (optional): this variable can be\n Paths to a csv type files with the confound regressors for one dataset.\n List of Fullpath to csv files with confound regressors for the datasets.\n numpy array with regressors where each column is a regressor.\n List of numpy arrays with regressors for each dataset\n measure: str\n {"correlation", "partial correlation", "tangent", "covariance", "precision"}, optional\n Returns\n -------\n FC_matrix: matrix\n Functional connectivy matrix of the image. \n ' masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True) connectome_measure = ConnectivityMeasure(kind=measure) if isinstance(fmri_input, list): timeseries = [] for (i, volume) in enumerate(fmri_input): if (confounds_fn is None): timeseries.append(masker.fit_transform(volume).T) else: timeseries.append(masker.fit_transform(volume, confounds=confounds_fn[i]).T) timeseries = np.array(timeseries) final_ts = np.mean(timeseries, axis=0) elif (isinstance(fmri_input, str) or hasattr(fmri_input, 'affine')): if (confounds_fn is None): final_ts = masker.fit_transform(fmri_input, confounds=confounds_fn).T else: final_ts = masker.fit_transform(fmri_input).T FC_matrix = connectome_measure.fit_transform([final_ts.T])[0] return FC_matrix
def fmrivols2conn(fmri_input, atlas_filename, confounds_fn=None, measure='correlation'): '\n Takes 4D fmri volumes from different and extracts the connectivity matrix\n Parameters\n ----------\n fmri_input: this variable can be:\n path Fullpath to functional images in nifti\n List of Fullpath to functional images in nifti\n nibabel nifti object\n list of nibabel nifti objects\n altas_filename: path\n Fullpath to the parcellation to create the FC matrix of nibabel object containin the atlas.\n Must be in the same space than functional images \n confounds_fn (optional): this variable can be\n Paths to a csv type files with the confound regressors for one dataset.\n List of Fullpath to csv files with confound regressors for the datasets.\n numpy array with regressors where each column is a regressor.\n List of numpy arrays with regressors for each dataset\n measure: str\n {"correlation", "partial correlation", "tangent", "covariance", "precision"}, optional\n Returns\n -------\n FC_matrix: matrix\n Functional connectivy matrix of the image. 
\n ' masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True) connectome_measure = ConnectivityMeasure(kind=measure) if isinstance(fmri_input, list): timeseries = [] for (i, volume) in enumerate(fmri_input): if (confounds_fn is None): timeseries.append(masker.fit_transform(volume).T) else: timeseries.append(masker.fit_transform(volume, confounds=confounds_fn[i]).T) timeseries = np.array(timeseries) final_ts = np.mean(timeseries, axis=0) elif (isinstance(fmri_input, str) or hasattr(fmri_input, 'affine')): if (confounds_fn is None): final_ts = masker.fit_transform(fmri_input, confounds=confounds_fn).T else: final_ts = masker.fit_transform(fmri_input).T FC_matrix = connectome_measure.fit_transform([final_ts.T])[0] return FC_matrix<|docstring|>Takes 4D fmri volumes from different and extracts the connectivity matrix Parameters ---------- fmri_input: this variable can be: path Fullpath to functional images in nifti List of Fullpath to functional images in nifti nibabel nifti object list of nibabel nifti objects altas_filename: path Fullpath to the parcellation to create the FC matrix of nibabel object containin the atlas. Must be in the same space than functional images confounds_fn (optional): this variable can be Paths to a csv type files with the confound regressors for one dataset. List of Fullpath to csv files with confound regressors for the datasets. numpy array with regressors where each column is a regressor. List of numpy arrays with regressors for each dataset measure: str {"correlation", "partial correlation", "tangent", "covariance", "precision"}, optional Returns ------- FC_matrix: matrix Functional connectivy matrix of the image.<|endoftext|>
dcdec90ca48ed32d8f1d84471931c493af34d665f28a4cd8e55f15565cee0b5e
def __init__(self, distribution): ' Constructor. ' self.verbose = None sdist_.__init__(self, distribution) self.__buildConfiguration = BuildConfiguration() self.dist_dir = self.__buildConfiguration.distDirectory
Constructor.
build_scripts/distutils/src/datafinder_distutils/targets/sdist.py
__init__
schlauch/DataFinder
9
python
def __init__(self, distribution): ' ' self.verbose = None sdist_.__init__(self, distribution) self.__buildConfiguration = BuildConfiguration() self.dist_dir = self.__buildConfiguration.distDirectory
def __init__(self, distribution): ' ' self.verbose = None sdist_.__init__(self, distribution) self.__buildConfiguration = BuildConfiguration() self.dist_dir = self.__buildConfiguration.distDirectory<|docstring|>Constructor.<|endoftext|>
f26c71918e6564693ae10796b1a7ccc1ad9a633c3d75e9dfdac341ed155aa9f4
def run(self): ' \n Sets some new configuration values and runs \n the default run method of the sdist target.\n ' self._prepare() for commandName in self.get_sub_commands(): self.run_command(commandName) self.distribution.packages = self.__buildConfiguration.getPackages() self._createManifestTemplate() sdist_.run(self) shutil.rmtree(_DOCUMENTATION_DIRECTORY)
Sets some new configuration values and runs the default run method of the sdist target.
build_scripts/distutils/src/datafinder_distutils/targets/sdist.py
run
schlauch/DataFinder
9
python
def run(self): ' \n Sets some new configuration values and runs \n the default run method of the sdist target.\n ' self._prepare() for commandName in self.get_sub_commands(): self.run_command(commandName) self.distribution.packages = self.__buildConfiguration.getPackages() self._createManifestTemplate() sdist_.run(self) shutil.rmtree(_DOCUMENTATION_DIRECTORY)
def run(self): ' \n Sets some new configuration values and runs \n the default run method of the sdist target.\n ' self._prepare() for commandName in self.get_sub_commands(): self.run_command(commandName) self.distribution.packages = self.__buildConfiguration.getPackages() self._createManifestTemplate() sdist_.run(self) shutil.rmtree(_DOCUMENTATION_DIRECTORY)<|docstring|>Sets some new configuration values and runs the default run method of the sdist target.<|endoftext|>
45b35b7433ed1cf75f39238512759f5298aef277c9b7d7571250330e462271e3
def _prepare(self): ' Prepares the source distribution creation. ' epydocOptions = self.distribution.get_option_dict('doc') epydocOptions['destdir'] = ('', _DOCUMENTATION_DIRECTORY) modules = 'src/datafinder/script_api' if (not self.__buildConfiguration.excludeClients): modules += ';src/datafinder/gui/user/script_api.py' epydocOptions['modules'] = ('', modules) setVersion(self.__buildConfiguration.fullName) self._adjustSetupConfigurationFile()
Prepares the source distribution creation.
build_scripts/distutils/src/datafinder_distutils/targets/sdist.py
_prepare
schlauch/DataFinder
9
python
def _prepare(self): ' ' epydocOptions = self.distribution.get_option_dict('doc') epydocOptions['destdir'] = (, _DOCUMENTATION_DIRECTORY) modules = 'src/datafinder/script_api' if (not self.__buildConfiguration.excludeClients): modules += ';src/datafinder/gui/user/script_api.py' epydocOptions['modules'] = (, modules) setVersion(self.__buildConfiguration.fullName) self._adjustSetupConfigurationFile()
def _prepare(self): ' ' epydocOptions = self.distribution.get_option_dict('doc') epydocOptions['destdir'] = (, _DOCUMENTATION_DIRECTORY) modules = 'src/datafinder/script_api' if (not self.__buildConfiguration.excludeClients): modules += ';src/datafinder/gui/user/script_api.py' epydocOptions['modules'] = (, modules) setVersion(self.__buildConfiguration.fullName) self._adjustSetupConfigurationFile()<|docstring|>Prepares the source distribution creation.<|endoftext|>
76d885476693f9dd42759d2f3c6cfa1477ee1e4d525cb210a1fce371979a06b4
def _createManifestTemplate(self): ' Handles the creation of the manifest template file. ' try: manifestFileObject = open('MANIFEST.in', 'wb') for filePath in self._getAdditionalFiles(): manifestFileObject.write(('include %s\n' % filePath)) for fileName in os.listdir(_DOCUMENTATION_DIRECTORY): manifestFileObject.write(('include %s\n' % os.path.join(_DOCUMENTATION_DIRECTORY, fileName))) manifestFileObject.close() except IOError: print('Cannot create manifest template file.') sys.exit((- 1))
Handles the creation of the manifest template file.
build_scripts/distutils/src/datafinder_distutils/targets/sdist.py
_createManifestTemplate
schlauch/DataFinder
9
python
def _createManifestTemplate(self): ' ' try: manifestFileObject = open('MANIFEST.in', 'wb') for filePath in self._getAdditionalFiles(): manifestFileObject.write(('include %s\n' % filePath)) for fileName in os.listdir(_DOCUMENTATION_DIRECTORY): manifestFileObject.write(('include %s\n' % os.path.join(_DOCUMENTATION_DIRECTORY, fileName))) manifestFileObject.close() except IOError: print('Cannot create manifest template file.') sys.exit((- 1))
def _createManifestTemplate(self): ' ' try: manifestFileObject = open('MANIFEST.in', 'wb') for filePath in self._getAdditionalFiles(): manifestFileObject.write(('include %s\n' % filePath)) for fileName in os.listdir(_DOCUMENTATION_DIRECTORY): manifestFileObject.write(('include %s\n' % os.path.join(_DOCUMENTATION_DIRECTORY, fileName))) manifestFileObject.close() except IOError: print('Cannot create manifest template file.') sys.exit((- 1))<|docstring|>Handles the creation of the manifest template file.<|endoftext|>
bfed2a3a2fa5cd0ba049cd3bdc16f802f99c5b7771040cd10eae6e186ee323bb
def _getAdditionalFiles(self):
    """ Determines all files which should be distributed but not installed. """
    # Fixed top-level files that always ship with the source distribution.
    result = [
        self.__buildConfiguration.changesFile,
        self.__buildConfiguration.licenseFile,
        os.path.join('script_extensions', 'README.txt'),
    ]
    searchRoots = [
        self.__buildConfiguration.unittestDirectory,
        self.__buildConfiguration.distutilSourceDirectory,
        self.__buildConfiguration.scriptExamplesDirectory,
    ]
    for searchRoot in searchRoots:
        for dirPath, subDirNames, names in os.walk(searchRoot):
            result.extend(
                os.path.join(dirPath, name)
                for name in names
                if name.endswith('.py') and name not in _IGNORE_BUILD_TARGETS)
            # Prune GUI sources from the walk when clients are excluded.
            if self.__buildConfiguration.excludeClients and 'gui' in subDirNames:
                subDirNames.remove('gui')
    return result
Determines all files which should be distributed but not installed.
build_scripts/distutils/src/datafinder_distutils/targets/sdist.py
_getAdditionalFiles
schlauch/DataFinder
9
python
def _getAdditionalFiles(self): ' ' additionalFiles = [self.__buildConfiguration.changesFile, self.__buildConfiguration.licenseFile, os.path.join('script_extensions', 'README.txt')] topLevelDirectories = [self.__buildConfiguration.unittestDirectory, self.__buildConfiguration.distutilSourceDirectory, self.__buildConfiguration.scriptExamplesDirectory] for directory in topLevelDirectories: for (rootPath, dirNames, fileNames) in os.walk(directory): for fileName in fileNames: if (fileName.endswith('.py') and (not (fileName in _IGNORE_BUILD_TARGETS))): additionalFiles.append(os.path.join(rootPath, fileName)) if (self.__buildConfiguration.excludeClients and ('gui' in dirNames)): dirNames.remove('gui') return additionalFiles
def _getAdditionalFiles(self): ' ' additionalFiles = [self.__buildConfiguration.changesFile, self.__buildConfiguration.licenseFile, os.path.join('script_extensions', 'README.txt')] topLevelDirectories = [self.__buildConfiguration.unittestDirectory, self.__buildConfiguration.distutilSourceDirectory, self.__buildConfiguration.scriptExamplesDirectory] for directory in topLevelDirectories: for (rootPath, dirNames, fileNames) in os.walk(directory): for fileName in fileNames: if (fileName.endswith('.py') and (not (fileName in _IGNORE_BUILD_TARGETS))): additionalFiles.append(os.path.join(rootPath, fileName)) if (self.__buildConfiguration.excludeClients and ('gui' in dirNames)): dirNames.remove('gui') return additionalFiles<|docstring|>Determines all files which should be distributed but not installed.<|endoftext|>
38c71ac944c364dfaa54e95e3e5f462e79d3aa42240fb02211aaf43c0056b224
def _adjustSetupConfigurationFile(self):
    """ Corrects the exclude_clients parameter so
    everything works on installation as expected. """
    configurationFileObject = open(_CONFIGURATION_FILE_NAME, 'rb')
    lines = configurationFileObject.readlines()
    configurationFileObject.close()

    # Build a fresh line list instead of mutating while iterating;
    # each matching line is replaced in place, everything else is kept.
    rewritten = []
    for line in lines:
        if _EXCLUDE_CLIENTS_KEYWORD in line:
            rewritten.append(_EXCLUDE_CLIENTS_KEYWORD + '='
                             + str(int(self.__buildConfiguration.excludeClients))
                             + '\n')
        elif _REVISION_KEYWORD in line:
            rewritten.append(_REVISION_KEYWORD + '='
                             + self.__buildConfiguration.revision + '\n')
        elif _IS_RELEASE_KEYWORD in line:
            rewritten.append(_IS_RELEASE_KEYWORD + '='
                             + str(int(self.__buildConfiguration.isRelease))
                             + '\n')
        else:
            rewritten.append(line)

    configurationFileObject = open(_CONFIGURATION_FILE_NAME, 'wb')
    configurationFileObject.writelines(rewritten)
    configurationFileObject.close()
Corrects the exclude_clients parameter so everything works on installation as expected.
build_scripts/distutils/src/datafinder_distutils/targets/sdist.py
_adjustSetupConfigurationFile
schlauch/DataFinder
9
python
def _adjustSetupConfigurationFile(self): ' Corrects the exclude_clients parameter so \n everything works on installation as expected. ' configurationFileObject = open(_CONFIGURATION_FILE_NAME, 'rb') lines = configurationFileObject.readlines() configurationFileObject.close() for line in lines: if (_EXCLUDE_CLIENTS_KEYWORD in line): index = lines.index(line) lines.remove(line) lines.insert(index, (((_EXCLUDE_CLIENTS_KEYWORD + '=') + str(int(self.__buildConfiguration.excludeClients))) + '\n')) elif (_REVISION_KEYWORD in line): index = lines.index(line) lines.remove(line) lines.insert(index, (((_REVISION_KEYWORD + '=') + self.__buildConfiguration.revision) + '\n')) elif (_IS_RELEASE_KEYWORD in line): index = lines.index(line) lines.remove(line) lines.insert(index, (((_IS_RELEASE_KEYWORD + '=') + str(int(self.__buildConfiguration.isRelease))) + '\n')) configurationFileObject = open(_CONFIGURATION_FILE_NAME, 'wb') configurationFileObject.writelines(lines) configurationFileObject.close()
def _adjustSetupConfigurationFile(self): ' Corrects the exclude_clients parameter so \n everything works on installation as expected. ' configurationFileObject = open(_CONFIGURATION_FILE_NAME, 'rb') lines = configurationFileObject.readlines() configurationFileObject.close() for line in lines: if (_EXCLUDE_CLIENTS_KEYWORD in line): index = lines.index(line) lines.remove(line) lines.insert(index, (((_EXCLUDE_CLIENTS_KEYWORD + '=') + str(int(self.__buildConfiguration.excludeClients))) + '\n')) elif (_REVISION_KEYWORD in line): index = lines.index(line) lines.remove(line) lines.insert(index, (((_REVISION_KEYWORD + '=') + self.__buildConfiguration.revision) + '\n')) elif (_IS_RELEASE_KEYWORD in line): index = lines.index(line) lines.remove(line) lines.insert(index, (((_IS_RELEASE_KEYWORD + '=') + str(int(self.__buildConfiguration.isRelease))) + '\n')) configurationFileObject = open(_CONFIGURATION_FILE_NAME, 'wb') configurationFileObject.writelines(lines) configurationFileObject.close()<|docstring|>Corrects the exclude_clients parameter so everything works on installation as expected.<|endoftext|>
32a99ec79f84d04a06bca4adb14255d5fbc747d71c2791ab2b3bae4140f22d3e
def __init__(self, circuitcontainer, addrmap=None):
    """
    :param circuitcontainer: an object which implements
        :class:`interface.ICircuitContainer`
    """
    # Adapt the container so circuits/streams can be looked up and closed.
    self.circuit_container = ICircuitContainer(circuitcontainer)

    self.id = None
    "An int, Tor's ID for this :class:`txtorcon.Circuit`"

    self.state = None
    "A string, Tor's idea of the state of this :class:`txtorcon.Stream`"

    self.target_host = None
    "Usually a hostname, but sometimes an IP address (e.g. when we query existing state from Tor)"

    self.target_addr = None
    "If available, the IP address we're connecting to (if None, see target_host instead)."

    self.target_port = 0
    "The port we're connecting to."

    self.circuit = None
    "If we've attached to a :class:`txtorcon.Circuit`, this will be an instance of :class:`txtorcon.Circuit` (otherwise None)."

    self.listeners = []
    "A list of all connected :class:`txtorcon.interface.IStreamListener` instances."

    self.source_addr = None
    "If available, the address from which this Stream originated (e.g. local process, etc). See get_process() also."

    self.source_port = 0
    "If available, the port from which this Stream originated. See get_process() also."

    self.flags = {}
    "All flags from last update to this Stream. str->str"

    self._closing_deferred = None
    "Internal. Holds Deferred that will callback when this stream is CLOSED, FAILED (or DETACHED??)"

    # Optional address map used to resolve target addresses.
    self._addrmap = addrmap
:param circuitcontainer: an object which implements :class:`interface.ICircuitContainer`
txtorcon/stream.py
__init__
PrivateStorageio/txtorcon
180
python
def __init__(self, circuitcontainer, addrmap=None): '\n :param circuitcontainer: an object which implements\n :class:`interface.ICircuitContainer`\n ' self.circuit_container = ICircuitContainer(circuitcontainer) self.id = None "An int, Tor's ID for this :class:`txtorcon.Circuit`" self.state = None "A string, Tor's idea of the state of this\n :class:`txtorcon.Stream`" self.target_host = None 'Usually a hostname, but sometimes an IP address (e.g. when\n we query existing state from Tor)' self.target_addr = None "If available, the IP address we're connecting to (if None,\n see target_host instead)." self.target_port = 0 "The port we're connecting to." self.circuit = None "If we've attached to a :class:`txtorcon.Circuit`, this will\n be an instance of :class:`txtorcon.Circuit` (otherwise None)." self.listeners = [] 'A list of all connected\n :class:`txtorcon.interface.IStreamListener` instances.' self.source_addr = None 'If available, the address from which this Stream originated\n (e.g. local process, etc). See get_process() also.' self.source_port = 0 'If available, the port from which this Stream\n originated. See get_process() also.' self.flags = {} 'All flags from last update to this Stream. str->str' self._closing_deferred = None 'Internal. Holds Deferred that will callback when this\n stream is CLOSED, FAILED (or DETACHED??)' self._addrmap = addrmap
def __init__(self, circuitcontainer, addrmap=None): '\n :param circuitcontainer: an object which implements\n :class:`interface.ICircuitContainer`\n ' self.circuit_container = ICircuitContainer(circuitcontainer) self.id = None "An int, Tor's ID for this :class:`txtorcon.Circuit`" self.state = None "A string, Tor's idea of the state of this\n :class:`txtorcon.Stream`" self.target_host = None 'Usually a hostname, but sometimes an IP address (e.g. when\n we query existing state from Tor)' self.target_addr = None "If available, the IP address we're connecting to (if None,\n see target_host instead)." self.target_port = 0 "The port we're connecting to." self.circuit = None "If we've attached to a :class:`txtorcon.Circuit`, this will\n be an instance of :class:`txtorcon.Circuit` (otherwise None)." self.listeners = [] 'A list of all connected\n :class:`txtorcon.interface.IStreamListener` instances.' self.source_addr = None 'If available, the address from which this Stream originated\n (e.g. local process, etc). See get_process() also.' self.source_port = 0 'If available, the port from which this Stream\n originated. See get_process() also.' self.flags = {} 'All flags from last update to this Stream. str->str' self._closing_deferred = None 'Internal. Holds Deferred that will callback when this\n stream is CLOSED, FAILED (or DETACHED??)' self._addrmap = addrmap<|docstring|>:param circuitcontainer: an object which implements :class:`interface.ICircuitContainer`<|endoftext|>
b3d84bdcf0422b60da12d0e3af70718c54fb598e130b433eacc325bebf228c65
def listen(self, listen):
    """
    Attach an :class:`txtorcon.interface.IStreamListener` to this stream.

    See also :meth:`txtorcon.TorState.add_stream_listener` to
    listen to all streams.

    :param listen: something that knows
        :class:`txtorcon.interface.IStreamListener`
    """
    adapted = IStreamListener(listen)
    # Don't register the same listener twice.
    if adapted not in self.listeners:
        self.listeners.append(adapted)
Attach an :class:`txtorcon.interface.IStreamListener` to this stream. See also :meth:`txtorcon.TorState.add_stream_listener` to listen to all streams. :param listen: something that knows :class:`txtorcon.interface.IStreamListener`
txtorcon/stream.py
listen
PrivateStorageio/txtorcon
180
python
def listen(self, listen): '\n Attach an :class:`txtorcon.interface.IStreamListener` to this stream.\n\n See also :meth:`txtorcon.TorState.add_stream_listener` to\n listen to all streams.\n\n :param listen: something that knows\n :class:`txtorcon.interface.IStreamListener`\n ' listener = IStreamListener(listen) if (listener not in self.listeners): self.listeners.append(listener)
def listen(self, listen): '\n Attach an :class:`txtorcon.interface.IStreamListener` to this stream.\n\n See also :meth:`txtorcon.TorState.add_stream_listener` to\n listen to all streams.\n\n :param listen: something that knows\n :class:`txtorcon.interface.IStreamListener`\n ' listener = IStreamListener(listen) if (listener not in self.listeners): self.listeners.append(listener)<|docstring|>Attach an :class:`txtorcon.interface.IStreamListener` to this stream. See also :meth:`txtorcon.TorState.add_stream_listener` to listen to all streams. :param listen: something that knows :class:`txtorcon.interface.IStreamListener`<|endoftext|>
1b94311b21325494e51c159fdedce069e622b684fd535a21b7f998d269080af3
def close(self, **kw):
    """
    This asks Tor to close the underlying stream object. See
    :meth:`txtorcon.interface.ITorControlProtocol.close_stream`
    for details.

    Although Tor currently takes no flags, it allows you to; any
    keyword arguments are passed through as flags.

    NOTE that the callback delivered from this method only
    callbacks after the underlying stream is really destroyed
    (*not* just when the CLOSESTREAM command has successfully
    completed).
    """
    self._closing_deferred = defer.Deferred()

    def _wait_for_real_close(*ignored):
        # The CLOSESTREAM command being queued/accepted is not enough;
        # chain onto the Deferred that fires when the stream is gone.
        return self._closing_deferred

    command_deferred = self.circuit_container.close_stream(self, **kw)
    command_deferred.addCallback(_wait_for_real_close)
    return self._closing_deferred
This asks Tor to close the underlying stream object. See :meth:`txtorcon.interface.ITorControlProtocol.close_stream` for details. Although Tor currently takes no flags, it allows you to; any keyword arguments are passed through as flags. NOTE that the callback delivered from this method only callbacks after the underlying stream is really destroyed (*not* just when the CLOSESTREAM command has successfully completed).
txtorcon/stream.py
close
PrivateStorageio/txtorcon
180
python
def close(self, **kw): '\n This asks Tor to close the underlying stream object. See\n :meth:`txtorcon.interface.ITorControlProtocol.close_stream`\n for details.\n\n Although Tor currently takes no flags, it allows you to; any\n keyword arguments are passed through as flags.\n\n NOTE that the callback delivered from this method only\n callbacks after the underlying stream is really destroyed\n (*not* just when the CLOSESTREAM command has successfully\n completed).\n ' self._closing_deferred = defer.Deferred() def close_command_is_queued(*args): return self._closing_deferred d = self.circuit_container.close_stream(self, **kw) d.addCallback(close_command_is_queued) return self._closing_deferred
def close(self, **kw): '\n This asks Tor to close the underlying stream object. See\n :meth:`txtorcon.interface.ITorControlProtocol.close_stream`\n for details.\n\n Although Tor currently takes no flags, it allows you to; any\n keyword arguments are passed through as flags.\n\n NOTE that the callback delivered from this method only\n callbacks after the underlying stream is really destroyed\n (*not* just when the CLOSESTREAM command has successfully\n completed).\n ' self._closing_deferred = defer.Deferred() def close_command_is_queued(*args): return self._closing_deferred d = self.circuit_container.close_stream(self, **kw) d.addCallback(close_command_is_queued) return self._closing_deferred<|docstring|>This asks Tor to close the underlying stream object. See :meth:`txtorcon.interface.ITorControlProtocol.close_stream` for details. Although Tor currently takes no flags, it allows you to; any keyword arguments are passed through as flags. NOTE that the callback delivered from this method only callbacks after the underlying stream is really destroyed (*not* just when the CLOSESTREAM command has successfully completed).<|endoftext|>
e8f4ebf488807539561a2cc40ab62dde78877b99b8904a6d18a86610f2024adb
def _create_flags(self, kw): '\n this clones the kw dict, adding a lower-case version of every key\n (duplicated in circuit.py; consider putting in util?)\n ' flags = {} for k in kw.keys(): flags[k] = kw[k] flags[k.lower()] = flags[k] return flags
this clones the kw dict, adding a lower-case version of every key (duplicated in circuit.py; consider putting in util?)
txtorcon/stream.py
_create_flags
PrivateStorageio/txtorcon
180
python
def _create_flags(self, kw): '\n this clones the kw dict, adding a lower-case version of every key\n (duplicated in circuit.py; consider putting in util?)\n ' flags = {} for k in kw.keys(): flags[k] = kw[k] flags[k.lower()] = flags[k] return flags
def _create_flags(self, kw): '\n this clones the kw dict, adding a lower-case version of every key\n (duplicated in circuit.py; consider putting in util?)\n ' flags = {} for k in kw.keys(): flags[k] = kw[k] flags[k.lower()] = flags[k] return flags<|docstring|>this clones the kw dict, adding a lower-case version of every key (duplicated in circuit.py; consider putting in util?)<|endoftext|>
d2d4911c78615e3b9fb7d74fd2f5af8a671b522c633ba41dd7b47414ad08c59c
def _notify(self, func, *args, **kw): "\n Internal helper. Calls the IStreamListener function 'func' with\n the given args, guarding around errors.\n " for x in self.listeners: try: getattr(x, func)(*args, **kw) except Exception: log.err()
Internal helper. Calls the IStreamListener function 'func' with the given args, guarding around errors.
txtorcon/stream.py
_notify
PrivateStorageio/txtorcon
180
python
def _notify(self, func, *args, **kw): "\n Internal helper. Calls the IStreamListener function 'func' with\n the given args, guarding around errors.\n " for x in self.listeners: try: getattr(x, func)(*args, **kw) except Exception: log.err()
def _notify(self, func, *args, **kw): "\n Internal helper. Calls the IStreamListener function 'func' with\n the given args, guarding around errors.\n " for x in self.listeners: try: getattr(x, func)(*args, **kw) except Exception: log.err()<|docstring|>Internal helper. Calls the IStreamListener function 'func' with the given args, guarding around errors.<|endoftext|>
c142f99263890f49fa839c05249bb2406cc0a095658241fa4254b80f9084de51
def maybe_call_closing_deferred(self):
    """
    Used internally to callback on the _closing_deferred if it
    exists.
    """
    pending = self._closing_deferred
    if pending:
        # Fire with this stream as the result, then forget the Deferred
        # so it can never be fired twice.
        pending.callback(self)
        self._closing_deferred = None
Used internally to callback on the _closing_deferred if it exists.
txtorcon/stream.py
maybe_call_closing_deferred
PrivateStorageio/txtorcon
180
python
def maybe_call_closing_deferred(self): '\n Used internally to callback on the _closing_deferred if it\n exists.\n ' if self._closing_deferred: self._closing_deferred.callback(self) self._closing_deferred = None
def maybe_call_closing_deferred(self): '\n Used internally to callback on the _closing_deferred if it\n exists.\n ' if self._closing_deferred: self._closing_deferred.callback(self) self._closing_deferred = None<|docstring|>Used internally to callback on the _closing_deferred if it exists.<|endoftext|>
1f6444b363a83946d94d70dc335aaefe0d91be6cb7ebd8a930e222777106f6c9
@pytest.fixture(autouse=True)
def setup(self, request, connection_with_udf):
    """
    Setup method.

    Writes 15 test records and starts a scan-apply job whose id
    (``self.job_id``) the tests query via ``job_info()``.
    """
    for i in range(15):
        key = ('test', 'demo', i)
        rec = {'age': i}
        connection_with_udf.put(key, rec)
    # The original also built an unused `policy = {}` local -- removed.
    self.job_id = connection_with_udf.scan_apply(
        'test', 'demo', 'bin_lua', 'mytransform', ['age', 2])

    def teardown():
        """
        Teardown method.
        """
        for i in range(15):
            key = ('test', 'demo', i)
            connection_with_udf.remove(key)

    request.addfinalizer(teardown)
Setup method.
test/new_tests/test_job_info.py
setup
vpnable/aerospike-client-python
105
python
@pytest.fixture(autouse=True) def setup(self, request, connection_with_udf): '\n \n ' for i in range(15): key = ('test', 'demo', i) rec = {'age': i} connection_with_udf.put(key, rec) policy = {} self.job_id = connection_with_udf.scan_apply('test', 'demo', 'bin_lua', 'mytransform', ['age', 2]) def teardown(): '\n Teardown method.\n ' for i in range(15): key = ('test', 'demo', i) connection_with_udf.remove(key) request.addfinalizer(teardown)
@pytest.fixture(autouse=True) def setup(self, request, connection_with_udf): '\n \n ' for i in range(15): key = ('test', 'demo', i) rec = {'age': i} connection_with_udf.put(key, rec) policy = {} self.job_id = connection_with_udf.scan_apply('test', 'demo', 'bin_lua', 'mytransform', ['age', 2]) def teardown(): '\n Teardown method.\n ' for i in range(15): key = ('test', 'demo', i) connection_with_udf.remove(key) request.addfinalizer(teardown)<|docstring|>Setup method.<|endoftext|>
addad49a26f3ce8243b715fce9863a65aba642bfe694d3a617718bf95e5fdfdc
def test_job_info_with_no_parameters(self):
    """
    Invoke job_info() without any mandatory parameters.
    """
    with pytest.raises(TypeError) as typeError:
        self.as_connection.job_info()
    # The missing positional argument must be named in the error message.
    assert "argument 'job_id' (pos 1)" in str(typeError.value)
Invoke job_info() without any mandatory parameters.
test/new_tests/test_job_info.py
test_job_info_with_no_parameters
vpnable/aerospike-client-python
105
python
def test_job_info_with_no_parameters(self): '\n \n ' with pytest.raises(TypeError) as typeError: self.as_connection.job_info() assert ("argument 'job_id' (pos 1)" in str(typeError.value))
def test_job_info_with_no_parameters(self): '\n \n ' with pytest.raises(TypeError) as typeError: self.as_connection.job_info() assert ("argument 'job_id' (pos 1)" in str(typeError.value))<|docstring|>Invoke job_info() without any mandatory parameters.<|endoftext|>
0dfbdb751d744caf0fc592c32dcbf1433ba882147c7c94a003f1ed60bdde6b3e
@pytest.mark.xfail(reason='This test fails if job_info() finishes in < 1ms')
def test_job_info_with_small_timeout(self, connection_with_udf):
    """
    Invoke job_info() with correct policy and an expected timeout.
    """
    policy = {'timeout': 1}  # 1 ms: short enough that the job should still run
    self.job_id = connection_with_udf.scan_apply(
        'test', 'demo', 'bin_lua', 'mytransform', ['age', 2], block=False)
    with pytest.raises(e.TimeoutError):
        # Return value is irrelevant (the original bound it to an unused
        # local); the call itself is expected to time out.
        self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy)
Invoke job_info() with correct policy and an expected timeout
test/new_tests/test_job_info.py
test_job_info_with_small_timeout
vpnable/aerospike-client-python
105
python
@pytest.mark.xfail(reason='This test fails if job_info() finishes in < 1ms') def test_job_info_with_small_timeout(self, connection_with_udf): '\n \n ' policy = {'timeout': 1} self.job_id = connection_with_udf.scan_apply('test', 'demo', 'bin_lua', 'mytransform', ['age', 2], block=False) with pytest.raises(e.TimeoutError): job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy)
@pytest.mark.xfail(reason='This test fails if job_info() finishes in < 1ms') def test_job_info_with_small_timeout(self, connection_with_udf): '\n \n ' policy = {'timeout': 1} self.job_id = connection_with_udf.scan_apply('test', 'demo', 'bin_lua', 'mytransform', ['age', 2], block=False) with pytest.raises(e.TimeoutError): job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy)<|docstring|>Invoke job_info() with correct policy and an expected timeout<|endoftext|>
b86c97da56697aa7bf1a70af63fe33f6deb697f9a815cd53329fca5f64466218
def test_job_info_with_correct_parameters(self):
    """
    Invoke job_info() with correct parameters.
    """
    job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN)
    valid_statuses = (aerospike.JOB_STATUS_COMPLETED,
                      aerospike.JOB_STATUS_INPROGRESS)
    assert job_info['status'] in valid_statuses
    # The info dict must expose the documented progress fields.
    for field in ('status', 'progress_pct', 'records_read'):
        assert field in job_info
Invoke job_info() with correct parameters
test/new_tests/test_job_info.py
test_job_info_with_correct_parameters
vpnable/aerospike-client-python
105
python
def test_job_info_with_correct_parameters(self): '\n \n ' job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN) valid_statuses = (aerospike.JOB_STATUS_COMPLETED, aerospike.JOB_STATUS_INPROGRESS) assert (job_info['status'] in valid_statuses) expected_fields = ('status', 'progress_pct', 'records_read') for field in expected_fields: assert (field in job_info)
def test_job_info_with_correct_parameters(self): '\n \n ' job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN) valid_statuses = (aerospike.JOB_STATUS_COMPLETED, aerospike.JOB_STATUS_INPROGRESS) assert (job_info['status'] in valid_statuses) expected_fields = ('status', 'progress_pct', 'records_read') for field in expected_fields: assert (field in job_info)<|docstring|>Invoke job_info() with correct parameters<|endoftext|>
75ec320b34a4d2cc319a19c4e395a7aec1d50ef29d44c6bf166568546e74e234
def test_job_info_with_correct_policy(self):
    """
    Invoke job_info() with correct policy.
    """
    policy = {'timeout': 1000}
    job_info = self.as_connection.job_info(
        self.job_id, aerospike.JOB_SCAN, policy)
    assert job_info['status'] in (aerospike.JOB_STATUS_COMPLETED,
                                  aerospike.JOB_STATUS_INPROGRESS)
Invoke job_info() with correct policy
test/new_tests/test_job_info.py
test_job_info_with_correct_policy
vpnable/aerospike-client-python
105
python
def test_job_info_with_correct_policy(self): '\n \n ' policy = {'timeout': 1000} job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy) valid_statuses = (aerospike.JOB_STATUS_COMPLETED, aerospike.JOB_STATUS_INPROGRESS) assert (job_info['status'] in valid_statuses)
def test_job_info_with_correct_policy(self): '\n \n ' policy = {'timeout': 1000} job_info = self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy) valid_statuses = (aerospike.JOB_STATUS_COMPLETED, aerospike.JOB_STATUS_INPROGRESS) assert (job_info['status'] in valid_statuses)<|docstring|>Invoke job_info() with correct policy<|endoftext|>
6f3390a642e5ccb8e9fbf6b8c4166c7d2bfeb88a9275cf0632d266525557727d
def test_job_info_with_incorrect_policy(self):
    """
    Invoke job_info() with incorrect policy.
    """
    policy = {'timeout': 0.5}  # non-integer timeout must be rejected
    with pytest.raises(e.ParamError) as err_info:
        self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy)
    assert err_info.value.code == -2
    assert err_info.value.msg == 'timeout is invalid'
Invoke job_info() with incorrect policy
test/new_tests/test_job_info.py
test_job_info_with_incorrect_policy
vpnable/aerospike-client-python
105
python
def test_job_info_with_incorrect_policy(self): '\n \n ' policy = {'timeout': 0.5} with pytest.raises(e.ParamError) as err_info: self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy) assert (err_info.value.code == (- 2)) assert (err_info.value.msg == 'timeout is invalid')
def test_job_info_with_incorrect_policy(self): '\n \n ' policy = {'timeout': 0.5} with pytest.raises(e.ParamError) as err_info: self.as_connection.job_info(self.job_id, aerospike.JOB_SCAN, policy) assert (err_info.value.code == (- 2)) assert (err_info.value.msg == 'timeout is invalid')<|docstring|>Invoke job_info() with incorrect policy<|endoftext|>
bd7066edbc18288930fa65ff4263615bf50c0bf5c8e4e79249e62758fc2b2b01
def test_job_info_with_scanid_incorrect(self):
    """
    Invoke job_info() with scan id incorrect,
    this should not raise an error.
    """
    unknown_id = self.job_id + 2
    response = self.as_connection.job_info(unknown_id, aerospike.JOB_SCAN)
    # Unknown job ids are reported as completed rather than erroring.
    assert response['status'] == aerospike.JOB_STATUS_COMPLETED
Invoke job_info() with scan id incorrect, this should not raise an error
test/new_tests/test_job_info.py
test_job_info_with_scanid_incorrect
vpnable/aerospike-client-python
105
python
def test_job_info_with_scanid_incorrect(self): '\n Invoke job_info() with scan id incorrect,\n this should not raise an error\n ' response = self.as_connection.job_info((self.job_id + 2), aerospike.JOB_SCAN) assert (response['status'] == aerospike.JOB_STATUS_COMPLETED)
def test_job_info_with_scanid_incorrect(self): '\n Invoke job_info() with scan id incorrect,\n this should not raise an error\n ' response = self.as_connection.job_info((self.job_id + 2), aerospike.JOB_SCAN) assert (response['status'] == aerospike.JOB_STATUS_COMPLETED)<|docstring|>Invoke job_info() with scan id incorrect, this should not raise an error<|endoftext|>
c5ddb9f0a9a57c38876bc9179159447362fde11bfb05864851328020b55adb32
def test_job_info_with_largeid(self):
    """
    Invoke job_info() with a large scan id,
    this should not raise an error.
    """
    huge_id = 13287138843617152748  # exceeds any id the server assigned
    response = self.as_connection.job_info(huge_id, aerospike.JOB_SCAN)
    assert response['status'] == aerospike.JOB_STATUS_COMPLETED
Invoke job_info() with a large scan id, this should not raise an error
test/new_tests/test_job_info.py
test_job_info_with_largeid
vpnable/aerospike-client-python
105
python
def test_job_info_with_largeid(self): '\n Invoke job_info() with a large scan id,\n this should not raise an error\n ' response = self.as_connection.job_info(13287138843617152748, aerospike.JOB_SCAN) assert (response['status'] == aerospike.JOB_STATUS_COMPLETED)
def test_job_info_with_largeid(self): '\n Invoke job_info() with a large scan id,\n this should not raise an error\n ' response = self.as_connection.job_info(13287138843617152748, aerospike.JOB_SCAN) assert (response['status'] == aerospike.JOB_STATUS_COMPLETED)<|docstring|>Invoke job_info() with a large scan id, this should not raise an error<|endoftext|>
58148f5b8e6c86b8c06e045c6691091434fdfc25f038d52e9011266b9d7c192c
def test_job_info_with_scanid_string(self):
    """
    Invoke job_info() with scan id incorrect.
    """
    with pytest.raises(TypeError) as typeError:
        self.as_connection.job_info('string')
    # Wording differs across Python versions ("must be int" / "must be an int").
    expected = ('job_info() argument 1 must be int',
                'job_info() argument 1 must be an int')
    assert any(msg in str(typeError.value) for msg in expected)
Invoke job_info() with scan id incorrect
test/new_tests/test_job_info.py
test_job_info_with_scanid_string
vpnable/aerospike-client-python
105
python
def test_job_info_with_scanid_string(self): '\n \n ' with pytest.raises(TypeError) as typeError: self.as_connection.job_info('string') assert any([('job_info() argument 1 must be int' in str(typeError.value)), ('job_info() argument 1 must be an int' in str(typeError.value))])
def test_job_info_with_scanid_string(self): '\n \n ' with pytest.raises(TypeError) as typeError: self.as_connection.job_info('string') assert any([('job_info() argument 1 must be int' in str(typeError.value)), ('job_info() argument 1 must be an int' in str(typeError.value))])<|docstring|>Invoke job_info() with scan id incorrect<|endoftext|>
34c7f2545cf03607ad9aeb747c1156d09d1a1adb1c45960d2eeb59ace1fa1e6f
def test_job_info_with_correct_parameters_without_connection(self):
    """
    Invoke job_info() with correct parameters without connection.
    """
    # A freshly-created (never connected) client must raise ClusterError.
    config = {'hosts': [('127.0.0.1', 3000)]}
    unconnected_client = aerospike.client(config)
    with pytest.raises(e.ClusterError) as err_info:
        unconnected_client.job_info(self.job_id, aerospike.JOB_SCAN)
    assert err_info.value.code == AerospikeStatus.AEROSPIKE_CLUSTER_ERROR
Invoke job_info() with correct parameters without connection
test/new_tests/test_job_info.py
test_job_info_with_correct_parameters_without_connection
vpnable/aerospike-client-python
105
python
def test_job_info_with_correct_parameters_without_connection(self): '\n \n ' config = {'hosts': [('127.0.0.1', 3000)]} client1 = aerospike.client(config) with pytest.raises(e.ClusterError) as err_info: client1.job_info(self.job_id, aerospike.JOB_SCAN) assert (err_info.value.code == AerospikeStatus.AEROSPIKE_CLUSTER_ERROR)
def test_job_info_with_correct_parameters_without_connection(self): '\n \n ' config = {'hosts': [('127.0.0.1', 3000)]} client1 = aerospike.client(config) with pytest.raises(e.ClusterError) as err_info: client1.job_info(self.job_id, aerospike.JOB_SCAN) assert (err_info.value.code == AerospikeStatus.AEROSPIKE_CLUSTER_ERROR)<|docstring|>Invoke job_info() with correct parameters without connection<|endoftext|>
e5ded2708859eebf5d161374cecab0fa4f8b7e6cdf1943fa336b944a0fd01c20
def test_job_info_with_constant_out_of_valid_values(self): '\n Invoke job_info() with the scan module out of the expected range\n ' with pytest.raises(e.ParamError): response = self.as_connection.job_info(self.job_id, 'not query nor scan')
Invoke job_info() with the scan module out of the expected range
test/new_tests/test_job_info.py
test_job_info_with_constant_out_of_valid_values
vpnable/aerospike-client-python
105
python
def test_job_info_with_constant_out_of_valid_values(self): '\n \n ' with pytest.raises(e.ParamError): response = self.as_connection.job_info(self.job_id, 'not query nor scan')
def test_job_info_with_constant_out_of_valid_values(self): '\n \n ' with pytest.raises(e.ParamError): response = self.as_connection.job_info(self.job_id, 'not query nor scan')<|docstring|>Invoke job_info() with the scan module out of the expected range<|endoftext|>
8b76e070357110490f6f4a50649f8d0c45e256fb7aa50acefab655de687d9d8d
@pytest.mark.parametrize('module', (None, 1.5, {}, [], 0)) def test_job_info_with_module_wrong_type(self, module): '\n Invoke job_info() with the scan module argument of the wrong type\n ' with pytest.raises(TypeError): response = self.as_connection.job_info(self.job_id, module)
Invoke job_info() with the scan module argument of the wrong type
test/new_tests/test_job_info.py
test_job_info_with_module_wrong_type
vpnable/aerospike-client-python
105
python
@pytest.mark.parametrize('module', (None, 1.5, {}, [], 0)) def test_job_info_with_module_wrong_type(self, module): '\n \n ' with pytest.raises(TypeError): response = self.as_connection.job_info(self.job_id, module)
@pytest.mark.parametrize('module', (None, 1.5, {}, [], 0)) def test_job_info_with_module_wrong_type(self, module): '\n \n ' with pytest.raises(TypeError): response = self.as_connection.job_info(self.job_id, module)<|docstring|>Invoke job_info() with the scan module argument of the wrong type<|endoftext|>
d49bc4fc592e81f449c819054313c3a191e6a3e99b5c433646d55cc5da0c552d
def teardown(): '\n Teardown method.\n ' for i in range(15): key = ('test', 'demo', i) connection_with_udf.remove(key)
Teardown method.
test/new_tests/test_job_info.py
teardown
vpnable/aerospike-client-python
105
python
def teardown(): '\n \n ' for i in range(15): key = ('test', 'demo', i) connection_with_udf.remove(key)
def teardown(): '\n \n ' for i in range(15): key = ('test', 'demo', i) connection_with_udf.remove(key)<|docstring|>Teardown method.<|endoftext|>