body_hash stringlengths 64 64 | body stringlengths 23 109k | docstring stringlengths 1 57k | path stringlengths 4 198 | name stringlengths 1 115 | repository_name stringlengths 7 111 | repository_stars float64 0 191k | lang stringclasses 1 value | body_without_docstring stringlengths 14 108k | unified stringlengths 45 133k |
|---|---|---|---|---|---|---|---|---|---|
d5cc93a570e72de8e6b998f7e758051c650a383c69c9d944beefe79ddc415e7e | def set_AccessTokenSecret(self, value):
'\n Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.)\n '
super(UnfollowUserInputSet, self)._set_input('AccessTokenSecret', value) | Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.) | temboo/core/Library/Tumblr/User/UnfollowUser.py | set_AccessTokenSecret | jordanemedlock/psychtruths | 7 | python | def set_AccessTokenSecret(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('AccessTokenSecret', value) | def set_AccessTokenSecret(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('AccessTokenSecret', value)<|docstring|>Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.)<|endoftext|> |
3e24fec27fc4369212d876e4a9a8e1875e95f556c78612927d7bd0d81e4338f8 | def set_AccessToken(self, value):
'\n Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.)\n '
super(UnfollowUserInputSet, self)._set_input('AccessToken', value) | Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.) | temboo/core/Library/Tumblr/User/UnfollowUser.py | set_AccessToken | jordanemedlock/psychtruths | 7 | python | def set_AccessToken(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('AccessToken', value) | def set_AccessToken(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('AccessToken', value)<|docstring|>Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.)<|endoftext|> |
ed758b855bd7a03459d40556792d1a005141ab9aae516955638b2838e0835100 | def set_ResponseFormat(self, value):
'\n Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Can be set to xml or json. Defaults to json.)\n '
super(UnfollowUserInputSet, self)._set_input('ResponseFormat', value) | Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Can be set to xml or json. Defaults to json.) | temboo/core/Library/Tumblr/User/UnfollowUser.py | set_ResponseFormat | jordanemedlock/psychtruths | 7 | python | def set_ResponseFormat(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('ResponseFormat', value) | def set_ResponseFormat(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('ResponseFormat', value)<|docstring|>Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Can be set to xml or json. Defaults to json.)<|endoftext|> |
53591498f0064a034fdbd1b969afb8d9a359103bf21a5dcf7167ef1296eced5d | def set_SecretKey(self, value):
'\n Set the value of the SecretKey input for this Choreo. ((required, string) The Secret Key provided by Tumblr (AKA the OAuth Consumer Secret).)\n '
super(UnfollowUserInputSet, self)._set_input('SecretKey', value) | Set the value of the SecretKey input for this Choreo. ((required, string) The Secret Key provided by Tumblr (AKA the OAuth Consumer Secret).) | temboo/core/Library/Tumblr/User/UnfollowUser.py | set_SecretKey | jordanemedlock/psychtruths | 7 | python | def set_SecretKey(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('SecretKey', value) | def set_SecretKey(self, value):
'\n \n '
super(UnfollowUserInputSet, self)._set_input('SecretKey', value)<|docstring|>Set the value of the SecretKey input for this Choreo. ((required, string) The Secret Key provided by Tumblr (AKA the OAuth Consumer Secret).)<|endoftext|> |
9f8107ed827e18338155e78c064592306419117f83fa532c7eef228e7a3428ba | def get_Response(self):
'\n Retrieve the value for the "Response" output from this Choreo execution. (The response from Tumblr. Default is JSON, can be set to XML by entering \'xml\' in ResponseFormat.)\n '
return self._output.get('Response', None) | Retrieve the value for the "Response" output from this Choreo execution. (The response from Tumblr. Default is JSON, can be set to XML by entering 'xml' in ResponseFormat.) | temboo/core/Library/Tumblr/User/UnfollowUser.py | get_Response | jordanemedlock/psychtruths | 7 | python | def get_Response(self):
'\n Retrieve the value for the "Response" output from this Choreo execution. (The response from Tumblr. Default is JSON, can be set to XML by entering \'xml\' in ResponseFormat.)\n '
return self._output.get('Response', None) | def get_Response(self):
'\n Retrieve the value for the "Response" output from this Choreo execution. (The response from Tumblr. Default is JSON, can be set to XML by entering \'xml\' in ResponseFormat.)\n '
return self._output.get('Response', None)<|docstring|>Retrieve the value for the "Response" output from this Choreo execution. (The response from Tumblr. Default is JSON, can be set to XML by entering 'xml' in ResponseFormat.)<|endoftext|> |
7c4cfa20ac6a97284247d414959664ddc989f5df7763151945bbcc85edeb55ff | def create_vggface_network(input_shape=None, **kwargs):
'VGGFace extractor.\n '
input_a = keras.layers.Input(shape=input_shape)
input_b = keras.layers.Input(shape=input_shape)
vgg = VGGFace(include_top=False, input_shape=input_shape)
for layer in vgg.layers:
if (not (layer.name.startswith('conv5') or (layer.name == 'pool5'))):
layer.trainable = False
last_layer = vgg.layers[(- 1)].output
flatten = keras.layers.Flatten()(last_layer)
feature_extractor = keras.models.Model(vgg.input, flatten)
features_a = feature_extractor(input_a)
features_b = feature_extractor(input_b)
embeddings = [features_a, features_b]
distance = keras.layers.Lambda(euclidean_distance, output_shape=get_output_shape, name='distance')(embeddings)
model = keras.models.Model([input_a, input_b], distance)
return model | VGGFace extractor. | models/vggface.py | create_vggface_network | vitalwarley/vision | 0 | python | def create_vggface_network(input_shape=None, **kwargs):
'\n '
input_a = keras.layers.Input(shape=input_shape)
input_b = keras.layers.Input(shape=input_shape)
vgg = VGGFace(include_top=False, input_shape=input_shape)
for layer in vgg.layers:
if (not (layer.name.startswith('conv5') or (layer.name == 'pool5'))):
layer.trainable = False
last_layer = vgg.layers[(- 1)].output
flatten = keras.layers.Flatten()(last_layer)
feature_extractor = keras.models.Model(vgg.input, flatten)
features_a = feature_extractor(input_a)
features_b = feature_extractor(input_b)
embeddings = [features_a, features_b]
distance = keras.layers.Lambda(euclidean_distance, output_shape=get_output_shape, name='distance')(embeddings)
model = keras.models.Model([input_a, input_b], distance)
return model | def create_vggface_network(input_shape=None, **kwargs):
'\n '
input_a = keras.layers.Input(shape=input_shape)
input_b = keras.layers.Input(shape=input_shape)
vgg = VGGFace(include_top=False, input_shape=input_shape)
for layer in vgg.layers:
if (not (layer.name.startswith('conv5') or (layer.name == 'pool5'))):
layer.trainable = False
last_layer = vgg.layers[(- 1)].output
flatten = keras.layers.Flatten()(last_layer)
feature_extractor = keras.models.Model(vgg.input, flatten)
features_a = feature_extractor(input_a)
features_b = feature_extractor(input_b)
embeddings = [features_a, features_b]
distance = keras.layers.Lambda(euclidean_distance, output_shape=get_output_shape, name='distance')(embeddings)
model = keras.models.Model([input_a, input_b], distance)
return model<|docstring|>VGGFace extractor.<|endoftext|> |
e4087e6037e6d408577ffbe86d7d824feb173dac02e339aa64667bb6fd069786 | def test_get_urls_method(self):
'\n Test get_urls method on the News class\n '
assert (NewsApp().get_urls() == ['news.urls']) | Test get_urls method on the News class | news/tests/test_cms_apps.py | test_get_urls_method | giantmade/giant-news | 2 | python | def test_get_urls_method(self):
'\n \n '
assert (NewsApp().get_urls() == ['news.urls']) | def test_get_urls_method(self):
'\n \n '
assert (NewsApp().get_urls() == ['news.urls'])<|docstring|>Test get_urls method on the News class<|endoftext|> |
a9c438977d06a02e18c9983ad9e3d2233b9091fd5c13e9e313d318277d97e2b8 | def combine_futures(futures: typing.Sequence[Future]) -> Future:
'Combines set of Futures.\n\n It waits for the completion of the all input Futures regardless\n of their output.\n\n The returned Future completes with the list of the results of the input\n Futures, respecting the input order.\n\n If one of the input Futures completes exceptionally, the returned\n Future also completes exceptionally. In case of multiple exceptional\n completions, the returned Future will be completed with the first\n exceptional result.\n\n Args:\n futures: List of Futures to be combined.\n\n Returns:\n Result of the combination.\n '
count = len(futures)
results = ([None] * count)
if (count == 0):
return ImmediateFuture(results)
completed = AtomicInteger()
combined = Future()
errors = []
def done(future, index):
if future.is_success():
results[index] = future.result()
elif (not errors):
errors.append((future.exception(), future.traceback()))
if (count == completed.increment_and_get()):
if errors:
(first_exception, first_traceback) = errors[0]
combined.set_exception(first_exception, first_traceback)
else:
combined.set_result(results)
for (index, future) in enumerate(futures):
future.add_done_callback((lambda f, captured_index=index: done(f, captured_index)))
return combined | Combines set of Futures.
It waits for the completion of the all input Futures regardless
of their output.
The returned Future completes with the list of the results of the input
Futures, respecting the input order.
If one of the input Futures completes exceptionally, the returned
Future also completes exceptionally. In case of multiple exceptional
completions, the returned Future will be completed with the first
exceptional result.
Args:
futures: List of Futures to be combined.
Returns:
Result of the combination. | hazelcast/future.py | combine_futures | esra-sengul/hazelcast-python-client | 0 | python | def combine_futures(futures: typing.Sequence[Future]) -> Future:
'Combines set of Futures.\n\n It waits for the completion of the all input Futures regardless\n of their output.\n\n The returned Future completes with the list of the results of the input\n Futures, respecting the input order.\n\n If one of the input Futures completes exceptionally, the returned\n Future also completes exceptionally. In case of multiple exceptional\n completions, the returned Future will be completed with the first\n exceptional result.\n\n Args:\n futures: List of Futures to be combined.\n\n Returns:\n Result of the combination.\n '
count = len(futures)
results = ([None] * count)
if (count == 0):
return ImmediateFuture(results)
completed = AtomicInteger()
combined = Future()
errors = []
def done(future, index):
if future.is_success():
results[index] = future.result()
elif (not errors):
errors.append((future.exception(), future.traceback()))
if (count == completed.increment_and_get()):
if errors:
(first_exception, first_traceback) = errors[0]
combined.set_exception(first_exception, first_traceback)
else:
combined.set_result(results)
for (index, future) in enumerate(futures):
future.add_done_callback((lambda f, captured_index=index: done(f, captured_index)))
return combined | def combine_futures(futures: typing.Sequence[Future]) -> Future:
'Combines set of Futures.\n\n It waits for the completion of the all input Futures regardless\n of their output.\n\n The returned Future completes with the list of the results of the input\n Futures, respecting the input order.\n\n If one of the input Futures completes exceptionally, the returned\n Future also completes exceptionally. In case of multiple exceptional\n completions, the returned Future will be completed with the first\n exceptional result.\n\n Args:\n futures: List of Futures to be combined.\n\n Returns:\n Result of the combination.\n '
count = len(futures)
results = ([None] * count)
if (count == 0):
return ImmediateFuture(results)
completed = AtomicInteger()
combined = Future()
errors = []
def done(future, index):
if future.is_success():
results[index] = future.result()
elif (not errors):
errors.append((future.exception(), future.traceback()))
if (count == completed.increment_and_get()):
if errors:
(first_exception, first_traceback) = errors[0]
combined.set_exception(first_exception, first_traceback)
else:
combined.set_result(results)
for (index, future) in enumerate(futures):
future.add_done_callback((lambda f, captured_index=index: done(f, captured_index)))
return combined<|docstring|>Combines set of Futures.
It waits for the completion of the all input Futures regardless
of their output.
The returned Future completes with the list of the results of the input
Futures, respecting the input order.
If one of the input Futures completes exceptionally, the returned
Future also completes exceptionally. In case of multiple exceptional
completions, the returned Future will be completed with the first
exceptional result.
Args:
futures: List of Futures to be combined.
Returns:
Result of the combination.<|endoftext|> |
7b5e035d6c505b1538c4c85fb6f3bf4f83c999fba83852ac0f82630cafd098c9 | def set_result(self, result: ResultType) -> None:
'Sets the result of the Future.\n\n Args:\n result: Result of the Future.\n '
self._result = result
self._event.set()
self._invoke_callbacks() | Sets the result of the Future.
Args:
result: Result of the Future. | hazelcast/future.py | set_result | esra-sengul/hazelcast-python-client | 0 | python | def set_result(self, result: ResultType) -> None:
'Sets the result of the Future.\n\n Args:\n result: Result of the Future.\n '
self._result = result
self._event.set()
self._invoke_callbacks() | def set_result(self, result: ResultType) -> None:
'Sets the result of the Future.\n\n Args:\n result: Result of the Future.\n '
self._result = result
self._event.set()
self._invoke_callbacks()<|docstring|>Sets the result of the Future.
Args:
result: Result of the Future.<|endoftext|> |
b88049a8dd2036f12c0df0a5766305d542242080dd4872811736d4a7d61c46a0 | def set_exception(self, exception: Exception, traceback: types.TracebackType=None) -> None:
'Sets the exception for this Future in case of errors.\n\n Args:\n exception: Exception to raise in case of error.\n traceback: Traceback of the exception.\n '
if (not isinstance(exception, BaseException)):
raise RuntimeError('Exception must be of BaseException type')
self._exception = exception
self._traceback = traceback
self._event.set()
self._invoke_callbacks() | Sets the exception for this Future in case of errors.
Args:
exception: Exception to raise in case of error.
traceback: Traceback of the exception. | hazelcast/future.py | set_exception | esra-sengul/hazelcast-python-client | 0 | python | def set_exception(self, exception: Exception, traceback: types.TracebackType=None) -> None:
'Sets the exception for this Future in case of errors.\n\n Args:\n exception: Exception to raise in case of error.\n traceback: Traceback of the exception.\n '
if (not isinstance(exception, BaseException)):
raise RuntimeError('Exception must be of BaseException type')
self._exception = exception
self._traceback = traceback
self._event.set()
self._invoke_callbacks() | def set_exception(self, exception: Exception, traceback: types.TracebackType=None) -> None:
'Sets the exception for this Future in case of errors.\n\n Args:\n exception: Exception to raise in case of error.\n traceback: Traceback of the exception.\n '
if (not isinstance(exception, BaseException)):
raise RuntimeError('Exception must be of BaseException type')
self._exception = exception
self._traceback = traceback
self._event.set()
self._invoke_callbacks()<|docstring|>Sets the exception for this Future in case of errors.
Args:
exception: Exception to raise in case of error.
traceback: Traceback of the exception.<|endoftext|> |
44c9d4603d3d90ff734a088e419dafa8c4ef42a1e08bdda159ad923b918232bd | def result(self) -> ResultType:
'Returns the result of the Future, which makes the call synchronous\n if the result has not been computed yet.\n\n Returns:\n Result of the Future.\n '
self._reactor_check()
self._event.wait()
if self._exception:
re_raise(self._exception, self._traceback)
return self._result | Returns the result of the Future, which makes the call synchronous
if the result has not been computed yet.
Returns:
Result of the Future. | hazelcast/future.py | result | esra-sengul/hazelcast-python-client | 0 | python | def result(self) -> ResultType:
'Returns the result of the Future, which makes the call synchronous\n if the result has not been computed yet.\n\n Returns:\n Result of the Future.\n '
self._reactor_check()
self._event.wait()
if self._exception:
re_raise(self._exception, self._traceback)
return self._result | def result(self) -> ResultType:
'Returns the result of the Future, which makes the call synchronous\n if the result has not been computed yet.\n\n Returns:\n Result of the Future.\n '
self._reactor_check()
self._event.wait()
if self._exception:
re_raise(self._exception, self._traceback)
return self._result<|docstring|>Returns the result of the Future, which makes the call synchronous
if the result has not been computed yet.
Returns:
Result of the Future.<|endoftext|> |
920a1aa38fa69b1476efd8402446ceb17ca9db7de7375ebc84dbc9b37f37a251 | def is_success(self) -> bool:
'Determines whether the result can be successfully computed or not.'
return (self._result is not _SENTINEL) | Determines whether the result can be successfully computed or not. | hazelcast/future.py | is_success | esra-sengul/hazelcast-python-client | 0 | python | def is_success(self) -> bool:
return (self._result is not _SENTINEL) | def is_success(self) -> bool:
return (self._result is not _SENTINEL)<|docstring|>Determines whether the result can be successfully computed or not.<|endoftext|> |
d665b2f5defc5da1b016127bc51ce7a52a7b3d879ff938c3c3f3fe0864ee6bbc | def done(self) -> bool:
'Determines whether the result is computed or not.\n\n Returns:\n ``True`` if the result is computed, ``False`` otherwise.\n '
return self._event.is_set() | Determines whether the result is computed or not.
Returns:
``True`` if the result is computed, ``False`` otherwise. | hazelcast/future.py | done | esra-sengul/hazelcast-python-client | 0 | python | def done(self) -> bool:
'Determines whether the result is computed or not.\n\n Returns:\n ``True`` if the result is computed, ``False`` otherwise.\n '
return self._event.is_set() | def done(self) -> bool:
'Determines whether the result is computed or not.\n\n Returns:\n ``True`` if the result is computed, ``False`` otherwise.\n '
return self._event.is_set()<|docstring|>Determines whether the result is computed or not.
Returns:
``True`` if the result is computed, ``False`` otherwise.<|endoftext|> |
09df2c699200e078935f24604568d7e9e251777c423ec09a776045bb881ec12f | def running(self) -> bool:
'Determines whether the asynchronous call, the computation is still\n running or not.\n\n Returns:\n ``True`` if the result is being computed, ``False`` otherwise.\n '
return (not self.done()) | Determines whether the asynchronous call, the computation is still
running or not.
Returns:
``True`` if the result is being computed, ``False`` otherwise. | hazelcast/future.py | running | esra-sengul/hazelcast-python-client | 0 | python | def running(self) -> bool:
'Determines whether the asynchronous call, the computation is still\n running or not.\n\n Returns:\n ``True`` if the result is being computed, ``False`` otherwise.\n '
return (not self.done()) | def running(self) -> bool:
'Determines whether the asynchronous call, the computation is still\n running or not.\n\n Returns:\n ``True`` if the result is being computed, ``False`` otherwise.\n '
return (not self.done())<|docstring|>Determines whether the asynchronous call, the computation is still
running or not.
Returns:
``True`` if the result is being computed, ``False`` otherwise.<|endoftext|> |
dc63e5cb6eb4274f434428b7927473fd70fc44086845728a3efd21096be5720f | def exception(self) -> typing.Optional[Exception]:
'Returns the exceptional result, if any.\n\n Returns:\n Exceptional result of this Future.\n '
self._reactor_check()
self._event.wait()
return self._exception | Returns the exceptional result, if any.
Returns:
Exceptional result of this Future. | hazelcast/future.py | exception | esra-sengul/hazelcast-python-client | 0 | python | def exception(self) -> typing.Optional[Exception]:
'Returns the exceptional result, if any.\n\n Returns:\n Exceptional result of this Future.\n '
self._reactor_check()
self._event.wait()
return self._exception | def exception(self) -> typing.Optional[Exception]:
'Returns the exceptional result, if any.\n\n Returns:\n Exceptional result of this Future.\n '
self._reactor_check()
self._event.wait()
return self._exception<|docstring|>Returns the exceptional result, if any.
Returns:
Exceptional result of this Future.<|endoftext|> |
0b291af87bb98d0e908b7fd1bf6e010e6392e0997ceaad9ff9b6cddb7c093611 | def traceback(self) -> typing.Optional[types.TracebackType]:
'Traceback of the exception.'
self._reactor_check()
self._event.wait()
return self._traceback | Traceback of the exception. | hazelcast/future.py | traceback | esra-sengul/hazelcast-python-client | 0 | python | def traceback(self) -> typing.Optional[types.TracebackType]:
self._reactor_check()
self._event.wait()
return self._traceback | def traceback(self) -> typing.Optional[types.TracebackType]:
self._reactor_check()
self._event.wait()
return self._traceback<|docstring|>Traceback of the exception.<|endoftext|> |
8d9933f579e28a04f37b5d485a7edb58178b4fde2331d20c0e6d44954861fd09 | def continue_with(self, continuation_func: typing.Callable[(..., typing.Any)], *args: typing.Any) -> 'Future':
'Create a continuation that executes when the Future is completed.\n\n Args:\n continuation_func: A function which takes the Future as the only\n parameter. Return value of the function will be set as the\n result of the continuation future. If the return value of the\n function is another Future, it will be chained to the returned\n Future.\n *args: Arguments to be passed into ``continuation_function``.\n\n Returns:\n A new Future which will be completed when the continuation is done.\n '
future = Future()
def callback(f):
try:
result = continuation_func(f, *args)
if isinstance(result, Future):
future._chain(result)
else:
future.set_result(result)
except:
(exception, traceback) = sys.exc_info()[1:]
future.set_exception(exception, traceback)
self.add_done_callback(callback)
return future | Create a continuation that executes when the Future is completed.
Args:
continuation_func: A function which takes the Future as the only
parameter. Return value of the function will be set as the
result of the continuation future. If the return value of the
function is another Future, it will be chained to the returned
Future.
*args: Arguments to be passed into ``continuation_function``.
Returns:
A new Future which will be completed when the continuation is done. | hazelcast/future.py | continue_with | esra-sengul/hazelcast-python-client | 0 | python | def continue_with(self, continuation_func: typing.Callable[(..., typing.Any)], *args: typing.Any) -> 'Future':
'Create a continuation that executes when the Future is completed.\n\n Args:\n continuation_func: A function which takes the Future as the only\n parameter. Return value of the function will be set as the\n result of the continuation future. If the return value of the\n function is another Future, it will be chained to the returned\n Future.\n *args: Arguments to be passed into ``continuation_function``.\n\n Returns:\n A new Future which will be completed when the continuation is done.\n '
future = Future()
def callback(f):
try:
result = continuation_func(f, *args)
if isinstance(result, Future):
future._chain(result)
else:
future.set_result(result)
except:
(exception, traceback) = sys.exc_info()[1:]
future.set_exception(exception, traceback)
self.add_done_callback(callback)
return future | def continue_with(self, continuation_func: typing.Callable[(..., typing.Any)], *args: typing.Any) -> 'Future':
'Create a continuation that executes when the Future is completed.\n\n Args:\n continuation_func: A function which takes the Future as the only\n parameter. Return value of the function will be set as the\n result of the continuation future. If the return value of the\n function is another Future, it will be chained to the returned\n Future.\n *args: Arguments to be passed into ``continuation_function``.\n\n Returns:\n A new Future which will be completed when the continuation is done.\n '
future = Future()
def callback(f):
try:
result = continuation_func(f, *args)
if isinstance(result, Future):
future._chain(result)
else:
future.set_result(result)
except:
(exception, traceback) = sys.exc_info()[1:]
future.set_exception(exception, traceback)
self.add_done_callback(callback)
return future<|docstring|>Create a continuation that executes when the Future is completed.
Args:
continuation_func: A function which takes the Future as the only
parameter. Return value of the function will be set as the
result of the continuation future. If the return value of the
function is another Future, it will be chained to the returned
Future.
*args: Arguments to be passed into ``continuation_function``.
Returns:
A new Future which will be completed when the continuation is done.<|endoftext|> |
213a1799abb30c605faaaaf01dea87c28a7e23c8051d800629358f2cf52704d0 | @swagger_auto_schema(query_serializer=NodeQuery, responses=with_common_response(with_common_response({status.HTTP_200_OK: NodeListSerializer})))
def list(self, request, *args, **kwargs):
'\n List Nodes\n\n Filter nodes with query parameters.\n '
serializer = NodeQuery(data=request.GET)
if serializer.is_valid(raise_exception=True):
page = serializer.validated_data.get('page')
per_page = serializer.validated_data.get('per_page')
node_type = serializer.validated_data.get('type')
name = serializer.validated_data.get('name')
network_type = serializer.validated_data.get('network_type')
network_version = serializer.validated_data.get('network_version')
agent_id = serializer.validated_data.get('agent_id')
if ((agent_id is not None) and (not request.user.is_operator)):
raise PermissionDenied
query_filter = {}
if node_type:
query_filter.update({'type': node_type})
if name:
query_filter.update({'name__icontains': name})
if network_type:
query_filter.update({'network_type': network_type})
if network_version:
query_filter.update({'network_version': network_version})
if request.user.is_administrator:
query_filter.update({'organization': request.user.organization})
elif request.user.is_common_user:
query_filter.update({'user': request.user})
if agent_id:
query_filter.update({'agent__id': agent_id})
nodes = Node.objects.filter(**query_filter)
p = Paginator(nodes, per_page)
nodes = p.page(page)
response = NodeListSerializer({'total': p.count, 'data': nodes})
return Response(data=response.data, status=status.HTTP_200_OK) | List Nodes
Filter nodes with query parameters. | src/api-engine/api/routes/node/views.py | list | hello2mao/cello | 1 | python | @swagger_auto_schema(query_serializer=NodeQuery, responses=with_common_response(with_common_response({status.HTTP_200_OK: NodeListSerializer})))
def list(self, request, *args, **kwargs):
'\n List Nodes\n\n Filter nodes with query parameters.\n '
serializer = NodeQuery(data=request.GET)
if serializer.is_valid(raise_exception=True):
page = serializer.validated_data.get('page')
per_page = serializer.validated_data.get('per_page')
node_type = serializer.validated_data.get('type')
name = serializer.validated_data.get('name')
network_type = serializer.validated_data.get('network_type')
network_version = serializer.validated_data.get('network_version')
agent_id = serializer.validated_data.get('agent_id')
if ((agent_id is not None) and (not request.user.is_operator)):
raise PermissionDenied
query_filter = {}
if node_type:
query_filter.update({'type': node_type})
if name:
query_filter.update({'name__icontains': name})
if network_type:
query_filter.update({'network_type': network_type})
if network_version:
query_filter.update({'network_version': network_version})
if request.user.is_administrator:
query_filter.update({'organization': request.user.organization})
elif request.user.is_common_user:
query_filter.update({'user': request.user})
if agent_id:
query_filter.update({'agent__id': agent_id})
nodes = Node.objects.filter(**query_filter)
p = Paginator(nodes, per_page)
nodes = p.page(page)
response = NodeListSerializer({'total': p.count, 'data': nodes})
return Response(data=response.data, status=status.HTTP_200_OK) | @swagger_auto_schema(query_serializer=NodeQuery, responses=with_common_response(with_common_response({status.HTTP_200_OK: NodeListSerializer})))
def list(self, request, *args, **kwargs):
'\n List Nodes\n\n Filter nodes with query parameters.\n '
serializer = NodeQuery(data=request.GET)
if serializer.is_valid(raise_exception=True):
page = serializer.validated_data.get('page')
per_page = serializer.validated_data.get('per_page')
node_type = serializer.validated_data.get('type')
name = serializer.validated_data.get('name')
network_type = serializer.validated_data.get('network_type')
network_version = serializer.validated_data.get('network_version')
agent_id = serializer.validated_data.get('agent_id')
if ((agent_id is not None) and (not request.user.is_operator)):
raise PermissionDenied
query_filter = {}
if node_type:
query_filter.update({'type': node_type})
if name:
query_filter.update({'name__icontains': name})
if network_type:
query_filter.update({'network_type': network_type})
if network_version:
query_filter.update({'network_version': network_version})
if request.user.is_administrator:
query_filter.update({'organization': request.user.organization})
elif request.user.is_common_user:
query_filter.update({'user': request.user})
if agent_id:
query_filter.update({'agent__id': agent_id})
nodes = Node.objects.filter(**query_filter)
p = Paginator(nodes, per_page)
nodes = p.page(page)
response = NodeListSerializer({'total': p.count, 'data': nodes})
return Response(data=response.data, status=status.HTTP_200_OK)<|docstring|>List Nodes
Filter nodes with query parameters.<|endoftext|> |
86016e7a85a10fe7b00c78beaad0ef573259c337442eacd532528137ac4bf36c | @swagger_auto_schema(request_body=NodeCreateBody, responses=with_common_response({status.HTTP_201_CREATED: NodeIDSerializer}))
def create(self, request):
'\n Create Node\n\n Create node\n '
serializer = NodeCreateBody(data=request.data)
if serializer.is_valid(raise_exception=True):
self._validate_organization(request)
agent_type = serializer.validated_data.get('agent_type')
network_type = serializer.validated_data.get('network_type')
network_version = serializer.validated_data.get('network_version')
agent = serializer.validated_data.get('agent')
node_type = serializer.validated_data.get('type')
ca = serializer.validated_data.get('ca')
peer = serializer.validated_data.get('peer')
if (agent is None):
available_agents = Agent.objects.annotate(network_num=Count('node__network')).annotate(node_num=Count('node')).filter(schedulable=True, type=agent_type, network_num__lt=F('capacity'), node_num__lt=F('node_capacity'), organization=request.user.organization).order_by('node_num')
if (len(available_agents) > 0):
agent = available_agents[0]
else:
raise NoResource
else:
if (not request.user.is_operator):
raise PermissionDenied
node_count = Node.objects.filter(agent=agent).count()
if ((node_count >= agent.node_capacity) or (not agent.schedulable)):
raise NoResource
fabric_ca = None
fabric_peer = None
peer_ca_list = []
if (node_type == FabricNodeType.Ca.name.lower()):
fabric_ca = self._save_fabric_ca(request, ca)
elif (node_type == FabricNodeType.Peer.name.lower()):
(fabric_peer, peer_ca_list) = self._save_fabric_peer(request, peer)
node = Node(network_type=network_type, agent=agent, network_version=network_version, user=request.user, organization=request.user.organization, type=node_type, ca=fabric_ca, peer=fabric_peer)
node.save()
agent_config_file = request.build_absolute_uri(agent.config_file.url)
node_detail_url = reverse('node-detail', args=[str(node.id)])
node_detail_url = request.build_absolute_uri(node_detail_url)
node_file_upload_api = reverse('node-files', args=[str(node.id)])
node_file_upload_api = request.build_absolute_uri(node_file_upload_api)
operate_node.delay(str(node.id), AgentOperation.Create.value, agent_config_file=agent_config_file, node_detail_url=node_detail_url, node_file_upload_api=node_file_upload_api, peer_ca_list=json.dumps(peer_ca_list))
response = NodeIDSerializer({'id': str(node.id)})
return Response(response.data, status=status.HTTP_201_CREATED) | Create Node
Create node | src/api-engine/api/routes/node/views.py | create | hello2mao/cello | 1 | python | @swagger_auto_schema(request_body=NodeCreateBody, responses=with_common_response({status.HTTP_201_CREATED: NodeIDSerializer}))
def create(self, request):
'\n Create Node\n\n Create node\n '
serializer = NodeCreateBody(data=request.data)
if serializer.is_valid(raise_exception=True):
self._validate_organization(request)
agent_type = serializer.validated_data.get('agent_type')
network_type = serializer.validated_data.get('network_type')
network_version = serializer.validated_data.get('network_version')
agent = serializer.validated_data.get('agent')
node_type = serializer.validated_data.get('type')
ca = serializer.validated_data.get('ca')
peer = serializer.validated_data.get('peer')
if (agent is None):
available_agents = Agent.objects.annotate(network_num=Count('node__network')).annotate(node_num=Count('node')).filter(schedulable=True, type=agent_type, network_num__lt=F('capacity'), node_num__lt=F('node_capacity'), organization=request.user.organization).order_by('node_num')
if (len(available_agents) > 0):
agent = available_agents[0]
else:
raise NoResource
else:
if (not request.user.is_operator):
raise PermissionDenied
node_count = Node.objects.filter(agent=agent).count()
if ((node_count >= agent.node_capacity) or (not agent.schedulable)):
raise NoResource
fabric_ca = None
fabric_peer = None
peer_ca_list = []
if (node_type == FabricNodeType.Ca.name.lower()):
fabric_ca = self._save_fabric_ca(request, ca)
elif (node_type == FabricNodeType.Peer.name.lower()):
(fabric_peer, peer_ca_list) = self._save_fabric_peer(request, peer)
node = Node(network_type=network_type, agent=agent, network_version=network_version, user=request.user, organization=request.user.organization, type=node_type, ca=fabric_ca, peer=fabric_peer)
node.save()
agent_config_file = request.build_absolute_uri(agent.config_file.url)
node_detail_url = reverse('node-detail', args=[str(node.id)])
node_detail_url = request.build_absolute_uri(node_detail_url)
node_file_upload_api = reverse('node-files', args=[str(node.id)])
node_file_upload_api = request.build_absolute_uri(node_file_upload_api)
operate_node.delay(str(node.id), AgentOperation.Create.value, agent_config_file=agent_config_file, node_detail_url=node_detail_url, node_file_upload_api=node_file_upload_api, peer_ca_list=json.dumps(peer_ca_list))
response = NodeIDSerializer({'id': str(node.id)})
return Response(response.data, status=status.HTTP_201_CREATED) | @swagger_auto_schema(request_body=NodeCreateBody, responses=with_common_response({status.HTTP_201_CREATED: NodeIDSerializer}))
def create(self, request):
'\n Create Node\n\n Create node\n '
serializer = NodeCreateBody(data=request.data)
if serializer.is_valid(raise_exception=True):
self._validate_organization(request)
agent_type = serializer.validated_data.get('agent_type')
network_type = serializer.validated_data.get('network_type')
network_version = serializer.validated_data.get('network_version')
agent = serializer.validated_data.get('agent')
node_type = serializer.validated_data.get('type')
ca = serializer.validated_data.get('ca')
peer = serializer.validated_data.get('peer')
if (agent is None):
available_agents = Agent.objects.annotate(network_num=Count('node__network')).annotate(node_num=Count('node')).filter(schedulable=True, type=agent_type, network_num__lt=F('capacity'), node_num__lt=F('node_capacity'), organization=request.user.organization).order_by('node_num')
if (len(available_agents) > 0):
agent = available_agents[0]
else:
raise NoResource
else:
if (not request.user.is_operator):
raise PermissionDenied
node_count = Node.objects.filter(agent=agent).count()
if ((node_count >= agent.node_capacity) or (not agent.schedulable)):
raise NoResource
fabric_ca = None
fabric_peer = None
peer_ca_list = []
if (node_type == FabricNodeType.Ca.name.lower()):
fabric_ca = self._save_fabric_ca(request, ca)
elif (node_type == FabricNodeType.Peer.name.lower()):
(fabric_peer, peer_ca_list) = self._save_fabric_peer(request, peer)
node = Node(network_type=network_type, agent=agent, network_version=network_version, user=request.user, organization=request.user.organization, type=node_type, ca=fabric_ca, peer=fabric_peer)
node.save()
agent_config_file = request.build_absolute_uri(agent.config_file.url)
node_detail_url = reverse('node-detail', args=[str(node.id)])
node_detail_url = request.build_absolute_uri(node_detail_url)
node_file_upload_api = reverse('node-files', args=[str(node.id)])
node_file_upload_api = request.build_absolute_uri(node_file_upload_api)
operate_node.delay(str(node.id), AgentOperation.Create.value, agent_config_file=agent_config_file, node_detail_url=node_detail_url, node_file_upload_api=node_file_upload_api, peer_ca_list=json.dumps(peer_ca_list))
response = NodeIDSerializer({'id': str(node.id)})
return Response(response.data, status=status.HTTP_201_CREATED)<|docstring|>Create Node
Create node<|endoftext|> |
5b50f4ece848282f180cc317e7fcd6e7019b5b965bd0b9cf301a5fad7a68b466 | @swagger_auto_schema(methods=['post'], query_serializer=NodeOperationSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['post'], detail=True, url_path='operations')
def operate(self, request, pk=None):
'\n Operate Node\n\n Do some operation on node, start/stop/restart\n '
pass | Operate Node
Do some operation on node, start/stop/restart | src/api-engine/api/routes/node/views.py | operate | hello2mao/cello | 1 | python | @swagger_auto_schema(methods=['post'], query_serializer=NodeOperationSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['post'], detail=True, url_path='operations')
def operate(self, request, pk=None):
'\n Operate Node\n\n Do some operation on node, start/stop/restart\n '
pass | @swagger_auto_schema(methods=['post'], query_serializer=NodeOperationSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['post'], detail=True, url_path='operations')
def operate(self, request, pk=None):
'\n Operate Node\n\n Do some operation on node, start/stop/restart\n '
pass<|docstring|>Operate Node
Do some operation on node, start/stop/restart<|endoftext|> |
abd9c0c87c77073aa7ec5c1afe7e0656e6b232ad620a676cd02563943fce0034 | @swagger_auto_schema(responses=with_common_response({status.HTTP_204_NO_CONTENT: 'No Content'}))
def destroy(self, request, pk=None):
'\n Delete Node\n\n Delete node\n '
try:
if request.user.is_superuser:
node = Node.objects.get(id=pk)
else:
node = Node.objects.get(id=pk, organization=request.user.organization)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if (node.status != NodeStatus.Deleting.name.lower()):
if (node.status not in [NodeStatus.Error.name.lower(), NodeStatus.Deleted.name.lower()]):
node.status = NodeStatus.Deleting.name.lower()
node.save()
agent_config_file = request.build_absolute_uri(node.agent.config_file.url)
node_detail_url = reverse('node-detail', args=[pk])
node_detail_url = request.build_absolute_uri(node_detail_url)
operate_node.delay(str(node.id), AgentOperation.Delete.value, agent_config_file=agent_config_file, node_detail_url=node_detail_url)
else:
node.delete()
return Response(status=status.HTTP_204_NO_CONTENT) | Delete Node
Delete node | src/api-engine/api/routes/node/views.py | destroy | hello2mao/cello | 1 | python | @swagger_auto_schema(responses=with_common_response({status.HTTP_204_NO_CONTENT: 'No Content'}))
def destroy(self, request, pk=None):
'\n Delete Node\n\n Delete node\n '
try:
if request.user.is_superuser:
node = Node.objects.get(id=pk)
else:
node = Node.objects.get(id=pk, organization=request.user.organization)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if (node.status != NodeStatus.Deleting.name.lower()):
if (node.status not in [NodeStatus.Error.name.lower(), NodeStatus.Deleted.name.lower()]):
node.status = NodeStatus.Deleting.name.lower()
node.save()
agent_config_file = request.build_absolute_uri(node.agent.config_file.url)
node_detail_url = reverse('node-detail', args=[pk])
node_detail_url = request.build_absolute_uri(node_detail_url)
operate_node.delay(str(node.id), AgentOperation.Delete.value, agent_config_file=agent_config_file, node_detail_url=node_detail_url)
else:
node.delete()
return Response(status=status.HTTP_204_NO_CONTENT) | @swagger_auto_schema(responses=with_common_response({status.HTTP_204_NO_CONTENT: 'No Content'}))
def destroy(self, request, pk=None):
'\n Delete Node\n\n Delete node\n '
try:
if request.user.is_superuser:
node = Node.objects.get(id=pk)
else:
node = Node.objects.get(id=pk, organization=request.user.organization)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if (node.status != NodeStatus.Deleting.name.lower()):
if (node.status not in [NodeStatus.Error.name.lower(), NodeStatus.Deleted.name.lower()]):
node.status = NodeStatus.Deleting.name.lower()
node.save()
agent_config_file = request.build_absolute_uri(node.agent.config_file.url)
node_detail_url = reverse('node-detail', args=[pk])
node_detail_url = request.build_absolute_uri(node_detail_url)
operate_node.delay(str(node.id), AgentOperation.Delete.value, agent_config_file=agent_config_file, node_detail_url=node_detail_url)
else:
node.delete()
return Response(status=status.HTTP_204_NO_CONTENT)<|docstring|>Delete Node
Delete node<|endoftext|> |
9f92a01d69f84f47de080b6bb584c44532904111e5d9f6efabf8a70e4414a795 | @swagger_auto_schema(operation_id='update node', request_body=NodeUpdateBody, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
def update(self, request, pk=None):
'\n Update Node\n\n Update special node with id.\n '
serializer = NodeUpdateBody(data=request.data)
if serializer.is_valid(raise_exception=True):
node_status = serializer.validated_data.get('status')
ports = serializer.validated_data.get('ports', [])
try:
node = Node.objects.get(id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
node.status = node_status
node.save()
for port_item in ports:
port = Port(external=port_item.get('external'), internal=port_item.get('internal'), node=node)
port.save()
return Response(status=status.HTTP_202_ACCEPTED) | Update Node
Update special node with id. | src/api-engine/api/routes/node/views.py | update | hello2mao/cello | 1 | python | @swagger_auto_schema(operation_id='update node', request_body=NodeUpdateBody, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
def update(self, request, pk=None):
'\n Update Node\n\n Update special node with id.\n '
serializer = NodeUpdateBody(data=request.data)
if serializer.is_valid(raise_exception=True):
node_status = serializer.validated_data.get('status')
ports = serializer.validated_data.get('ports', [])
try:
node = Node.objects.get(id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
node.status = node_status
node.save()
for port_item in ports:
port = Port(external=port_item.get('external'), internal=port_item.get('internal'), node=node)
port.save()
return Response(status=status.HTTP_202_ACCEPTED) | @swagger_auto_schema(operation_id='update node', request_body=NodeUpdateBody, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
def update(self, request, pk=None):
'\n Update Node\n\n Update special node with id.\n '
serializer = NodeUpdateBody(data=request.data)
if serializer.is_valid(raise_exception=True):
node_status = serializer.validated_data.get('status')
ports = serializer.validated_data.get('ports', [])
try:
node = Node.objects.get(id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
node.status = node_status
node.save()
for port_item in ports:
port = Port(external=port_item.get('external'), internal=port_item.get('internal'), node=node)
port.save()
return Response(status=status.HTTP_202_ACCEPTED)<|docstring|>Update Node
Update special node with id.<|endoftext|> |
6ebf6f2bcd7c117c62c14d79277304746fbbddb370d82fa057907f77d7bb344c | @swagger_auto_schema(methods=['post'], request_body=NodeFileCreateSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['post'], detail=True, url_path='files', url_name='files')
def upload_files(self, request, pk=None):
'\n Upload file to node\n\n Upload related files to node\n '
serializer = NodeFileCreateSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
file = serializer.validated_data.get('file')
try:
node = Node.objects.get(id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if node.file:
node.file.delete()
node.file = file
node.save()
return Response(status=status.HTTP_202_ACCEPTED) | Upload file to node
Upload related files to node | src/api-engine/api/routes/node/views.py | upload_files | hello2mao/cello | 1 | python | @swagger_auto_schema(methods=['post'], request_body=NodeFileCreateSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['post'], detail=True, url_path='files', url_name='files')
def upload_files(self, request, pk=None):
'\n Upload file to node\n\n Upload related files to node\n '
serializer = NodeFileCreateSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
file = serializer.validated_data.get('file')
try:
node = Node.objects.get(id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if node.file:
node.file.delete()
node.file = file
node.save()
return Response(status=status.HTTP_202_ACCEPTED) | @swagger_auto_schema(methods=['post'], request_body=NodeFileCreateSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['post'], detail=True, url_path='files', url_name='files')
def upload_files(self, request, pk=None):
'\n Upload file to node\n\n Upload related files to node\n '
serializer = NodeFileCreateSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
file = serializer.validated_data.get('file')
try:
node = Node.objects.get(id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if node.file:
node.file.delete()
node.file = file
node.save()
return Response(status=status.HTTP_202_ACCEPTED)<|docstring|>Upload file to node
Upload related files to node<|endoftext|> |
7821072c70e077ef139dec860a44d0ce3717dab7b83e3b0f997b1dc4be616899 | @swagger_auto_schema(responses=with_common_response(with_common_response({status.HTTP_200_OK: NodeInfoSerializer})))
def retrieve(self, request, pk=None):
'\n Get Node information\n\n Get node detail information.\n '
self._validate_organization(request)
try:
node = Node.objects.get(id=pk, organization=request.user.organization)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if node.file:
node.file = request.build_absolute_uri(node.file.url)
ports = Port.objects.filter(node=node)
node.links = [{'internal_port': port.internal, 'url': ('%s:%s' % (node.agent.ip, port.external))} for port in ports]
response = NodeInfoSerializer(node)
return Response(data=response.data, status=status.HTTP_200_OK) | Get Node information
Get node detail information. | src/api-engine/api/routes/node/views.py | retrieve | hello2mao/cello | 1 | python | @swagger_auto_schema(responses=with_common_response(with_common_response({status.HTTP_200_OK: NodeInfoSerializer})))
def retrieve(self, request, pk=None):
'\n Get Node information\n\n Get node detail information.\n '
self._validate_organization(request)
try:
node = Node.objects.get(id=pk, organization=request.user.organization)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if node.file:
node.file = request.build_absolute_uri(node.file.url)
ports = Port.objects.filter(node=node)
node.links = [{'internal_port': port.internal, 'url': ('%s:%s' % (node.agent.ip, port.external))} for port in ports]
response = NodeInfoSerializer(node)
return Response(data=response.data, status=status.HTTP_200_OK) | @swagger_auto_schema(responses=with_common_response(with_common_response({status.HTTP_200_OK: NodeInfoSerializer})))
def retrieve(self, request, pk=None):
'\n Get Node information\n\n Get node detail information.\n '
self._validate_organization(request)
try:
node = Node.objects.get(id=pk, organization=request.user.organization)
except ObjectDoesNotExist:
raise ResourceNotFound
else:
if node.file:
node.file = request.build_absolute_uri(node.file.url)
ports = Port.objects.filter(node=node)
node.links = [{'internal_port': port.internal, 'url': ('%s:%s' % (node.agent.ip, port.external))} for port in ports]
response = NodeInfoSerializer(node)
return Response(data=response.data, status=status.HTTP_200_OK)<|docstring|>Get Node information
Get node detail information.<|endoftext|> |
856e4684694251cca7e7060ba2973a94c60cb2e98c6aa2dbaac6a129ec59b14e | @swagger_auto_schema(methods=['patch'], request_body=NodeUserPatchSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['patch'], detail=True, url_path='users/(?P<user_pk>[^/.]+)', url_name='patch-user')
def patch_user(self, request, pk=None, user_pk=None):
'\n Patch user status for node\n\n Patch user status for node\n '
serializer = NodeUserPatchSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
try:
node_user = NodeUser.objects.get(id=user_pk, node__id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
node_user.status = serializer.validated_data.get('status')
node_user.save()
return Response(status=status.HTTP_202_ACCEPTED) | Patch user status for node
Patch user status for node | src/api-engine/api/routes/node/views.py | patch_user | hello2mao/cello | 1 | python | @swagger_auto_schema(methods=['patch'], request_body=NodeUserPatchSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['patch'], detail=True, url_path='users/(?P<user_pk>[^/.]+)', url_name='patch-user')
def patch_user(self, request, pk=None, user_pk=None):
'\n Patch user status for node\n\n Patch user status for node\n '
serializer = NodeUserPatchSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
try:
node_user = NodeUser.objects.get(id=user_pk, node__id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
node_user.status = serializer.validated_data.get('status')
node_user.save()
return Response(status=status.HTTP_202_ACCEPTED) | @swagger_auto_schema(methods=['patch'], request_body=NodeUserPatchSerializer, responses=with_common_response({status.HTTP_202_ACCEPTED: 'Accepted'}))
@action(methods=['patch'], detail=True, url_path='users/(?P<user_pk>[^/.]+)', url_name='patch-user')
def patch_user(self, request, pk=None, user_pk=None):
'\n Patch user status for node\n\n Patch user status for node\n '
serializer = NodeUserPatchSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
try:
node_user = NodeUser.objects.get(id=user_pk, node__id=pk)
except ObjectDoesNotExist:
raise ResourceNotFound
node_user.status = serializer.validated_data.get('status')
node_user.save()
return Response(status=status.HTTP_202_ACCEPTED)<|docstring|>Patch user status for node
Patch user status for node<|endoftext|> |
bb12dfe81e548cddd5186e914a2966ecc5895b93d623305a7fba276959301b8d | def make_bridge(start, end, schedule, n=1, constructor=Bridge):
"\n Construct a 'bridge', i.e. a sequence of transition kernels\n\n Parameters\n ----------\n start, end : TransitionKernels\n transition kernels whose stationary distributions are the\n initial and final ensemble\n\n schedule : iterable\n inverse temperature schedule\n\n n : integer > 0\n power to which the intermediate transition kernels will be\n raised\n\n constructor :\n constructor for the bridge\n "
bridge = [constructor(beta, start, end) for beta in schedule]
if (n > 1):
bridge = [T.power(n) for T in bridge]
return bridge | Construct a 'bridge', i.e. a sequence of transition kernels
Parameters
----------
start, end : TransitionKernels
transition kernels whose stationary distributions are the
initial and final ensemble
schedule : iterable
inverse temperature schedule
n : integer > 0
power to which the intermediate transition kernels will be
raised
constructor :
constructor for the bridge | paths/simulate.py | make_bridge | michaelhabeck/paths | 5 | python | def make_bridge(start, end, schedule, n=1, constructor=Bridge):
"\n Construct a 'bridge', i.e. a sequence of transition kernels\n\n Parameters\n ----------\n start, end : TransitionKernels\n transition kernels whose stationary distributions are the\n initial and final ensemble\n\n schedule : iterable\n inverse temperature schedule\n\n n : integer > 0\n power to which the intermediate transition kernels will be\n raised\n\n constructor :\n constructor for the bridge\n "
bridge = [constructor(beta, start, end) for beta in schedule]
if (n > 1):
bridge = [T.power(n) for T in bridge]
return bridge | def make_bridge(start, end, schedule, n=1, constructor=Bridge):
"\n Construct a 'bridge', i.e. a sequence of transition kernels\n\n Parameters\n ----------\n start, end : TransitionKernels\n transition kernels whose stationary distributions are the\n initial and final ensemble\n\n schedule : iterable\n inverse temperature schedule\n\n n : integer > 0\n power to which the intermediate transition kernels will be\n raised\n\n constructor :\n constructor for the bridge\n "
bridge = [constructor(beta, start, end) for beta in schedule]
if (n > 1):
bridge = [T.power(n) for T in bridge]
return bridge<|docstring|>Construct a 'bridge', i.e. a sequence of transition kernels
Parameters
----------
start, end : TransitionKernels
transition kernels whose stationary distributions are the
initial and final ensemble
schedule : iterable
inverse temperature schedule
n : integer > 0
power to which the intermediate transition kernels will be
raised
constructor :
constructor for the bridge<|endoftext|> |
9270197294895bd208179095ed39779658e386726c9e4422b73ed470da998ae2 | def generate_paths(bridge, n_paths=1, store_paths=False):
'\n Run a nonequilibrium simulation by stepping through a sequence\n of Markov perturbations\n \n Parameters\n ----------\n bridge : iterable\n sequence of transition kernels\n\n n_paths : integer\n number of paths that will be simulated\n\n store_paths : boolean\n flag that specifies if the full paths will be return or only\n the final states\n '
X = [bridge[0].stationary.sample(n=int(n_paths))]
for T in bridge[1:]:
x = T(X[(- 1)])
X.append(x)
return np.array(X) | Run a nonequilibrium simulation by stepping through a sequence
of Markov perturbations
Parameters
----------
bridge : iterable
sequence of transition kernels
n_paths : integer
number of paths that will be simulated
store_paths : boolean
flag that specifies if the full paths will be return or only
the final states | paths/simulate.py | generate_paths | michaelhabeck/paths | 5 | python | def generate_paths(bridge, n_paths=1, store_paths=False):
'\n Run a nonequilibrium simulation by stepping through a sequence\n of Markov perturbations\n \n Parameters\n ----------\n bridge : iterable\n sequence of transition kernels\n\n n_paths : integer\n number of paths that will be simulated\n\n store_paths : boolean\n flag that specifies if the full paths will be return or only\n the final states\n '
X = [bridge[0].stationary.sample(n=int(n_paths))]
for T in bridge[1:]:
x = T(X[(- 1)])
X.append(x)
return np.array(X) | def generate_paths(bridge, n_paths=1, store_paths=False):
'\n Run a nonequilibrium simulation by stepping through a sequence\n of Markov perturbations\n \n Parameters\n ----------\n bridge : iterable\n sequence of transition kernels\n\n n_paths : integer\n number of paths that will be simulated\n\n store_paths : boolean\n flag that specifies if the full paths will be return or only\n the final states\n '
X = [bridge[0].stationary.sample(n=int(n_paths))]
for T in bridge[1:]:
x = T(X[(- 1)])
X.append(x)
return np.array(X)<|docstring|>Run a nonequilibrium simulation by stepping through a sequence
of Markov perturbations
Parameters
----------
bridge : iterable
sequence of transition kernels
n_paths : integer
number of paths that will be simulated
store_paths : boolean
flag that specifies if the full paths will be return or only
the final states<|endoftext|> |
7a63c975182d9cc7acadae1279e5ba1385556f4268ccf51827f52c5554c10619 | def simulate(bridge, n_paths=1):
'\n Generate multiple paths from the bridge and compute the work\n Returns log weights (work) and final states (weighted samples\n from the target ensemble).\n '
X = generate_paths(bridge, n_paths)
p = [T.stationary for T in bridge]
W = np.sum([(p[(k + 1)].energy(X[k]) - p[k].energy(X[k])) for k in range((len(bridge) - 1))], 0)
return (W, X[(- 1)]) | Generate multiple paths from the bridge and compute the work
Returns log weights (work) and final states (weighted samples
from the target ensemble). | paths/simulate.py | simulate | michaelhabeck/paths | 5 | python | def simulate(bridge, n_paths=1):
'\n Generate multiple paths from the bridge and compute the work\n Returns log weights (work) and final states (weighted samples\n from the target ensemble).\n '
X = generate_paths(bridge, n_paths)
p = [T.stationary for T in bridge]
W = np.sum([(p[(k + 1)].energy(X[k]) - p[k].energy(X[k])) for k in range((len(bridge) - 1))], 0)
return (W, X[(- 1)]) | def simulate(bridge, n_paths=1):
'\n Generate multiple paths from the bridge and compute the work\n Returns log weights (work) and final states (weighted samples\n from the target ensemble).\n '
X = generate_paths(bridge, n_paths)
p = [T.stationary for T in bridge]
W = np.sum([(p[(k + 1)].energy(X[k]) - p[k].energy(X[k])) for k in range((len(bridge) - 1))], 0)
return (W, X[(- 1)])<|docstring|>Generate multiple paths from the bridge and compute the work
Returns log weights (work) and final states (weighted samples
from the target ensemble).<|endoftext|> |
f24146177294383dde2c5f6bb2ca97d78fa553ac3558bf7674e73e6298494a48 | def test_get_all_variants(gemini_case_obj):
'Test to get some variants from the gemini adapter'
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
filters = {}
result = plugin.variants('643594', filters=filters, count=1000)
variants = result.variants
nr_of_variants = result.nr_of_variants
assert (nr_of_variants == 14) | Test to get some variants from the gemini adapter | tests/plugins/gemini/test_gemini_variant_mixin.py | test_get_all_variants | robinandeer/puzzle | 24 | python | def test_get_all_variants(gemini_case_obj):
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
filters = {}
result = plugin.variants('643594', filters=filters, count=1000)
variants = result.variants
nr_of_variants = result.nr_of_variants
assert (nr_of_variants == 14) | def test_get_all_variants(gemini_case_obj):
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
filters = {}
result = plugin.variants('643594', filters=filters, count=1000)
variants = result.variants
nr_of_variants = result.nr_of_variants
assert (nr_of_variants == 14)<|docstring|>Test to get some variants from the gemini adapter<|endoftext|> |
244a7e69ce1edbb5f72e84b18cb1181b68c6fcce4d3bc727da13f53b6c097a59 | def test_get_variants(gemini_case_obj):
'Test to get some variants from the gemini plugin'
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
filters = {}
result = plugin.variants('643594', filters=filters, count=5)
variants = result.variants
nr_of_variants = result.nr_of_variants
assert (nr_of_variants == 5) | Test to get some variants from the gemini plugin | tests/plugins/gemini/test_gemini_variant_mixin.py | test_get_variants | robinandeer/puzzle | 24 | python | def test_get_variants(gemini_case_obj):
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
filters = {}
result = plugin.variants('643594', filters=filters, count=5)
variants = result.variants
nr_of_variants = result.nr_of_variants
assert (nr_of_variants == 5) | def test_get_variants(gemini_case_obj):
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
filters = {}
result = plugin.variants('643594', filters=filters, count=5)
variants = result.variants
nr_of_variants = result.nr_of_variants
assert (nr_of_variants == 5)<|docstring|>Test to get some variants from the gemini plugin<|endoftext|> |
ae87853433231169cbcc0808b402957b2c74783b8e44dc3c9c45c737d96ca95f | def test_variant(gemini_case_obj):
'Test to get one variant'
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
variant = plugin.variant(case_id='643594', variant_id=4)
assert (variant['CHROM'] == '6')
assert (variant['POS'] == '32487163')
assert (type(variant['genes']) == type([])) | Test to get one variant | tests/plugins/gemini/test_gemini_variant_mixin.py | test_variant | robinandeer/puzzle | 24 | python | def test_variant(gemini_case_obj):
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
variant = plugin.variant(case_id='643594', variant_id=4)
assert (variant['CHROM'] == '6')
assert (variant['POS'] == '32487163')
assert (type(variant['genes']) == type([])) | def test_variant(gemini_case_obj):
plugin = GeminiPlugin()
plugin.add_case(gemini_case_obj)
variant = plugin.variant(case_id='643594', variant_id=4)
assert (variant['CHROM'] == '6')
assert (variant['POS'] == '32487163')
assert (type(variant['genes']) == type([]))<|docstring|>Test to get one variant<|endoftext|> |
4b4c61b1b7ee258dd601a8d32458d60e56225c7d25c4da9cba8ebb274791c064 | def __init__(self, api_server=None, audit=None, authenticator=None, controller_manager=None, scheduler=None, local_vars_configuration=None):
'IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging - a model defined in OpenAPI'
if (local_vars_configuration is None):
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_server = None
self._audit = None
self._authenticator = None
self._controller_manager = None
self._scheduler = None
self.discriminator = None
self.api_server = api_server
self.audit = audit
self.authenticator = authenticator
self.controller_manager = controller_manager
self.scheduler = scheduler | IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging - a model defined in OpenAPI | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | __init__ | mariusgheorghies/python | 0 | python | def __init__(self, api_server=None, audit=None, authenticator=None, controller_manager=None, scheduler=None, local_vars_configuration=None):
if (local_vars_configuration is None):
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_server = None
self._audit = None
self._authenticator = None
self._controller_manager = None
self._scheduler = None
self.discriminator = None
self.api_server = api_server
self.audit = audit
self.authenticator = authenticator
self.controller_manager = controller_manager
self.scheduler = scheduler | def __init__(self, api_server=None, audit=None, authenticator=None, controller_manager=None, scheduler=None, local_vars_configuration=None):
if (local_vars_configuration is None):
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_server = None
self._audit = None
self._authenticator = None
self._controller_manager = None
self._scheduler = None
self.discriminator = None
self.api_server = api_server
self.audit = audit
self.authenticator = authenticator
self.controller_manager = controller_manager
self.scheduler = scheduler<|docstring|>IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging - a model defined in OpenAPI<|endoftext|> |
81152e23b3919eed925bbf05dc841c19c8380077b62f305a4adefeeb6788c385 | @property
def api_server(self):
'Gets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501\n\n :return: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._api_server | Gets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501
:return: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | api_server | mariusgheorghies/python | 0 | python | @property
def api_server(self):
'Gets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501\n\n :return: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._api_server | @property
def api_server(self):
'Gets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501\n\n :return: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._api_server<|docstring|>Gets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501
:return: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool<|endoftext|> |
74dd44f0f0d1379d5f2780c5191d2cfb24e759dd24483b2884cb146d2309015b | @api_server.setter
def api_server(self, api_server):
'Sets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501\n\n :param api_server: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (api_server is None)):
raise ValueError('Invalid value for `api_server`, must not be `None`')
self._api_server = api_server | Sets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501
:param api_server: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | api_server | mariusgheorghies/python | 0 | python | @api_server.setter
def api_server(self, api_server):
'Sets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501\n\n :param api_server: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (api_server is None)):
raise ValueError('Invalid value for `api_server`, must not be `None`')
self._api_server = api_server | @api_server.setter
def api_server(self, api_server):
'Sets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501\n\n :param api_server: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (api_server is None)):
raise ValueError('Invalid value for `api_server`, must not be `None`')
self._api_server = api_server<|docstring|>Sets the api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
APIServer indicates if the Kubernetes API Server log (kube-apiserver) shoulkd be enabled # noqa: E501
:param api_server: The api_server of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool<|endoftext|> |
5acc86ecc94b5d60c6b873a44aa71885ecba532a38794e403f6159d44047e8b7 | @property
def audit(self):
'Gets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501\n\n :return: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._audit | Gets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501
:return: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | audit | mariusgheorghies/python | 0 | python | @property
def audit(self):
'Gets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501\n\n :return: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._audit | @property
def audit(self):
'Gets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501\n\n :return: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._audit<|docstring|>Gets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501
:return: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool<|endoftext|> |
a6822e1b1efc8905f217f932d35588510c89fde7b98160f57bd868a091bc34eb | @audit.setter
def audit(self, audit):
'Sets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501\n\n :param audit: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (audit is None)):
raise ValueError('Invalid value for `audit`, must not be `None`')
self._audit = audit | Sets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501
:param audit: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | audit | mariusgheorghies/python | 0 | python | @audit.setter
def audit(self, audit):
'Sets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501\n\n :param audit: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (audit is None)):
raise ValueError('Invalid value for `audit`, must not be `None`')
self._audit = audit | @audit.setter
def audit(self, audit):
'Sets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501\n\n :param audit: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (audit is None)):
raise ValueError('Invalid value for `audit`, must not be `None`')
self._audit = audit<|docstring|>Sets the audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
Audit indicates if the Kubernetes API audit log should be enabled # noqa: E501
:param audit: The audit of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool<|endoftext|> |
c5808eb4f762413623768a109bfa9031dc4bc80fd633d22f3bcf5830e6c37d6c | @property
def authenticator(self):
'Gets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Authenticator indicates if the iam authenticator log should be enabled # noqa: E501\n\n :return: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._authenticator | Gets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
Authenticator indicates if the iam authenticator log should be enabled # noqa: E501
:return: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | authenticator | mariusgheorghies/python | 0 | python | @property
def authenticator(self):
'Gets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Authenticator indicates if the iam authenticator log should be enabled # noqa: E501\n\n :return: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._authenticator | @property
def authenticator(self):
'Gets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Authenticator indicates if the iam authenticator log should be enabled # noqa: E501\n\n :return: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._authenticator<|docstring|>Gets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
Authenticator indicates if the iam authenticator log should be enabled # noqa: E501
:return: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool<|endoftext|> |
3268bc375a231f40f600930b9781dfa4171dc003c4c2253e5fe7472036b94836 | @authenticator.setter
def authenticator(self, authenticator):
'Sets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Authenticator indicates if the iam authenticator log should be enabled # noqa: E501\n\n :param authenticator: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (authenticator is None)):
raise ValueError('Invalid value for `authenticator`, must not be `None`')
self._authenticator = authenticator | Sets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
Authenticator indicates if the iam authenticator log should be enabled # noqa: E501
:param authenticator: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | authenticator | mariusgheorghies/python | 0 | python | @authenticator.setter
def authenticator(self, authenticator):
'Sets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Authenticator indicates if the iam authenticator log should be enabled # noqa: E501\n\n :param authenticator: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (authenticator is None)):
raise ValueError('Invalid value for `authenticator`, must not be `None`')
self._authenticator = authenticator | @authenticator.setter
def authenticator(self, authenticator):
'Sets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Authenticator indicates if the iam authenticator log should be enabled # noqa: E501\n\n :param authenticator: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (authenticator is None)):
raise ValueError('Invalid value for `authenticator`, must not be `None`')
self._authenticator = authenticator<|docstring|>Sets the authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
Authenticator indicates if the iam authenticator log should be enabled # noqa: E501
:param authenticator: The authenticator of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool<|endoftext|> |
a64bfe73f76f2a98c73877f740f7e98682f8187ca8e4d253003c76a3e375f7d4 | @property
def controller_manager(self):
'Gets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501\n\n :return: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._controller_manager | Gets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501
:return: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | controller_manager | mariusgheorghies/python | 0 | python | @property
def controller_manager(self):
'Gets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501\n\n :return: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._controller_manager | @property
def controller_manager(self):
'Gets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501\n\n :return: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._controller_manager<|docstring|>Gets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501
:return: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool<|endoftext|> |
96f8c2520f904e85b665c0b1e4eb54d428adf93b633a866add70499722b592e2 | @controller_manager.setter
def controller_manager(self, controller_manager):
'Sets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501\n\n :param controller_manager: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (controller_manager is None)):
raise ValueError('Invalid value for `controller_manager`, must not be `None`')
self._controller_manager = controller_manager | Sets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501
:param controller_manager: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | controller_manager | mariusgheorghies/python | 0 | python | @controller_manager.setter
def controller_manager(self, controller_manager):
'Sets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501\n\n :param controller_manager: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (controller_manager is None)):
raise ValueError('Invalid value for `controller_manager`, must not be `None`')
self._controller_manager = controller_manager | @controller_manager.setter
def controller_manager(self, controller_manager):
'Sets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501\n\n :param controller_manager: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (controller_manager is None)):
raise ValueError('Invalid value for `controller_manager`, must not be `None`')
self._controller_manager = controller_manager<|docstring|>Sets the controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
ControllerManager indicates if the controller manager (kube-controller-manager) log should be enabled # noqa: E501
:param controller_manager: The controller_manager of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool<|endoftext|> |
d5b1ef5b7f7ea72340012539baffe5b7c17a25cdbec8d5c2e9d259bbf41c57e1 | @property
def scheduler(self):
'Gets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501\n\n :return: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._scheduler | Gets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501
:return: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | scheduler | mariusgheorghies/python | 0 | python | @property
def scheduler(self):
'Gets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501\n\n :return: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._scheduler | @property
def scheduler(self):
'Gets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n\n Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501\n\n :return: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :rtype: bool\n '
return self._scheduler<|docstring|>Gets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501
:return: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:rtype: bool<|endoftext|> |
922ebd556e8f9d928f8e771adfda96e8fea41593d10b8c43677a0ea9f3d15373 | @scheduler.setter
def scheduler(self, scheduler):
'Sets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501\n\n :param scheduler: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (scheduler is None)):
raise ValueError('Invalid value for `scheduler`, must not be `None`')
self._scheduler = scheduler | Sets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501
:param scheduler: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | scheduler | mariusgheorghies/python | 0 | python | @scheduler.setter
def scheduler(self, scheduler):
'Sets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501\n\n :param scheduler: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (scheduler is None)):
raise ValueError('Invalid value for `scheduler`, must not be `None`')
self._scheduler = scheduler | @scheduler.setter
def scheduler(self, scheduler):
'Sets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.\n\n Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501\n\n :param scheduler: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501\n :type: bool\n '
if (self.local_vars_configuration.client_side_validation and (scheduler is None)):
raise ValueError('Invalid value for `scheduler`, must not be `None`')
self._scheduler = scheduler<|docstring|>Sets the scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging.
Scheduler indicates if the Kubernetes scheduler (kube-scheduler) log should be enabled # noqa: E501
:param scheduler: The scheduler of this IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging. # noqa: E501
:type: bool<|endoftext|> |
5a4e41bb6a0def746593298cb605df98f1366e957c4ca89b12010ea7db707963 | def to_dict(self):
'Returns the model properties as a dict'
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result | Returns the model properties as a dict | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | to_dict | mariusgheorghies/python | 0 | python | def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result | def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
return result<|docstring|>Returns the model properties as a dict<|endoftext|> |
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99 | def to_str(self):
'Returns the string representation of the model'
return pprint.pformat(self.to_dict()) | Returns the string representation of the model | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | to_str | mariusgheorghies/python | 0 | python | def to_str(self):
return pprint.pformat(self.to_dict()) | def to_str(self):
return pprint.pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|> |
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703 | def __repr__(self):
'For `print` and `pprint`'
return self.to_str() | For `print` and `pprint` | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | __repr__ | mariusgheorghies/python | 0 | python | def __repr__(self):
return self.to_str() | def __repr__(self):
return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|> |
b61e018e92e2f12e679a0a66bd9a81eda9cf47f6e4854e76aff727eb21a33cf2 | def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging)):
return False
return (self.to_dict() == other.to_dict()) | Returns true if both objects are equal | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | __eq__ | mariusgheorghies/python | 0 | python | def __eq__(self, other):
if (not isinstance(other, IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging)):
return False
return (self.to_dict() == other.to_dict()) | def __eq__(self, other):
if (not isinstance(other, IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging)):
return False
return (self.to_dict() == other.to_dict())<|docstring|>Returns true if both objects are equal<|endoftext|> |
5abfa3ef4f6447252be12dd2dff161e423cc4ee1a8401ecd512ed4b9919de778 | def __ne__(self, other):
'Returns true if both objects are not equal'
if (not isinstance(other, IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging)):
return True
return (self.to_dict() != other.to_dict()) | Returns true if both objects are not equal | kubernetes/client/models/io_xk8s_cluster_controlplane_v1alpha3_aws_managed_control_plane_spec_logging.py | __ne__ | mariusgheorghies/python | 0 | python | def __ne__(self, other):
if (not isinstance(other, IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging)):
return True
return (self.to_dict() != other.to_dict()) | def __ne__(self, other):
if (not isinstance(other, IoXK8sClusterControlplaneV1alpha3AWSManagedControlPlaneSpecLogging)):
return True
return (self.to_dict() != other.to_dict())<|docstring|>Returns true if both objects are not equal<|endoftext|> |
95f70cc82beb1ba936c60031b29e8c8baacaeca07da254dc74c9dc86759ce655 | def test_bug163(self):
"Test bug #163 - Don't convert first char of a XRC extraproperty to upper case."
self.load_and_generate('bug163', test_GUI=False) | Test bug #163 - Don't convert first char of a XRC extraproperty to upper case. | tests/test_bugs_new.py | test_bug163 | Jalkhov/wxGlade | 225 | python | def test_bug163(self):
self.load_and_generate('bug163', test_GUI=False) | def test_bug163(self):
self.load_and_generate('bug163', test_GUI=False)<|docstring|>Test bug #163 - Don't convert first char of a XRC extraproperty to upper case.<|endoftext|> |
445a0d06d1df94d3920a93b48af0c155a4d84a62c2ab094b0d8ec63d8eaddd24 | def test_bug165(self):
"Test bug #165 - Can't rename notebook widget class - internal error on Preview"
self.load_and_generate('bug165', test_GUI=True) | Test bug #165 - Can't rename notebook widget class - internal error on Preview | tests/test_bugs_new.py | test_bug165 | Jalkhov/wxGlade | 225 | python | def test_bug165(self):
self.load_and_generate('bug165', test_GUI=True) | def test_bug165(self):
self.load_and_generate('bug165', test_GUI=True)<|docstring|>Test bug #165 - Can't rename notebook widget class - internal error on Preview<|endoftext|> |
84bc897f96f29b5606ed471912157af93a7277ba1711fd9afd5ce72e2d45457d | def test_bug166(self):
'Test bug #166 - UnicodeDecodeError when saving project using non ASCII characters in menu items'
self.load_and_generate('bug166', test_GUI=False) | Test bug #166 - UnicodeDecodeError when saving project using non ASCII characters in menu items | tests/test_bugs_new.py | test_bug166 | Jalkhov/wxGlade | 225 | python | def test_bug166(self):
self.load_and_generate('bug166', test_GUI=False) | def test_bug166(self):
self.load_and_generate('bug166', test_GUI=False)<|docstring|>Test bug #166 - UnicodeDecodeError when saving project using non ASCII characters in menu items<|endoftext|> |
e2f2c96e49b31749255142e3ac8a1228faf87f1001573eaeee7f890c69fd13e8 | def test_bug167(self):
'Test bug #167 - ascii codec error - UnicodeDecodeError will be raised if existing files will be changed\n (and not overwritten) and those files contains non-ASCII characters.'
for fn in ('bug167', 'bug167_utf8'):
for ext in ('.cpp', '.h', '.lisp', '.pl', '.py', '.xrc'):
source = self._get_casefile_path((fn + ext))
target = self._get_outputfile_path((fn + ext))
if os.path.exists(source):
self._copy_and_modify(source, target)
self.load_and_generate('bug167', test_GUI=False)
self.load_and_generate('bug167_utf8', test_GUI=False) | Test bug #167 - ascii codec error - UnicodeDecodeError will be raised if existing files will be changed
(and not overwritten) and those files contains non-ASCII characters. | tests/test_bugs_new.py | test_bug167 | Jalkhov/wxGlade | 225 | python | def test_bug167(self):
'Test bug #167 - ascii codec error - UnicodeDecodeError will be raised if existing files will be changed\n (and not overwritten) and those files contains non-ASCII characters.'
for fn in ('bug167', 'bug167_utf8'):
for ext in ('.cpp', '.h', '.lisp', '.pl', '.py', '.xrc'):
source = self._get_casefile_path((fn + ext))
target = self._get_outputfile_path((fn + ext))
if os.path.exists(source):
self._copy_and_modify(source, target)
self.load_and_generate('bug167', test_GUI=False)
self.load_and_generate('bug167_utf8', test_GUI=False) | def test_bug167(self):
'Test bug #167 - ascii codec error - UnicodeDecodeError will be raised if existing files will be changed\n (and not overwritten) and those files contains non-ASCII characters.'
for fn in ('bug167', 'bug167_utf8'):
for ext in ('.cpp', '.h', '.lisp', '.pl', '.py', '.xrc'):
source = self._get_casefile_path((fn + ext))
target = self._get_outputfile_path((fn + ext))
if os.path.exists(source):
self._copy_and_modify(source, target)
self.load_and_generate('bug167', test_GUI=False)
self.load_and_generate('bug167_utf8', test_GUI=False)<|docstring|>Test bug #167 - ascii codec error - UnicodeDecodeError will be raised if existing files will be changed
(and not overwritten) and those files contains non-ASCII characters.<|endoftext|> |
98a74840d0885815037d44739416e32b7edd96683ca79ccf05c7ee0877419881 | def test_bug179(self):
'Test bug #179 - Main file is generated without custom extensions'
self.load_and_generate('bug179', test_GUI=False)
return
codegen = common.code_writers['C++']
source = self._load_file('bug179.wxg')
source = self._modify_attrs(source, overwrite='0')
result_app = self._load_file('Bug179_main.c++')
result_frame_cpp = self._load_file('Bug179_Frame.c++')
result_frame_h = self._load_file('Bug179_Frame.hpp')
self._generate_code('C++', source, self.curr_dir)
app_filename = codegen._generate_app_filename()
main_cpp = self._with_curr_dir(app_filename)
generated_app = self.vFiles[main_cpp].getvalue()
generated_frame_cpp = self.vFiles[self._with_curr_dir('Bug179_Frame.c++')].getvalue()
generated_frame_h = self.vFiles[self._with_curr_dir('Bug179_Frame.hpp')].getvalue()
self._compare(result_app, generated_app, app_filename)
self._compare(result_frame_cpp, generated_frame_cpp, 'Bug179_Frame.c++')
self._compare(result_frame_h, generated_frame_h, 'Bug179_Frame.hpp') | Test bug #179 - Main file is generated without custom extensions | tests/test_bugs_new.py | test_bug179 | Jalkhov/wxGlade | 225 | python | def test_bug179(self):
self.load_and_generate('bug179', test_GUI=False)
return
codegen = common.code_writers['C++']
source = self._load_file('bug179.wxg')
source = self._modify_attrs(source, overwrite='0')
result_app = self._load_file('Bug179_main.c++')
result_frame_cpp = self._load_file('Bug179_Frame.c++')
result_frame_h = self._load_file('Bug179_Frame.hpp')
self._generate_code('C++', source, self.curr_dir)
app_filename = codegen._generate_app_filename()
main_cpp = self._with_curr_dir(app_filename)
generated_app = self.vFiles[main_cpp].getvalue()
generated_frame_cpp = self.vFiles[self._with_curr_dir('Bug179_Frame.c++')].getvalue()
generated_frame_h = self.vFiles[self._with_curr_dir('Bug179_Frame.hpp')].getvalue()
self._compare(result_app, generated_app, app_filename)
self._compare(result_frame_cpp, generated_frame_cpp, 'Bug179_Frame.c++')
self._compare(result_frame_h, generated_frame_h, 'Bug179_Frame.hpp') | def test_bug179(self):
self.load_and_generate('bug179', test_GUI=False)
return
codegen = common.code_writers['C++']
source = self._load_file('bug179.wxg')
source = self._modify_attrs(source, overwrite='0')
result_app = self._load_file('Bug179_main.c++')
result_frame_cpp = self._load_file('Bug179_Frame.c++')
result_frame_h = self._load_file('Bug179_Frame.hpp')
self._generate_code('C++', source, self.curr_dir)
app_filename = codegen._generate_app_filename()
main_cpp = self._with_curr_dir(app_filename)
generated_app = self.vFiles[main_cpp].getvalue()
generated_frame_cpp = self.vFiles[self._with_curr_dir('Bug179_Frame.c++')].getvalue()
generated_frame_h = self.vFiles[self._with_curr_dir('Bug179_Frame.hpp')].getvalue()
self._compare(result_app, generated_app, app_filename)
self._compare(result_frame_cpp, generated_frame_cpp, 'Bug179_Frame.c++')
self._compare(result_frame_h, generated_frame_h, 'Bug179_Frame.hpp')<|docstring|>Test bug #179 - Main file is generated without custom extensions<|endoftext|> |
5b49aeb494c2f809bb27634661243350e2d97adc4f567d6326604b6f5abf224b | def test_bug183(self):
'Test bug #183 - Preview failure for class names with Perl scope separator.'
self.load_and_generate('bug183', test_GUI=False) | Test bug #183 - Preview failure for class names with Perl scope separator. | tests/test_bugs_new.py | test_bug183 | Jalkhov/wxGlade | 225 | python | def test_bug183(self):
self.load_and_generate('bug183', test_GUI=False) | def test_bug183(self):
self.load_and_generate('bug183', test_GUI=False)<|docstring|>Test bug #183 - Preview failure for class names with Perl scope separator.<|endoftext|> |
8ba6e920cf46917a6e99133462a9de10d64f573d567e1202a1aef71f5dcf5c79 | def test_bug184(self):
'Test bug #184 - Perl code generation: System colour constants named incorrectly.'
self.load_and_generate('bug184', test_GUI=False) | Test bug #184 - Perl code generation: System colour constants named incorrectly. | tests/test_bugs_new.py | test_bug184 | Jalkhov/wxGlade | 225 | python | def test_bug184(self):
self.load_and_generate('bug184', test_GUI=False) | def test_bug184(self):
self.load_and_generate('bug184', test_GUI=False)<|docstring|>Test bug #184 - Perl code generation: System colour constants named incorrectly.<|endoftext|> |
8230649610509704c7a7b237516f853faf6162ecda1f5b3d5fb31f11da12189e | def test_bug186(self):
'Test bug #186 - Fix C++ code issue with Ids assigned to variables'
self.load_and_generate('bug186', test_GUI=False) | Test bug #186 - Fix C++ code issue with Ids assigned to variables | tests/test_bugs_new.py | test_bug186 | Jalkhov/wxGlade | 225 | python | def test_bug186(self):
self.load_and_generate('bug186', test_GUI=False) | def test_bug186(self):
self.load_and_generate('bug186', test_GUI=False)<|docstring|>Test bug #186 - Fix C++ code issue with Ids assigned to variables<|endoftext|> |
52f7c9bd55b26a8a4e65f5b471626a232a8e4d899a4baa0ce868b50efddaba50 | def test_bug188_toolbar_depencencies(self):
'Test bug #188 - Missing dependencies with wxToolBox widgets'
self.load_and_generate('bug188_included_toolbar', test_GUI=False)
self.load_and_generate('bug188_standalone_toolbar', test_GUI=False) | Test bug #188 - Missing dependencies with wxToolBox widgets | tests/test_bugs_new.py | test_bug188_toolbar_depencencies | Jalkhov/wxGlade | 225 | python | def test_bug188_toolbar_depencencies(self):
self.load_and_generate('bug188_included_toolbar', test_GUI=False)
self.load_and_generate('bug188_standalone_toolbar', test_GUI=False) | def test_bug188_toolbar_depencencies(self):
self.load_and_generate('bug188_included_toolbar', test_GUI=False)
self.load_and_generate('bug188_standalone_toolbar', test_GUI=False)<|docstring|>Test bug #188 - Missing dependencies with wxToolBox widgets<|endoftext|> |
6fc30794b66d257f147f559aca189ba320c9a7f9d2b16a28d16a3b00da766eaf | def test_bars_wo_parent(self):
'Test AttributeError during code generation of toplevel menubars'
self.load_and_generate('bars_wo_parent', test_GUI=False) | Test AttributeError during code generation of toplevel menubars | tests/test_bugs_new.py | test_bars_wo_parent | Jalkhov/wxGlade | 225 | python | def test_bars_wo_parent(self):
self.load_and_generate('bars_wo_parent', test_GUI=False) | def test_bars_wo_parent(self):
self.load_and_generate('bars_wo_parent', test_GUI=False)<|docstring|>Test AttributeError during code generation of toplevel menubars<|endoftext|> |
85a7ffac7a2eb73a0ee0d80a7d1a5ac059e85ab46dd43488e48a14c96d814a0c | def test_bug189_XMLParsingError(self):
'Test bug #189 - XmlParsingError : An internal error occurred while Generate Code'
self.load_and_generate('bug183', test_GUI=False) | Test bug #189 - XmlParsingError : An internal error occurred while Generate Code | tests/test_bugs_new.py | test_bug189_XMLParsingError | Jalkhov/wxGlade | 225 | python | def test_bug189_XMLParsingError(self):
self.load_and_generate('bug183', test_GUI=False) | def test_bug189_XMLParsingError(self):
self.load_and_generate('bug183', test_GUI=False)<|docstring|>Test bug #189 - XmlParsingError : An internal error occurred while Generate Code<|endoftext|> |
df55cc6a44ce5c554a1fe9322ff5d04315e5cc577b8835de258760185f5943b4 | def test_bug194(self):
'Test bug #194 - LB_EXTENDED for ListBox they never show up in generated code'
self.load_and_generate('bug194', test_GUI=False) | Test bug #194 - LB_EXTENDED for ListBox they never show up in generated code | tests/test_bugs_new.py | test_bug194 | Jalkhov/wxGlade | 225 | python | def test_bug194(self):
self.load_and_generate('bug194', test_GUI=False) | def test_bug194(self):
self.load_and_generate('bug194', test_GUI=False)<|docstring|>Test bug #194 - LB_EXTENDED for ListBox they never show up in generated code<|endoftext|> |
d3ad0b30b9a0529c8d690a329cb38ec625c177b43b7e30c11e8ce849a6e8d39e | def test_bugs_20180116(self):
'Test top_window property and addition of notebook pages with base classes'
self.load_and_generate('Bugs_2018-01-16', test_GUI=False) | Test top_window property and addition of notebook pages with base classes | tests/test_bugs_new.py | test_bugs_20180116 | Jalkhov/wxGlade | 225 | python | def test_bugs_20180116(self):
self.load_and_generate('Bugs_2018-01-16', test_GUI=False) | def test_bugs_20180116(self):
self.load_and_generate('Bugs_2018-01-16', test_GUI=False)<|docstring|>Test top_window property and addition of notebook pages with base classes<|endoftext|> |
3cca2f14bdfd6eb83d8c3b121f0b7b664885edc32d75abde3af18ce14a699240 | def test_issue371(self):
'Test issue #371 - C++ code generation'
self.load_and_generate('Issue_371', excluded=('lisp',), test_GUI=False) | Test issue #371 - C++ code generation | tests/test_bugs_new.py | test_issue371 | Jalkhov/wxGlade | 225 | python | def test_issue371(self):
self.load_and_generate('Issue_371', excluded=('lisp',), test_GUI=False) | def test_issue371(self):
self.load_and_generate('Issue_371', excluded=('lisp',), test_GUI=False)<|docstring|>Test issue #371 - C++ code generation<|endoftext|> |
f019f9c36d90bdfad473661139cb77a2cdce805ab044d90b6c73bfbcef7b7cd9 | def test_issue385(self):
"generate code, add grid, generate code with 'keep user code', check whether imports are added"
self.load_and_generate('Issue385_step_1', test_GUI=False, preview=False)
for ext in ('py', 'cpp', 'pl', 'lisp', 'h'):
src = self._get_outputfile_path(('Issue385_step_1.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2.%s' % ext))
if ((ext == 'py') and (not os.path.exists(src))):
src = self._get_outputfile_path(('Issue385_step_1_Classic.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2_Classic.%s' % ext))
if ((ext == 'py') and (not os.path.exists(src))):
src = self._get_outputfile_path(('Issue385_step_1_Phoenix.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2_Phoenix.%s' % ext))
if os.path.exists(dst):
os.remove(dst)
os.rename(src, dst)
self.load_and_generate('Issue385_step_2', test_GUI=False, preview=False)
self.load_and_generate('Issue385_step_2', test_GUI=False, preview=False) | generate code, add grid, generate code with 'keep user code', check whether imports are added | tests/test_bugs_new.py | test_issue385 | Jalkhov/wxGlade | 225 | python | def test_issue385(self):
self.load_and_generate('Issue385_step_1', test_GUI=False, preview=False)
for ext in ('py', 'cpp', 'pl', 'lisp', 'h'):
src = self._get_outputfile_path(('Issue385_step_1.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2.%s' % ext))
if ((ext == 'py') and (not os.path.exists(src))):
src = self._get_outputfile_path(('Issue385_step_1_Classic.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2_Classic.%s' % ext))
if ((ext == 'py') and (not os.path.exists(src))):
src = self._get_outputfile_path(('Issue385_step_1_Phoenix.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2_Phoenix.%s' % ext))
if os.path.exists(dst):
os.remove(dst)
os.rename(src, dst)
self.load_and_generate('Issue385_step_2', test_GUI=False, preview=False)
self.load_and_generate('Issue385_step_2', test_GUI=False, preview=False) | def test_issue385(self):
self.load_and_generate('Issue385_step_1', test_GUI=False, preview=False)
for ext in ('py', 'cpp', 'pl', 'lisp', 'h'):
src = self._get_outputfile_path(('Issue385_step_1.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2.%s' % ext))
if ((ext == 'py') and (not os.path.exists(src))):
src = self._get_outputfile_path(('Issue385_step_1_Classic.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2_Classic.%s' % ext))
if ((ext == 'py') and (not os.path.exists(src))):
src = self._get_outputfile_path(('Issue385_step_1_Phoenix.%s' % ext))
dst = self._get_outputfile_path(('Issue385_step_2_Phoenix.%s' % ext))
if os.path.exists(dst):
os.remove(dst)
os.rename(src, dst)
self.load_and_generate('Issue385_step_2', test_GUI=False, preview=False)
self.load_and_generate('Issue385_step_2', test_GUI=False, preview=False)<|docstring|>generate code, add grid, generate code with 'keep user code', check whether imports are added<|endoftext|> |
9de50495bc6bbda2be5637a8fd49f995bebee16da686f528812564adc1324632 | @pytest.fixture(scope='session')
def sts_environment():
'\n This conf instance is used when running `checksdev env start mycheck myenv`.\n The start command places this as a `conf.yaml` in the `conf.d/mycheck/` directory.\n If you want to run an environment this object can not be empty.\n '
return {'url': 'https://instance.service-now.com', 'user': 'some_user', 'password': 'secret', 'batch_size': 100, 'include_resource_types': ['cmdb_ci_netgear', 'cmdb_ci_win_cluster', 'cmdb_ci_win_cluster_node', 'cmdb_ci_app_server_java']} | This conf instance is used when running `checksdev env start mycheck myenv`.
The start command places this as a `conf.yaml` in the `conf.d/mycheck/` directory.
If you want to run an environment this object can not be empty. | servicenow/tests/conftest.py | sts_environment | CTAC-unixsupport/stackstate-agent-integrations | 1 | python | @pytest.fixture(scope='session')
def sts_environment():
'\n This conf instance is used when running `checksdev env start mycheck myenv`.\n The start command places this as a `conf.yaml` in the `conf.d/mycheck/` directory.\n If you want to run an environment this object can not be empty.\n '
return {'url': 'https://instance.service-now.com', 'user': 'some_user', 'password': 'secret', 'batch_size': 100, 'include_resource_types': ['cmdb_ci_netgear', 'cmdb_ci_win_cluster', 'cmdb_ci_win_cluster_node', 'cmdb_ci_app_server_java']} | @pytest.fixture(scope='session')
def sts_environment():
'\n This conf instance is used when running `checksdev env start mycheck myenv`.\n The start command places this as a `conf.yaml` in the `conf.d/mycheck/` directory.\n If you want to run an environment this object can not be empty.\n '
return {'url': 'https://instance.service-now.com', 'user': 'some_user', 'password': 'secret', 'batch_size': 100, 'include_resource_types': ['cmdb_ci_netgear', 'cmdb_ci_win_cluster', 'cmdb_ci_win_cluster_node', 'cmdb_ci_app_server_java']}<|docstring|>This conf instance is used when running `checksdev env start mycheck myenv`.
The start command places this as a `conf.yaml` in the `conf.d/mycheck/` directory.
If you want to run an environment this object can not be empty.<|endoftext|> |
822ad86c3fe4dc8c4616bd797f057fef019c0c73f4a9f10cfd93d3099d42f595 | def generate_classifier(self):
'Generates the nn.module container Sequential classfier as the default for this class.\n\n Args:\n None.\n\n Raises:\n TODO: Update exceptions with error_handling class.\n\n Returns:\n None.\n '
self.linear_layers = []
n = len(self.data)
for i in range((n - 1)):
self.linear_layers.append(nn.Linear(self.data[i], self.data[((i + 1) % n)]))
if (i != (n - 2)):
if (self.hidden_activation == 'relu'):
self.linear_layers.append(nn.ReLU())
elif (self.hidden_activation == 'sigmoid'):
self.linear_layers.append(nn.Sigmoid())
elif (self.hidden_activation == 'tanh'):
self.linear_layers.append(nn.Tanh())
self.linear_layers.append(nn.Dropout(self.dropout))
self.linear_layers.append(nn.LogSoftmax(dim=1))
self.model = nn.Sequential(*self.linear_layers) | Generates the nn.module container Sequential classfier as the default for this class.
Args:
None.
Raises:
TODO: Update exceptions with error_handling class.
Returns:
None. | net/network/neural_net.py | generate_classifier | elQuixote/Pytorch_ImageClassifier | 1 | python | def generate_classifier(self):
'Generates the nn.module container Sequential classfier as the default for this class.\n\n Args:\n None.\n\n Raises:\n TODO: Update exceptions with error_handling class.\n\n Returns:\n None.\n '
self.linear_layers = []
n = len(self.data)
for i in range((n - 1)):
self.linear_layers.append(nn.Linear(self.data[i], self.data[((i + 1) % n)]))
if (i != (n - 2)):
if (self.hidden_activation == 'relu'):
self.linear_layers.append(nn.ReLU())
elif (self.hidden_activation == 'sigmoid'):
self.linear_layers.append(nn.Sigmoid())
elif (self.hidden_activation == 'tanh'):
self.linear_layers.append(nn.Tanh())
self.linear_layers.append(nn.Dropout(self.dropout))
self.linear_layers.append(nn.LogSoftmax(dim=1))
self.model = nn.Sequential(*self.linear_layers) | def generate_classifier(self):
'Generates the nn.module container Sequential classfier as the default for this class.\n\n Args:\n None.\n\n Raises:\n TODO: Update exceptions with error_handling class.\n\n Returns:\n None.\n '
self.linear_layers = []
n = len(self.data)
for i in range((n - 1)):
self.linear_layers.append(nn.Linear(self.data[i], self.data[((i + 1) % n)]))
if (i != (n - 2)):
if (self.hidden_activation == 'relu'):
self.linear_layers.append(nn.ReLU())
elif (self.hidden_activation == 'sigmoid'):
self.linear_layers.append(nn.Sigmoid())
elif (self.hidden_activation == 'tanh'):
self.linear_layers.append(nn.Tanh())
self.linear_layers.append(nn.Dropout(self.dropout))
self.linear_layers.append(nn.LogSoftmax(dim=1))
self.model = nn.Sequential(*self.linear_layers)<|docstring|>Generates the nn.module container Sequential classfier as the default for this class.
Args:
None.
Raises:
TODO: Update exceptions with error_handling class.
Returns:
None.<|endoftext|> |
14495d71be7d0f83f0c3f512bcba9b57cc6417e39b379fc299eae5e1734c292c | def train_network(self, train_data, validation_data, epochs=1, load_best_params=False, plot=False):
'Trains the model, requires the criterion and optimizer to be passed into the class args before hand.\n\n TODO: add exception handling for optimizer and criterion as None values.\n\n Args:\n train_data (torch.utils.data.dataloader.DataLoader): The training torch data loader.\n validation_data (torch.utils.data.dataloader.DataLoader): The validation torch data loader.\n epochs (int): The number of epochs for training.\n load_best_params (bool): If true then we will load the model_state_dict from the highest accuracy iteration\n plot (bool): If true we plot both losses.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n None.\n '
self.model.to(self.device)
if (not self.model.training):
self.model.train()
(iteration, running_loss) = (0, 0)
(highest_accuracy, high_acc_iter, high_acc_epoch) = (0, 0, 0)
(training_loss_set, validation_loss_set) = ([], [])
best_params = None
for epoch in range(epochs):
batch_iteration = 0
for (x, y_labels) in train_data:
(x, y_labels) = (x.to(self.device), y_labels.to(self.device))
self.optimizer.zero_grad()
output = self.model(x)
loss = self.criterion(output, y_labels)
loss.backward()
self.optimizer.step()
running_loss += loss.item()
iteration += 1
batch_iteration += 1
else:
(validation_loss, accuracy) = self.validate_network(validation_data)
training_loss = (running_loss / len(train_data))
print('Model has a total of {} training epochs completed.'.format(self.epochs_completed))
print('Active session Epoch {} out of {}'.format((epoch + 1), epochs))
print('Currently model has Accuracy of {}% \nCurrent training loss is {} \nCurrent validation loss is {}'.format(accuracy, training_loss, validation_loss))
training_loss_set.append(training_loss)
validation_loss_set.append(validation_loss)
print('-------------')
running_loss = 0
if (accuracy > highest_accuracy):
highest_accuracy = accuracy
high_acc_iter = batch_iteration
high_acc_epoch = (epoch + 1)
if load_best_params:
best_params = copy.deepcopy(self.model.state_dict())
self.model.train()
self.epochs_completed += 1
(t_slope, v_slope) = self.check_overfitting(training_loss_set, validation_loss_set, plot)
print('Slope of linear reg training curve fit is {} \nSlope of linear reg Validation curve fit is {}'.format(t_slope, v_slope))
print('Training session highest accuracy was {} on epoch {} batch iteration {}'.format(highest_accuracy, high_acc_epoch, high_acc_iter))
if load_best_params:
self.model.load_state_dict(best_params)
print('Params from {} epoch, {} batch iteration were loaded'.format(high_acc_epoch, high_acc_iter)) | Trains the model, requires the criterion and optimizer to be passed into the class args before hand.
TODO: add exception handling for optimizer and criterion as None values.
Args:
train_data (torch.utils.data.dataloader.DataLoader): The training torch data loader.
validation_data (torch.utils.data.dataloader.DataLoader): The validation torch data loader.
epochs (int): The number of epochs for training.
load_best_params (bool): If true then we will load the model_state_dict from the highest accuracy iteration
plot (bool): If true we plot both losses.
Raises:
TODO: Add exceptions.
Returns:
None. | net/network/neural_net.py | train_network | elQuixote/Pytorch_ImageClassifier | 1 | python | def train_network(self, train_data, validation_data, epochs=1, load_best_params=False, plot=False):
'Trains the model, requires the criterion and optimizer to be passed into the class args before hand.\n\n TODO: add exception handling for optimizer and criterion as None values.\n\n Args:\n train_data (torch.utils.data.dataloader.DataLoader): The training torch data loader.\n validation_data (torch.utils.data.dataloader.DataLoader): The validation torch data loader.\n epochs (int): The number of epochs for training.\n load_best_params (bool): If true then we will load the model_state_dict from the highest accuracy iteration\n plot (bool): If true we plot both losses.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n None.\n '
self.model.to(self.device)
if (not self.model.training):
self.model.train()
(iteration, running_loss) = (0, 0)
(highest_accuracy, high_acc_iter, high_acc_epoch) = (0, 0, 0)
(training_loss_set, validation_loss_set) = ([], [])
best_params = None
for epoch in range(epochs):
batch_iteration = 0
for (x, y_labels) in train_data:
(x, y_labels) = (x.to(self.device), y_labels.to(self.device))
self.optimizer.zero_grad()
output = self.model(x)
loss = self.criterion(output, y_labels)
loss.backward()
self.optimizer.step()
running_loss += loss.item()
iteration += 1
batch_iteration += 1
else:
(validation_loss, accuracy) = self.validate_network(validation_data)
training_loss = (running_loss / len(train_data))
print('Model has a total of {} training epochs completed.'.format(self.epochs_completed))
print('Active session Epoch {} out of {}'.format((epoch + 1), epochs))
print('Currently model has Accuracy of {}% \nCurrent training loss is {} \nCurrent validation loss is {}'.format(accuracy, training_loss, validation_loss))
training_loss_set.append(training_loss)
validation_loss_set.append(validation_loss)
print('-------------')
running_loss = 0
if (accuracy > highest_accuracy):
highest_accuracy = accuracy
high_acc_iter = batch_iteration
high_acc_epoch = (epoch + 1)
if load_best_params:
best_params = copy.deepcopy(self.model.state_dict())
self.model.train()
self.epochs_completed += 1
(t_slope, v_slope) = self.check_overfitting(training_loss_set, validation_loss_set, plot)
print('Slope of linear reg training curve fit is {} \nSlope of linear reg Validation curve fit is {}'.format(t_slope, v_slope))
print('Training session highest accuracy was {} on epoch {} batch iteration {}'.format(highest_accuracy, high_acc_epoch, high_acc_iter))
if load_best_params:
self.model.load_state_dict(best_params)
print('Params from {} epoch, {} batch iteration were loaded'.format(high_acc_epoch, high_acc_iter)) | def train_network(self, train_data, validation_data, epochs=1, load_best_params=False, plot=False):
'Trains the model, requires the criterion and optimizer to be passed into the class args before hand.\n\n TODO: add exception handling for optimizer and criterion as None values.\n\n Args:\n train_data (torch.utils.data.dataloader.DataLoader): The training torch data loader.\n validation_data (torch.utils.data.dataloader.DataLoader): The validation torch data loader.\n epochs (int): The number of epochs for training.\n load_best_params (bool): If true then we will load the model_state_dict from the highest accuracy iteration\n plot (bool): If true we plot both losses.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n None.\n '
self.model.to(self.device)
if (not self.model.training):
self.model.train()
(iteration, running_loss) = (0, 0)
(highest_accuracy, high_acc_iter, high_acc_epoch) = (0, 0, 0)
(training_loss_set, validation_loss_set) = ([], [])
best_params = None
for epoch in range(epochs):
batch_iteration = 0
for (x, y_labels) in train_data:
(x, y_labels) = (x.to(self.device), y_labels.to(self.device))
self.optimizer.zero_grad()
output = self.model(x)
loss = self.criterion(output, y_labels)
loss.backward()
self.optimizer.step()
running_loss += loss.item()
iteration += 1
batch_iteration += 1
else:
(validation_loss, accuracy) = self.validate_network(validation_data)
training_loss = (running_loss / len(train_data))
print('Model has a total of {} training epochs completed.'.format(self.epochs_completed))
print('Active session Epoch {} out of {}'.format((epoch + 1), epochs))
print('Currently model has Accuracy of {}% \nCurrent training loss is {} \nCurrent validation loss is {}'.format(accuracy, training_loss, validation_loss))
training_loss_set.append(training_loss)
validation_loss_set.append(validation_loss)
print('-------------')
running_loss = 0
if (accuracy > highest_accuracy):
highest_accuracy = accuracy
high_acc_iter = batch_iteration
high_acc_epoch = (epoch + 1)
if load_best_params:
best_params = copy.deepcopy(self.model.state_dict())
self.model.train()
self.epochs_completed += 1
(t_slope, v_slope) = self.check_overfitting(training_loss_set, validation_loss_set, plot)
print('Slope of linear reg training curve fit is {} \nSlope of linear reg Validation curve fit is {}'.format(t_slope, v_slope))
print('Training session highest accuracy was {} on epoch {} batch iteration {}'.format(highest_accuracy, high_acc_epoch, high_acc_iter))
if load_best_params:
self.model.load_state_dict(best_params)
print('Params from {} epoch, {} batch iteration were loaded'.format(high_acc_epoch, high_acc_iter))<|docstring|>Trains the model, requires the criterion and optimizer to be passed into the class args before hand.
TODO: add exception handling for optimizer and criterion as None values.
Args:
train_data (torch.utils.data.dataloader.DataLoader): The training torch data loader.
validation_data (torch.utils.data.dataloader.DataLoader): The validation torch data loader.
epochs (int): The number of epochs for training.
load_best_params (bool): If true then we will load the model_state_dict from the highest accuracy iteration
plot (bool): If true we plot both losses.
Raises:
TODO: Add exceptions.
Returns:
None.<|endoftext|> |
23deb1da03ce6d4e4cda1b8d7d1e0b2662633ec8141080c207b471857b537d85 | def validate_network(self, data):
'Validate our model to check the loss and accuracy.\n\n Args:\n data (torch.utils.data.dataloader.DataLoader): The data we want to validate as torch data loader.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n loss,accuracy (tuple): The loss and accuracy of the validation.\n '
self.model.eval()
with t.no_grad():
batch_loss = 0
batch_accuracy = 0
for (x, y_labels) in data:
(x, y_labels) = (x.to(self.device), y_labels.to(self.device))
output = self.model(x)
batch_loss += self.criterion(output, y_labels).item()
probability = t.exp(output)
(_, top_class) = probability.topk(1, dim=1)
equals = (top_class == y_labels.view(*top_class.shape))
batch_accuracy += t.mean(equals.type(t.FloatTensor)).item()
test_accuracy = ((batch_accuracy / len(data)) * 100)
test_loss = (batch_loss / len(data))
return (test_loss, test_accuracy) | Validate our model to check the loss and accuracy.
Args:
data (torch.utils.data.dataloader.DataLoader): The data we want to validate as torch data loader.
Raises:
TODO: Add exceptions.
Returns:
loss,accuracy (tuple): The loss and accuracy of the validation. | net/network/neural_net.py | validate_network | elQuixote/Pytorch_ImageClassifier | 1 | python | def validate_network(self, data):
'Validate our model to check the loss and accuracy.\n\n Args:\n data (torch.utils.data.dataloader.DataLoader): The data we want to validate as torch data loader.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n loss,accuracy (tuple): The loss and accuracy of the validation.\n '
self.model.eval()
with t.no_grad():
batch_loss = 0
batch_accuracy = 0
for (x, y_labels) in data:
(x, y_labels) = (x.to(self.device), y_labels.to(self.device))
output = self.model(x)
batch_loss += self.criterion(output, y_labels).item()
probability = t.exp(output)
(_, top_class) = probability.topk(1, dim=1)
equals = (top_class == y_labels.view(*top_class.shape))
batch_accuracy += t.mean(equals.type(t.FloatTensor)).item()
test_accuracy = ((batch_accuracy / len(data)) * 100)
test_loss = (batch_loss / len(data))
return (test_loss, test_accuracy) | def validate_network(self, data):
'Validate our model to check the loss and accuracy.\n\n Args:\n data (torch.utils.data.dataloader.DataLoader): The data we want to validate as torch data loader.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n loss,accuracy (tuple): The loss and accuracy of the validation.\n '
self.model.eval()
with t.no_grad():
batch_loss = 0
batch_accuracy = 0
for (x, y_labels) in data:
(x, y_labels) = (x.to(self.device), y_labels.to(self.device))
output = self.model(x)
batch_loss += self.criterion(output, y_labels).item()
probability = t.exp(output)
(_, top_class) = probability.topk(1, dim=1)
equals = (top_class == y_labels.view(*top_class.shape))
batch_accuracy += t.mean(equals.type(t.FloatTensor)).item()
test_accuracy = ((batch_accuracy / len(data)) * 100)
test_loss = (batch_loss / len(data))
return (test_loss, test_accuracy)<|docstring|>Validate our model to check the loss and accuracy.
Args:
data (torch.utils.data.dataloader.DataLoader): The data we want to validate as torch data loader.
Raises:
TODO: Add exceptions.
Returns:
loss,accuracy (tuple): The loss and accuracy of the validation.<|endoftext|> |
d2953bc001a301f86db32ce925e96b345f30eee3f0110cf052b5365cf2dd33e6 | def check_overfitting(self, train_losses, validation_losses, plot=False):
'Validate our model to check the loss and accuracy\n\n Args:\n train_losses (list of floats): The list of training losses per epoch.\n validation_losses (list of floats): The list of validation losses per epoch.\n plot (bool): If true we plot both losses.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n slopes (tuple): The slopes of the linear reg curve fits for both validation/training.\n '
tl_x_val = np.arange(0, len(train_losses))
vl_x_val = np.arange(0, len(validation_losses))
train_data = np.array([tl_x_val, train_losses])
validate_data = np.array([vl_x_val, validation_losses])
(train_slope, train_intercept) = np.polyfit(train_data[0], train_data[1], 1)
(validation_slope, validation_intercept) = np.polyfit(validate_data[0], validate_data[1], 1)
if plot:
plt.plot(train_data[0], train_data[1], 'o', label='training loss')
plt.plot(validate_data[0], validate_data[1], 'o', label='validation loss')
plt.plot(train_data[0], (train_intercept + (train_slope * train_data[0])), 'r', label='train_regg')
plt.plot(validate_data[0], (validation_intercept + (validation_slope * validate_data[0])), 'r', label='val_regg')
plt.legend()
plt.show()
return (train_slope, validation_slope) | Validate our model to check the loss and accuracy
Args:
train_losses (list of floats): The list of training losses per epoch.
validation_losses (list of floats): The list of validation losses per epoch.
plot (bool): If true we plot both losses.
Raises:
TODO: Add exceptions.
Returns:
slopes (tuple): The slopes of the linear reg curve fits for both validation/training. | net/network/neural_net.py | check_overfitting | elQuixote/Pytorch_ImageClassifier | 1 | python | def check_overfitting(self, train_losses, validation_losses, plot=False):
'Validate our model to check the loss and accuracy\n\n Args:\n train_losses (list of floats): The list of training losses per epoch.\n validation_losses (list of floats): The list of validation losses per epoch.\n plot (bool): If true we plot both losses.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n slopes (tuple): The slopes of the linear reg curve fits for both validation/training.\n '
tl_x_val = np.arange(0, len(train_losses))
vl_x_val = np.arange(0, len(validation_losses))
train_data = np.array([tl_x_val, train_losses])
validate_data = np.array([vl_x_val, validation_losses])
(train_slope, train_intercept) = np.polyfit(train_data[0], train_data[1], 1)
(validation_slope, validation_intercept) = np.polyfit(validate_data[0], validate_data[1], 1)
if plot:
plt.plot(train_data[0], train_data[1], 'o', label='training loss')
plt.plot(validate_data[0], validate_data[1], 'o', label='validation loss')
plt.plot(train_data[0], (train_intercept + (train_slope * train_data[0])), 'r', label='train_regg')
plt.plot(validate_data[0], (validation_intercept + (validation_slope * validate_data[0])), 'r', label='val_regg')
plt.legend()
plt.show()
return (train_slope, validation_slope) | def check_overfitting(self, train_losses, validation_losses, plot=False):
'Validate our model to check the loss and accuracy\n\n Args:\n train_losses (list of floats): The list of training losses per epoch.\n validation_losses (list of floats): The list of validation losses per epoch.\n plot (bool): If true we plot both losses.\n\n Raises:\n TODO: Add exceptions.\n\n Returns:\n slopes (tuple): The slopes of the linear reg curve fits for both validation/training.\n '
tl_x_val = np.arange(0, len(train_losses))
vl_x_val = np.arange(0, len(validation_losses))
train_data = np.array([tl_x_val, train_losses])
validate_data = np.array([vl_x_val, validation_losses])
(train_slope, train_intercept) = np.polyfit(train_data[0], train_data[1], 1)
(validation_slope, validation_intercept) = np.polyfit(validate_data[0], validate_data[1], 1)
if plot:
plt.plot(train_data[0], train_data[1], 'o', label='training loss')
plt.plot(validate_data[0], validate_data[1], 'o', label='validation loss')
plt.plot(train_data[0], (train_intercept + (train_slope * train_data[0])), 'r', label='train_regg')
plt.plot(validate_data[0], (validation_intercept + (validation_slope * validate_data[0])), 'r', label='val_regg')
plt.legend()
plt.show()
return (train_slope, validation_slope)<|docstring|>Validate our model to check the loss and accuracy
Args:
train_losses (list of floats): The list of training losses per epoch.
validation_losses (list of floats): The list of validation losses per epoch.
plot (bool): If true we plot both losses.
Raises:
TODO: Add exceptions.
Returns:
slopes (tuple): The slopes of the linear reg curve fits for both validation/training.<|endoftext|> |
e1c040a6125074bba68e00855f3d2988543855737f43e76c733dcb91c92a4b2c | def save_model_checkpoint(self, full_path, training_class_to_idx):
'Save the model checkpoint.\n\n Args:\n full_path (str): The full path to save the checkpoint to\n training_class_to_idx (dic of ints): This is where we store the dictionary mapping the name of the class to the index (label)\n\n Raises:\n TODO: Add exceptions\n\n Returns:\n None\n '
net_data_dic = {'input_count': self.inputs, 'hidden_sizes': self.hidden_sizes, 'outputs': self.outputs, 'h_activation': self.hidden_activation, 'dropout': self.dropout, 'learn_rate': self.learn_rate, 'epochs_completed': self.epochs_completed}
checkpoint = {'data': net_data_dic, 'model': self.model, 'classifier': self.model.classifier, 'optimizer.state_dict': self.optimizer.state_dict(), 'state_dict': self.model.state_dict(), 'device': self.device, 'class_to_idx': training_class_to_idx}
t.save(checkpoint, full_path) | Save the model checkpoint.
Args:
full_path (str): The full path to save the checkpoint to
training_class_to_idx (dic of ints): This is where we store the dictionary mapping the name of the class to the index (label)
Raises:
TODO: Add exceptions
Returns:
None | net/network/neural_net.py | save_model_checkpoint | elQuixote/Pytorch_ImageClassifier | 1 | python | def save_model_checkpoint(self, full_path, training_class_to_idx):
'Save the model checkpoint.\n\n Args:\n full_path (str): The full path to save the checkpoint to\n training_class_to_idx (dic of ints): This is where we store the dictionary mapping the name of the class to the index (label)\n\n Raises:\n TODO: Add exceptions\n\n Returns:\n None\n '
net_data_dic = {'input_count': self.inputs, 'hidden_sizes': self.hidden_sizes, 'outputs': self.outputs, 'h_activation': self.hidden_activation, 'dropout': self.dropout, 'learn_rate': self.learn_rate, 'epochs_completed': self.epochs_completed}
checkpoint = {'data': net_data_dic, 'model': self.model, 'classifier': self.model.classifier, 'optimizer.state_dict': self.optimizer.state_dict(), 'state_dict': self.model.state_dict(), 'device': self.device, 'class_to_idx': training_class_to_idx}
t.save(checkpoint, full_path) | def save_model_checkpoint(self, full_path, training_class_to_idx):
'Save the model checkpoint.\n\n Args:\n full_path (str): The full path to save the checkpoint to\n training_class_to_idx (dic of ints): This is where we store the dictionary mapping the name of the class to the index (label)\n\n Raises:\n TODO: Add exceptions\n\n Returns:\n None\n '
net_data_dic = {'input_count': self.inputs, 'hidden_sizes': self.hidden_sizes, 'outputs': self.outputs, 'h_activation': self.hidden_activation, 'dropout': self.dropout, 'learn_rate': self.learn_rate, 'epochs_completed': self.epochs_completed}
checkpoint = {'data': net_data_dic, 'model': self.model, 'classifier': self.model.classifier, 'optimizer.state_dict': self.optimizer.state_dict(), 'state_dict': self.model.state_dict(), 'device': self.device, 'class_to_idx': training_class_to_idx}
t.save(checkpoint, full_path)<|docstring|>Save the model checkpoint.
Args:
full_path (str): The full path to save the checkpoint to
training_class_to_idx (dic of ints): This is where we store the dictionary mapping the name of the class to the index (label)
Raises:
TODO: Add exceptions
Returns:
None<|endoftext|> |
5c720b136a82a49ba02f8ac0c8c5a0314c9375031f389745ab878213b0117e5c | def test_action_inheritance_PASS_validation():
'\n Confirm the PASS instance data passes validation.\n '
g = load_validation_graph('action_inheritance_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph) | Confirm the PASS instance data passes validation. | tests/examples/test_validation.py | test_action_inheritance_PASS_validation | daniel-markus-msab/UCO | 34 | python | def test_action_inheritance_PASS_validation():
'\n \n '
g = load_validation_graph('action_inheritance_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph) | def test_action_inheritance_PASS_validation():
'\n \n '
g = load_validation_graph('action_inheritance_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph)<|docstring|>Confirm the PASS instance data passes validation.<|endoftext|> |
c812956f97d896b388247568ab2ff00fdeb9ac0d0e10ce393b80a5b0006c099d | def test_action_inheritance_XFAIL_validation():
'\n Confirm the XFAIL instance data fails validation based on an expected set of properties not conforming to shape constraints.\n '
confirm_validation_errors('action_inheritance_XFAIL_validation.ttl', {str(NS_UCO_ACTION.action), str(NS_UCO_ACTION.actionStatus)}) | Confirm the XFAIL instance data fails validation based on an expected set of properties not conforming to shape constraints. | tests/examples/test_validation.py | test_action_inheritance_XFAIL_validation | daniel-markus-msab/UCO | 34 | python | def test_action_inheritance_XFAIL_validation():
'\n \n '
confirm_validation_errors('action_inheritance_XFAIL_validation.ttl', {str(NS_UCO_ACTION.action), str(NS_UCO_ACTION.actionStatus)}) | def test_action_inheritance_XFAIL_validation():
'\n \n '
confirm_validation_errors('action_inheritance_XFAIL_validation.ttl', {str(NS_UCO_ACTION.action), str(NS_UCO_ACTION.actionStatus)})<|docstring|>Confirm the XFAIL instance data fails validation based on an expected set of properties not conforming to shape constraints.<|endoftext|> |
218829b7ac9fd4e077b4dc1d123967d4ff5a0334adb4d74b4ba91e6c506086cb | def test_action_result_PASS_validation():
'\n Confirm the PASS instance data passes validation.\n '
g = load_validation_graph('action_result_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph) | Confirm the PASS instance data passes validation. | tests/examples/test_validation.py | test_action_result_PASS_validation | daniel-markus-msab/UCO | 34 | python | def test_action_result_PASS_validation():
'\n \n '
g = load_validation_graph('action_result_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph) | def test_action_result_PASS_validation():
'\n \n '
g = load_validation_graph('action_result_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph)<|docstring|>Confirm the PASS instance data passes validation.<|endoftext|> |
f1da0f1739de626d39e83c3aaf714731098883bf89623efbdfdc4ceef50e48ed | def test_location_PASS_validation():
'\n Confirm the PASS instance data passes validation.\n '
g = load_validation_graph('location_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph) | Confirm the PASS instance data passes validation. | tests/examples/test_validation.py | test_location_PASS_validation | daniel-markus-msab/UCO | 34 | python | def test_location_PASS_validation():
'\n \n '
g = load_validation_graph('location_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph) | def test_location_PASS_validation():
'\n \n '
g = load_validation_graph('location_PASS_validation.ttl', True)
assert isinstance(g, rdflib.Graph)<|docstring|>Confirm the PASS instance data passes validation.<|endoftext|> |
35c2a809c4a78d44e5054d1b4ef34cfbb8773f4a5327f71851af8ac82eaffa6a | def test_location_XFAIL_validation():
'\n Confirm the XFAIL instance data fails validation based on an expected set of properties not conforming to shape constraints.\n '
confirm_validation_errors('location_XFAIL_validation.ttl', {str(NS_UCO_CORE.hasFacet), str(NS_UCO_LOCATION.postalCode)}) | Confirm the XFAIL instance data fails validation based on an expected set of properties not conforming to shape constraints. | tests/examples/test_validation.py | test_location_XFAIL_validation | daniel-markus-msab/UCO | 34 | python | def test_location_XFAIL_validation():
'\n \n '
confirm_validation_errors('location_XFAIL_validation.ttl', {str(NS_UCO_CORE.hasFacet), str(NS_UCO_LOCATION.postalCode)}) | def test_location_XFAIL_validation():
'\n \n '
confirm_validation_errors('location_XFAIL_validation.ttl', {str(NS_UCO_CORE.hasFacet), str(NS_UCO_LOCATION.postalCode)})<|docstring|>Confirm the XFAIL instance data fails validation based on an expected set of properties not conforming to shape constraints.<|endoftext|> |
d6ce561829611b47a5b7cd8a332a2a3ecf9139aebd9b2a06205f129bbc1323d2 | @pytest.mark.xfail(strict=True)
def test_location_XFAIL_validation_XPASS_wrong_concept_name():
'\n Report the XFAIL instance data XPASSes one of the induced errors - the non-existent concept core:descriptionButWrongName is not reported as an error.\n Should a SHACL mechanism later be identified to detect this error, this test can be retired, adding NS_UCO_CORE.descriptionButWrongName to the expected IRI set in test_location_XFAIL_validation().\n '
confirm_validation_errors('location_XFAIL_validation.ttl', {str(NS_UCO_CORE.descriptionButWrongName)}) | Report the XFAIL instance data XPASSes one of the induced errors - the non-existent concept core:descriptionButWrongName is not reported as an error.
Should a SHACL mechanism later be identified to detect this error, this test can be retired, adding NS_UCO_CORE.descriptionButWrongName to the expected IRI set in test_location_XFAIL_validation(). | tests/examples/test_validation.py | test_location_XFAIL_validation_XPASS_wrong_concept_name | daniel-markus-msab/UCO | 34 | python | @pytest.mark.xfail(strict=True)
def test_location_XFAIL_validation_XPASS_wrong_concept_name():
'\n Report the XFAIL instance data XPASSes one of the induced errors - the non-existent concept core:descriptionButWrongName is not reported as an error.\n Should a SHACL mechanism later be identified to detect this error, this test can be retired, adding NS_UCO_CORE.descriptionButWrongName to the expected IRI set in test_location_XFAIL_validation().\n '
confirm_validation_errors('location_XFAIL_validation.ttl', {str(NS_UCO_CORE.descriptionButWrongName)}) | @pytest.mark.xfail(strict=True)
def test_location_XFAIL_validation_XPASS_wrong_concept_name():
'\n Report the XFAIL instance data XPASSes one of the induced errors - the non-existent concept core:descriptionButWrongName is not reported as an error.\n Should a SHACL mechanism later be identified to detect this error, this test can be retired, adding NS_UCO_CORE.descriptionButWrongName to the expected IRI set in test_location_XFAIL_validation().\n '
confirm_validation_errors('location_XFAIL_validation.ttl', {str(NS_UCO_CORE.descriptionButWrongName)})<|docstring|>Report the XFAIL instance data XPASSes one of the induced errors - the non-existent concept core:descriptionButWrongName is not reported as an error.
Should a SHACL mechanism later be identified to detect this error, this test can be retired, adding NS_UCO_CORE.descriptionButWrongName to the expected IRI set in test_location_XFAIL_validation().<|endoftext|> |
a859102337e808233ed4068aa41ec34aa558fc4850fc409932c3bbc48ab91a0f | def init_outputs():
'\n Initialize the GPIO outputs\n '
global ser
global __initialized
global SERVO_HORIZ, SERVO_VERT
try:
__initialized
except NameError:
pass
else:
return
with open('calibration.json', 'r') as f:
conf = json.load(f)
SERVO_HORIZ = conf['horiz']
SERVO_VERT = conf['vert']
ser = serial.Serial(SERIAL_PORT, 115200)
ser.write(b'\xff')
atexit.register(disable_servos)
__initialized = True | Initialize the GPIO outputs | servo.py | init_outputs | cdbbnnyCode/maze-challenge | 0 | python | def init_outputs():
'\n \n '
global ser
global __initialized
global SERVO_HORIZ, SERVO_VERT
try:
__initialized
except NameError:
pass
else:
return
with open('calibration.json', 'r') as f:
conf = json.load(f)
SERVO_HORIZ = conf['horiz']
SERVO_VERT = conf['vert']
ser = serial.Serial(SERIAL_PORT, 115200)
ser.write(b'\xff')
atexit.register(disable_servos)
__initialized = True | def init_outputs():
'\n \n '
global ser
global __initialized
global SERVO_HORIZ, SERVO_VERT
try:
__initialized
except NameError:
pass
else:
return
with open('calibration.json', 'r') as f:
conf = json.load(f)
SERVO_HORIZ = conf['horiz']
SERVO_VERT = conf['vert']
ser = serial.Serial(SERIAL_PORT, 115200)
ser.write(b'\xff')
atexit.register(disable_servos)
__initialized = True<|docstring|>Initialize the GPIO outputs<|endoftext|> |
b00c259a043ae8d5e9ac1dfe943ed8b7070afcb8a088ed44bb927a0eceef6f5c | def set_servo_angle(pin, pos):
"\n Set the servo's position based on a value from 0 (minimum angle) to 1 (maximum angle).\n "
normalized = pos
if (normalized > 1):
normalized = 1
elif (normalized < 0):
normalized = 0
angle = int((SERVO_MIN_ANGLE + (normalized * (SERVO_MAX_ANGLE - SERVO_MIN_ANGLE))))
ser.write(b'\xff')
ser.write(bytes([(pin - 12), angle])) | Set the servo's position based on a value from 0 (minimum angle) to 1 (maximum angle). | servo.py | set_servo_angle | cdbbnnyCode/maze-challenge | 0 | python | def set_servo_angle(pin, pos):
"\n \n "
normalized = pos
if (normalized > 1):
normalized = 1
elif (normalized < 0):
normalized = 0
angle = int((SERVO_MIN_ANGLE + (normalized * (SERVO_MAX_ANGLE - SERVO_MIN_ANGLE))))
ser.write(b'\xff')
ser.write(bytes([(pin - 12), angle])) | def set_servo_angle(pin, pos):
"\n \n "
normalized = pos
if (normalized > 1):
normalized = 1
elif (normalized < 0):
normalized = 0
angle = int((SERVO_MIN_ANGLE + (normalized * (SERVO_MAX_ANGLE - SERVO_MIN_ANGLE))))
ser.write(b'\xff')
ser.write(bytes([(pin - 12), angle]))<|docstring|>Set the servo's position based on a value from 0 (minimum angle) to 1 (maximum angle).<|endoftext|> |
8e7248374fc737025b20d4361f0d6a8f0f62c25d54661f0dad03d2f6c095cd38 | def disable_servo(pin):
'\n Attempt to disable the specified servo by turning off the PWM signal.\n Note that this method does NOT work for digital servos--they will\n continue to run until they are powered off.\n '
ser.write(b'\xff')
ser.write(bytes([((pin - 12) + 128)])) | Attempt to disable the specified servo by turning off the PWM signal.
Note that this method does NOT work for digital servos--they will
continue to run until they are powered off. | servo.py | disable_servo | cdbbnnyCode/maze-challenge | 0 | python | def disable_servo(pin):
'\n Attempt to disable the specified servo by turning off the PWM signal.\n Note that this method does NOT work for digital servos--they will\n continue to run until they are powered off.\n '
ser.write(b'\xff')
ser.write(bytes([((pin - 12) + 128)])) | def disable_servo(pin):
'\n Attempt to disable the specified servo by turning off the PWM signal.\n Note that this method does NOT work for digital servos--they will\n continue to run until they are powered off.\n '
ser.write(b'\xff')
ser.write(bytes([((pin - 12) + 128)]))<|docstring|>Attempt to disable the specified servo by turning off the PWM signal.
Note that this method does NOT work for digital servos--they will
continue to run until they are powered off.<|endoftext|> |
21aa4bdb40ec9ac5a335f4bbf8ef0365a4689d3c6bc981883632a8b9400190ee | def disable_servos():
'\n Attempt to disable all of the servos. Does NOT work for digital servos,\n since they need to be powered off to stop running.\n '
disable_servo(SERVO_HORIZ)
disable_servo(SERVO_VERT) | Attempt to disable all of the servos. Does NOT work for digital servos,
since they need to be powered off to stop running. | servo.py | disable_servos | cdbbnnyCode/maze-challenge | 0 | python | def disable_servos():
'\n Attempt to disable all of the servos. Does NOT work for digital servos,\n since they need to be powered off to stop running.\n '
disable_servo(SERVO_HORIZ)
disable_servo(SERVO_VERT) | def disable_servos():
'\n Attempt to disable all of the servos. Does NOT work for digital servos,\n since they need to be powered off to stop running.\n '
disable_servo(SERVO_HORIZ)
disable_servo(SERVO_VERT)<|docstring|>Attempt to disable all of the servos. Does NOT work for digital servos,
since they need to be powered off to stop running.<|endoftext|> |
db57c9de42a425cb95804e642f98847edaf9742aba3004460b18ca23b6202258 | def HostMountInfo(vim, *args, **kwargs):
'The HostMountInfo data object provides information related to a configured\n mount point. This object does not include information about the mounted file\n system. (See HostFileSystemMountInfo.)'
obj = vim.client.factory.create('{urn:vim25}HostMountInfo')
if ((len(args) + len(kwargs)) < 1):
raise IndexError(('Expected at least 2 arguments got: %d' % len(args)))
required = ['accessMode']
optional = ['accessible', 'mounted', 'path', 'dynamicProperty', 'dynamicType']
for (name, arg) in zip((required + optional), args):
setattr(obj, name, arg)
for (name, value) in kwargs.items():
if (name in (required + optional)):
setattr(obj, name, value)
else:
raise InvalidArgumentError(('Invalid argument: %s. Expected one of %s' % (name, ', '.join((required + optional)))))
return obj | The HostMountInfo data object provides information related to a configured
mount point. This object does not include information about the mounted file
system. (See HostFileSystemMountInfo.) | pyvisdk/do/host_mount_info.py | HostMountInfo | Infinidat/pyvisdk | 0 | python | def HostMountInfo(vim, *args, **kwargs):
'The HostMountInfo data object provides information related to a configured\n mount point. This object does not include information about the mounted file\n system. (See HostFileSystemMountInfo.)'
obj = vim.client.factory.create('{urn:vim25}HostMountInfo')
if ((len(args) + len(kwargs)) < 1):
raise IndexError(('Expected at least 2 arguments got: %d' % len(args)))
required = ['accessMode']
optional = ['accessible', 'mounted', 'path', 'dynamicProperty', 'dynamicType']
for (name, arg) in zip((required + optional), args):
setattr(obj, name, arg)
for (name, value) in kwargs.items():
if (name in (required + optional)):
setattr(obj, name, value)
else:
raise InvalidArgumentError(('Invalid argument: %s. Expected one of %s' % (name, ', '.join((required + optional)))))
return obj | def HostMountInfo(vim, *args, **kwargs):
'The HostMountInfo data object provides information related to a configured\n mount point. This object does not include information about the mounted file\n system. (See HostFileSystemMountInfo.)'
obj = vim.client.factory.create('{urn:vim25}HostMountInfo')
if ((len(args) + len(kwargs)) < 1):
raise IndexError(('Expected at least 2 arguments got: %d' % len(args)))
required = ['accessMode']
optional = ['accessible', 'mounted', 'path', 'dynamicProperty', 'dynamicType']
for (name, arg) in zip((required + optional), args):
setattr(obj, name, arg)
for (name, value) in kwargs.items():
if (name in (required + optional)):
setattr(obj, name, value)
else:
raise InvalidArgumentError(('Invalid argument: %s. Expected one of %s' % (name, ', '.join((required + optional)))))
return obj<|docstring|>The HostMountInfo data object provides information related to a configured
mount point. This object does not include information about the mounted file
system. (See HostFileSystemMountInfo.)<|endoftext|> |
4935d82d83daa0a8435671960a815a67c21b3e93891703d4719fee84d25c58af | def get_all(context: typing.Optional[Context]=None) -> typing.Mapping[(str, object)]:
'Returns the name/value pairs in the Baggage\n\n Args:\n context: The Context to use. If not set, uses current Context\n\n Returns:\n The name/value pairs in the Baggage\n '
baggage = get_value(_BAGGAGE_KEY, context=context)
if isinstance(baggage, dict):
return MappingProxyType(baggage)
return MappingProxyType({}) | Returns the name/value pairs in the Baggage
Args:
context: The Context to use. If not set, uses current Context
Returns:
The name/value pairs in the Baggage | opentelemetry-api/src/opentelemetry/baggage/__init__.py | get_all | marcusway/opentelemetry-python | 1 | python | def get_all(context: typing.Optional[Context]=None) -> typing.Mapping[(str, object)]:
'Returns the name/value pairs in the Baggage\n\n Args:\n context: The Context to use. If not set, uses current Context\n\n Returns:\n The name/value pairs in the Baggage\n '
baggage = get_value(_BAGGAGE_KEY, context=context)
if isinstance(baggage, dict):
return MappingProxyType(baggage)
return MappingProxyType({}) | def get_all(context: typing.Optional[Context]=None) -> typing.Mapping[(str, object)]:
'Returns the name/value pairs in the Baggage\n\n Args:\n context: The Context to use. If not set, uses current Context\n\n Returns:\n The name/value pairs in the Baggage\n '
baggage = get_value(_BAGGAGE_KEY, context=context)
if isinstance(baggage, dict):
return MappingProxyType(baggage)
return MappingProxyType({})<|docstring|>Returns the name/value pairs in the Baggage
Args:
context: The Context to use. If not set, uses current Context
Returns:
The name/value pairs in the Baggage<|endoftext|> |
9d407627742737b91203d4e0f11a883264f090e4c965283654970e56cf04a0a4 | def get_baggage(name: str, context: typing.Optional[Context]=None) -> typing.Optional[object]:
'Provides access to the value for a name/value pair in the\n Baggage\n\n Args:\n name: The name of the value to retrieve\n context: The Context to use. If not set, uses current Context\n\n Returns:\n The value associated with the given name, or null if the given name is\n not present.\n '
return get_all(context=context).get(name) | Provides access to the value for a name/value pair in the
Baggage
Args:
name: The name of the value to retrieve
context: The Context to use. If not set, uses current Context
Returns:
The value associated with the given name, or null if the given name is
not present. | opentelemetry-api/src/opentelemetry/baggage/__init__.py | get_baggage | marcusway/opentelemetry-python | 1 | python | def get_baggage(name: str, context: typing.Optional[Context]=None) -> typing.Optional[object]:
'Provides access to the value for a name/value pair in the\n Baggage\n\n Args:\n name: The name of the value to retrieve\n context: The Context to use. If not set, uses current Context\n\n Returns:\n The value associated with the given name, or null if the given name is\n not present.\n '
return get_all(context=context).get(name) | def get_baggage(name: str, context: typing.Optional[Context]=None) -> typing.Optional[object]:
'Provides access to the value for a name/value pair in the\n Baggage\n\n Args:\n name: The name of the value to retrieve\n context: The Context to use. If not set, uses current Context\n\n Returns:\n The value associated with the given name, or null if the given name is\n not present.\n '
return get_all(context=context).get(name)<|docstring|>Provides access to the value for a name/value pair in the
Baggage
Args:
name: The name of the value to retrieve
context: The Context to use. If not set, uses current Context
Returns:
The value associated with the given name, or null if the given name is
not present.<|endoftext|> |
5578e797198d0e1b115fcc0439f6d36867a502db5ad8b79941d2059032754e44 | def set_baggage(name: str, value: object, context: typing.Optional[Context]=None) -> Context:
'Sets a value in the Baggage\n\n Args:\n name: The name of the value to set\n value: The value to set\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with the value updated\n '
baggage = dict(get_all(context=context))
baggage[name] = value
return set_value(_BAGGAGE_KEY, baggage, context=context) | Sets a value in the Baggage
Args:
name: The name of the value to set
value: The value to set
context: The Context to use. If not set, uses current Context
Returns:
A Context with the value updated | opentelemetry-api/src/opentelemetry/baggage/__init__.py | set_baggage | marcusway/opentelemetry-python | 1 | python | def set_baggage(name: str, value: object, context: typing.Optional[Context]=None) -> Context:
'Sets a value in the Baggage\n\n Args:\n name: The name of the value to set\n value: The value to set\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with the value updated\n '
baggage = dict(get_all(context=context))
baggage[name] = value
return set_value(_BAGGAGE_KEY, baggage, context=context) | def set_baggage(name: str, value: object, context: typing.Optional[Context]=None) -> Context:
'Sets a value in the Baggage\n\n Args:\n name: The name of the value to set\n value: The value to set\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with the value updated\n '
baggage = dict(get_all(context=context))
baggage[name] = value
return set_value(_BAGGAGE_KEY, baggage, context=context)<|docstring|>Sets a value in the Baggage
Args:
name: The name of the value to set
value: The value to set
context: The Context to use. If not set, uses current Context
Returns:
A Context with the value updated<|endoftext|> |
32db51d215419ff754b518a9d0a8a4e5a6cce7953030b5f5ac54e002e1305205 | def remove_baggage(name: str, context: typing.Optional[Context]=None) -> Context:
'Removes a value from the Baggage\n\n Args:\n name: The name of the value to remove\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with the name/value removed\n '
baggage = dict(get_all(context=context))
baggage.pop(name, None)
return set_value(_BAGGAGE_KEY, baggage, context=context) | Removes a value from the Baggage
Args:
name: The name of the value to remove
context: The Context to use. If not set, uses current Context
Returns:
A Context with the name/value removed | opentelemetry-api/src/opentelemetry/baggage/__init__.py | remove_baggage | marcusway/opentelemetry-python | 1 | python | def remove_baggage(name: str, context: typing.Optional[Context]=None) -> Context:
'Removes a value from the Baggage\n\n Args:\n name: The name of the value to remove\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with the name/value removed\n '
baggage = dict(get_all(context=context))
baggage.pop(name, None)
return set_value(_BAGGAGE_KEY, baggage, context=context) | def remove_baggage(name: str, context: typing.Optional[Context]=None) -> Context:
'Removes a value from the Baggage\n\n Args:\n name: The name of the value to remove\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with the name/value removed\n '
baggage = dict(get_all(context=context))
baggage.pop(name, None)
return set_value(_BAGGAGE_KEY, baggage, context=context)<|docstring|>Removes a value from the Baggage
Args:
name: The name of the value to remove
context: The Context to use. If not set, uses current Context
Returns:
A Context with the name/value removed<|endoftext|> |
e9f5db29990834194b52b361cd6d77009bc2c6511b8ffcff201eeb833ba0879d | def clear(context: typing.Optional[Context]=None) -> Context:
'Removes all values from the Baggage\n\n Args:\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with all baggage entries removed\n '
return set_value(_BAGGAGE_KEY, {}, context=context) | Removes all values from the Baggage
Args:
context: The Context to use. If not set, uses current Context
Returns:
A Context with all baggage entries removed | opentelemetry-api/src/opentelemetry/baggage/__init__.py | clear | marcusway/opentelemetry-python | 1 | python | def clear(context: typing.Optional[Context]=None) -> Context:
'Removes all values from the Baggage\n\n Args:\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with all baggage entries removed\n '
return set_value(_BAGGAGE_KEY, {}, context=context) | def clear(context: typing.Optional[Context]=None) -> Context:
'Removes all values from the Baggage\n\n Args:\n context: The Context to use. If not set, uses current Context\n\n Returns:\n A Context with all baggage entries removed\n '
return set_value(_BAGGAGE_KEY, {}, context=context)<|docstring|>Removes all values from the Baggage
Args:
context: The Context to use. If not set, uses current Context
Returns:
A Context with all baggage entries removed<|endoftext|> |
6f272f22ca9467dc32efb0aca7890bd9b1f5903eb19d25129ba57caa6f6d7d10 | def test_simplify_coefficients(self):
'test suite for utils.simplify_coefficients.\n 1. it returns integer list when sympy integer list is passed.\n 2. it returns integer list when list of sympy symbols is passed.\n 3. it returns list of sympy rational when sympy number list is passed.\n 4. it returns list of sympy rational when list of sympy symbols is passed.\n 5. it returns tuple of integer list and a integer when sympy integer list and as_numer_denom=True are passed.\n 6. it returns tuple of integer list and a integer when list of sympy symbols and as_numer_denom=True are passed.\n '
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
expected = [(n / 1) for n in numer]
actual = simplify_coefficients(numer)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) for _ in range(len_)]
coef = [(n * h) for n in numer]
expected = [(n / 1) for n in numer]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
denom = [sp.Number(random.randint(1, 1000)) for _ in range(len_)]
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
coef = [(numer[i] / denom[i]) for i in range(len_)]
expected = [sp.Rational(numer[i], denom[i]) for i in range(len_)]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
denom = [sp.Number(random.randint(1, 1000)) for _ in range(len_)]
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
coef = [((numer[i] / denom[i]) * h) for i in range(len_)]
expected = [sp.Rational(numer[i], denom[i]) for i in range(len_)]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
expected = ([(n / 1) for n in numer], 1)
actual = simplify_coefficients(numer, as_numer_denom=True)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) for _ in range(len_)]
coef = [(n * h) for n in numer]
expected = ([(n / 1) for n in numer], 1)
actual = simplify_coefficients(coef, as_numer_denom=True)
self.assertEqual(expected, actual)
for coef in [[sp.Number((3 / 4)), sp.Number((2 / 3)), sp.Number((4 / 5))]]:
with self.subTest(coef):
expected = ([45, 40, 48], 60)
actual = simplify_coefficients(coef, as_numer_denom=True)
self.assertEqual(expected, actual) | test suite for utils.simplify_coefficients.
1. it returns integer list when sympy integer list is passed.
2. it returns integer list when list of sympy symbols is passed.
3. it returns list of sympy rational when sympy number list is passed.
4. it returns list of sympy rational when list of sympy symbols is passed.
5. it returns tuple of integer list and a integer when sympy integer list and as_numer_denom=True are passed.
6. it returns tuple of integer list and a integer when list of sympy symbols and as_numer_denom=True are passed. | test/test_utils.py | test_simplify_coefficients | degawa/dictos | 1 | python | def test_simplify_coefficients(self):
'test suite for utils.simplify_coefficients.\n 1. it returns integer list when sympy integer list is passed.\n 2. it returns integer list when list of sympy symbols is passed.\n 3. it returns list of sympy rational when sympy number list is passed.\n 4. it returns list of sympy rational when list of sympy symbols is passed.\n 5. it returns tuple of integer list and a integer when sympy integer list and as_numer_denom=True are passed.\n 6. it returns tuple of integer list and a integer when list of sympy symbols and as_numer_denom=True are passed.\n '
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
expected = [(n / 1) for n in numer]
actual = simplify_coefficients(numer)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) for _ in range(len_)]
coef = [(n * h) for n in numer]
expected = [(n / 1) for n in numer]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
denom = [sp.Number(random.randint(1, 1000)) for _ in range(len_)]
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
coef = [(numer[i] / denom[i]) for i in range(len_)]
expected = [sp.Rational(numer[i], denom[i]) for i in range(len_)]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
denom = [sp.Number(random.randint(1, 1000)) for _ in range(len_)]
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
coef = [((numer[i] / denom[i]) * h) for i in range(len_)]
expected = [sp.Rational(numer[i], denom[i]) for i in range(len_)]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
expected = ([(n / 1) for n in numer], 1)
actual = simplify_coefficients(numer, as_numer_denom=True)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) for _ in range(len_)]
coef = [(n * h) for n in numer]
expected = ([(n / 1) for n in numer], 1)
actual = simplify_coefficients(coef, as_numer_denom=True)
self.assertEqual(expected, actual)
for coef in [[sp.Number((3 / 4)), sp.Number((2 / 3)), sp.Number((4 / 5))]]:
with self.subTest(coef):
expected = ([45, 40, 48], 60)
actual = simplify_coefficients(coef, as_numer_denom=True)
self.assertEqual(expected, actual) | def test_simplify_coefficients(self):
'test suite for utils.simplify_coefficients.\n 1. it returns integer list when sympy integer list is passed.\n 2. it returns integer list when list of sympy symbols is passed.\n 3. it returns list of sympy rational when sympy number list is passed.\n 4. it returns list of sympy rational when list of sympy symbols is passed.\n 5. it returns tuple of integer list and a integer when sympy integer list and as_numer_denom=True are passed.\n 6. it returns tuple of integer list and a integer when list of sympy symbols and as_numer_denom=True are passed.\n '
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
expected = [(n / 1) for n in numer]
actual = simplify_coefficients(numer)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) for _ in range(len_)]
coef = [(n * h) for n in numer]
expected = [(n / 1) for n in numer]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
denom = [sp.Number(random.randint(1, 1000)) for _ in range(len_)]
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
coef = [(numer[i] / denom[i]) for i in range(len_)]
expected = [sp.Rational(numer[i], denom[i]) for i in range(len_)]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
denom = [sp.Number(random.randint(1, 1000)) for _ in range(len_)]
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
coef = [((numer[i] / denom[i]) * h) for i in range(len_)]
expected = [sp.Rational(numer[i], denom[i]) for i in range(len_)]
actual = simplify_coefficients(coef)
self.assertEqual(expected, actual)
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [sp.Number(random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)) for _ in range(len_)]
expected = ([(n / 1) for n in numer], 1)
actual = simplify_coefficients(numer, as_numer_denom=True)
self.assertEqual(expected, actual)
h = sp.symbols(random_string(MAX_SYMBOL_LENGTH))
for len_ in random_int(2, (STENCIL_HALF_WIDTH * 2)):
with self.subTest(len_):
numer = [random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max) for _ in range(len_)]
coef = [(n * h) for n in numer]
expected = ([(n / 1) for n in numer], 1)
actual = simplify_coefficients(coef, as_numer_denom=True)
self.assertEqual(expected, actual)
for coef in [[sp.Number((3 / 4)), sp.Number((2 / 3)), sp.Number((4 / 5))]]:
with self.subTest(coef):
expected = ([45, 40, 48], 60)
actual = simplify_coefficients(coef, as_numer_denom=True)
self.assertEqual(expected, actual)<|docstring|>test suite for utils.simplify_coefficients.
1. it returns integer list when sympy integer list is passed.
2. it returns integer list when list of sympy symbols is passed.
3. it returns list of sympy rational when sympy number list is passed.
4. it returns list of sympy rational when list of sympy symbols is passed.
5. it returns tuple of integer list and a integer when sympy integer list and as_numer_denom=True are passed.
6. it returns tuple of integer list and a integer when list of sympy symbols and as_numer_denom=True are passed.<|endoftext|> |
ce97c537cbee3d2a9135f02672a8f771e84d6e1dd1d5113c79c53305578bfd74 | def test_extract_coefficients_as_numer_denom(self):
'test suite for utils.extract_coefficients_as_numer_denom.'
(x, h, f_0, f_1, f_2, f_3, f_4) = sp.symbols('x h f_0 f_1 f_2 f_3 f_4')
f_set = [f_0, f_1]
expr = (((((- 1.0) * f_0) * (((- 0.5) * h) + x)) / h) + (((1.0 * f_1) * ((0.5 * h) + x)) / h))
with self.subTest(expr):
expected = ([((0.5 * h) - (1.0 * x)), ((0.5 * h) + (1.0 * x))], [h])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
expr = ((0.5 * f_0) + (0.5 * f_1))
with self.subTest(expr):
expected = ([0.5, 0.5], [1])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
f_set = [f_0, f_1, f_2, f_3, f_4]
expr = (((((((((f_0 * x) * (((- 2) * h) + x)) * ((- h) + x)) * (h + x)) / (24 * (h ** 4))) - (((((f_1 * x) * (((- 2) * h) + x)) * ((- h) + x)) * ((2 * h) + x)) / (6 * (h ** 4)))) + (((((f_2 * (((- 2) * h) + x)) * ((- h) + x)) * (h + x)) * ((2 * h) + x)) / (4 * (h ** 4)))) - (((((f_3 * x) * (((- 2) * h) + x)) * (h + x)) * ((2 * h) + x)) / (6 * (h ** 4)))) + (((((f_4 * x) * ((- h) + x)) * (h + x)) * ((2 * h) + x)) / (24 * (h ** 4))))
with self.subTest(expr):
expected = ([(((((2 * (h ** 3)) * x) - ((h ** 2) * (x ** 2))) - ((2 * h) * (x ** 3))) + (x ** 4)), ((((((- 16) * (h ** 3)) * x) + ((16 * (h ** 2)) * (x ** 2))) + ((4 * h) * (x ** 3))) - (4 * (x ** 4))), (((24 * (h ** 4)) - ((30 * (h ** 2)) * (x ** 2))) + (6 * (x ** 4))), (((((16 * (h ** 3)) * x) + ((16 * (h ** 2)) * (x ** 2))) - ((4 * h) * (x ** 3))) - (4 * (x ** 4))), ((((((- 2) * (h ** 3)) * x) - ((h ** 2) * (x ** 2))) + ((2 * h) * (x ** 3))) + (x ** 4))], [(24 * (h ** 4))])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
expr = (((((- f_0) / 6) + ((2 * f_1) / 3)) + ((2 * f_2) / 3)) - (f_3 / 6))
with self.subTest(expr):
expected = ([(- 1), 4, 4, (- 1)], [6])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual) | test suite for utils.extract_coefficients_as_numer_denom. | test/test_utils.py | test_extract_coefficients_as_numer_denom | degawa/dictos | 1 | python | def test_extract_coefficients_as_numer_denom(self):
(x, h, f_0, f_1, f_2, f_3, f_4) = sp.symbols('x h f_0 f_1 f_2 f_3 f_4')
f_set = [f_0, f_1]
expr = (((((- 1.0) * f_0) * (((- 0.5) * h) + x)) / h) + (((1.0 * f_1) * ((0.5 * h) + x)) / h))
with self.subTest(expr):
expected = ([((0.5 * h) - (1.0 * x)), ((0.5 * h) + (1.0 * x))], [h])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
expr = ((0.5 * f_0) + (0.5 * f_1))
with self.subTest(expr):
expected = ([0.5, 0.5], [1])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
f_set = [f_0, f_1, f_2, f_3, f_4]
expr = (((((((((f_0 * x) * (((- 2) * h) + x)) * ((- h) + x)) * (h + x)) / (24 * (h ** 4))) - (((((f_1 * x) * (((- 2) * h) + x)) * ((- h) + x)) * ((2 * h) + x)) / (6 * (h ** 4)))) + (((((f_2 * (((- 2) * h) + x)) * ((- h) + x)) * (h + x)) * ((2 * h) + x)) / (4 * (h ** 4)))) - (((((f_3 * x) * (((- 2) * h) + x)) * (h + x)) * ((2 * h) + x)) / (6 * (h ** 4)))) + (((((f_4 * x) * ((- h) + x)) * (h + x)) * ((2 * h) + x)) / (24 * (h ** 4))))
with self.subTest(expr):
expected = ([(((((2 * (h ** 3)) * x) - ((h ** 2) * (x ** 2))) - ((2 * h) * (x ** 3))) + (x ** 4)), ((((((- 16) * (h ** 3)) * x) + ((16 * (h ** 2)) * (x ** 2))) + ((4 * h) * (x ** 3))) - (4 * (x ** 4))), (((24 * (h ** 4)) - ((30 * (h ** 2)) * (x ** 2))) + (6 * (x ** 4))), (((((16 * (h ** 3)) * x) + ((16 * (h ** 2)) * (x ** 2))) - ((4 * h) * (x ** 3))) - (4 * (x ** 4))), ((((((- 2) * (h ** 3)) * x) - ((h ** 2) * (x ** 2))) + ((2 * h) * (x ** 3))) + (x ** 4))], [(24 * (h ** 4))])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
expr = (((((- f_0) / 6) + ((2 * f_1) / 3)) + ((2 * f_2) / 3)) - (f_3 / 6))
with self.subTest(expr):
expected = ([(- 1), 4, 4, (- 1)], [6])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual) | def test_extract_coefficients_as_numer_denom(self):
(x, h, f_0, f_1, f_2, f_3, f_4) = sp.symbols('x h f_0 f_1 f_2 f_3 f_4')
f_set = [f_0, f_1]
expr = (((((- 1.0) * f_0) * (((- 0.5) * h) + x)) / h) + (((1.0 * f_1) * ((0.5 * h) + x)) / h))
with self.subTest(expr):
expected = ([((0.5 * h) - (1.0 * x)), ((0.5 * h) + (1.0 * x))], [h])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
expr = ((0.5 * f_0) + (0.5 * f_1))
with self.subTest(expr):
expected = ([0.5, 0.5], [1])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
f_set = [f_0, f_1, f_2, f_3, f_4]
expr = (((((((((f_0 * x) * (((- 2) * h) + x)) * ((- h) + x)) * (h + x)) / (24 * (h ** 4))) - (((((f_1 * x) * (((- 2) * h) + x)) * ((- h) + x)) * ((2 * h) + x)) / (6 * (h ** 4)))) + (((((f_2 * (((- 2) * h) + x)) * ((- h) + x)) * (h + x)) * ((2 * h) + x)) / (4 * (h ** 4)))) - (((((f_3 * x) * (((- 2) * h) + x)) * (h + x)) * ((2 * h) + x)) / (6 * (h ** 4)))) + (((((f_4 * x) * ((- h) + x)) * (h + x)) * ((2 * h) + x)) / (24 * (h ** 4))))
with self.subTest(expr):
expected = ([(((((2 * (h ** 3)) * x) - ((h ** 2) * (x ** 2))) - ((2 * h) * (x ** 3))) + (x ** 4)), ((((((- 16) * (h ** 3)) * x) + ((16 * (h ** 2)) * (x ** 2))) + ((4 * h) * (x ** 3))) - (4 * (x ** 4))), (((24 * (h ** 4)) - ((30 * (h ** 2)) * (x ** 2))) + (6 * (x ** 4))), (((((16 * (h ** 3)) * x) + ((16 * (h ** 2)) * (x ** 2))) - ((4 * h) * (x ** 3))) - (4 * (x ** 4))), ((((((- 2) * (h ** 3)) * x) - ((h ** 2) * (x ** 2))) + ((2 * h) * (x ** 3))) + (x ** 4))], [(24 * (h ** 4))])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)
expr = (((((- f_0) / 6) + ((2 * f_1) / 3)) + ((2 * f_2) / 3)) - (f_3 / 6))
with self.subTest(expr):
expected = ([(- 1), 4, 4, (- 1)], [6])
actual = extract_coefficients_as_numer_denom(expr, f_set)
self.assertEqual(expected, actual)<|docstring|>test suite for utils.extract_coefficients_as_numer_denom.<|endoftext|> |
81ca539725da0efedbcb7380b5262bf68a9ab90e0eff03f1526e55a83785217d | def test_utils_exception(self):
'test suite for exception in utils'
stencil = [0]
with self.subTest('create_coordinate_symbols with too narrow stencil'):
with self.assertRaises(TooNarrowError):
create_coordinate_symbols(stencil)
stencil = [1, 1, 2, 3, 4]
with self.subTest('create_coordinate_symbols with invalid stencil'):
with self.assertRaises(DuplicatedPointError):
create_coordinate_symbols(stencil) | test suite for exception in utils | test/test_utils.py | test_utils_exception | degawa/dictos | 1 | python | def test_utils_exception(self):
stencil = [0]
with self.subTest('create_coordinate_symbols with too narrow stencil'):
with self.assertRaises(TooNarrowError):
create_coordinate_symbols(stencil)
stencil = [1, 1, 2, 3, 4]
with self.subTest('create_coordinate_symbols with invalid stencil'):
with self.assertRaises(DuplicatedPointError):
create_coordinate_symbols(stencil) | def test_utils_exception(self):
stencil = [0]
with self.subTest('create_coordinate_symbols with too narrow stencil'):
with self.assertRaises(TooNarrowError):
create_coordinate_symbols(stencil)
stencil = [1, 1, 2, 3, 4]
with self.subTest('create_coordinate_symbols with invalid stencil'):
with self.assertRaises(DuplicatedPointError):
create_coordinate_symbols(stencil)<|docstring|>test suite for exception in utils<|endoftext|> |
d6852f5290a900edc0ebc144249645183e887b4c4d6c7766e5d0553bccbc6051 | def test_utils_sort_by_subscript(self):
'test suite for utils.sort_by_subscript.'
for half_width in range(1, 11):
with self.subTest(f'get subscript {((half_width * 2) + 1)}-point stencil'):
stencil = [to_subscript(i) for i in range((- half_width), (half_width + 1))]
sym_str = ''.join([(((('f' + '_{') + s) + '}') + ' ') for s in stencil])
f_set = sp.symbols(sym_str)
expected = dot_product(f_set, [1 for _ in range(len(f_set))], evaluate=False)
num = random_int((- half_width), half_width)
stencil = [to_subscript(i) for i in num]
sym_str = ''.join([(((('f' + '_{') + s) + '}') + ' ') for s in stencil])
f_set = sp.symbols(sym_str)
eq = dot_product(f_set, [1 for _ in range(len(f_set))], evaluate=False)
actual = sort_by_subscript(eq)
ac_str = str(actual)
ex_str = str(expected)
self.assertEqual(ex_str, ac_str) | test suite for utils.sort_by_subscript. | test/test_utils.py | test_utils_sort_by_subscript | degawa/dictos | 1 | python | def test_utils_sort_by_subscript(self):
for half_width in range(1, 11):
with self.subTest(f'get subscript {((half_width * 2) + 1)}-point stencil'):
stencil = [to_subscript(i) for i in range((- half_width), (half_width + 1))]
sym_str = .join([(((('f' + '_{') + s) + '}') + ' ') for s in stencil])
f_set = sp.symbols(sym_str)
expected = dot_product(f_set, [1 for _ in range(len(f_set))], evaluate=False)
num = random_int((- half_width), half_width)
stencil = [to_subscript(i) for i in num]
sym_str = .join([(((('f' + '_{') + s) + '}') + ' ') for s in stencil])
f_set = sp.symbols(sym_str)
eq = dot_product(f_set, [1 for _ in range(len(f_set))], evaluate=False)
actual = sort_by_subscript(eq)
ac_str = str(actual)
ex_str = str(expected)
self.assertEqual(ex_str, ac_str) | def test_utils_sort_by_subscript(self):
for half_width in range(1, 11):
with self.subTest(f'get subscript {((half_width * 2) + 1)}-point stencil'):
stencil = [to_subscript(i) for i in range((- half_width), (half_width + 1))]
sym_str = .join([(((('f' + '_{') + s) + '}') + ' ') for s in stencil])
f_set = sp.symbols(sym_str)
expected = dot_product(f_set, [1 for _ in range(len(f_set))], evaluate=False)
num = random_int((- half_width), half_width)
stencil = [to_subscript(i) for i in num]
sym_str = .join([(((('f' + '_{') + s) + '}') + ' ') for s in stencil])
f_set = sp.symbols(sym_str)
eq = dot_product(f_set, [1 for _ in range(len(f_set))], evaluate=False)
actual = sort_by_subscript(eq)
ac_str = str(actual)
ex_str = str(expected)
self.assertEqual(ex_str, ac_str)<|docstring|>test suite for utils.sort_by_subscript.<|endoftext|> |
2318bc2ee51c05d25be70e20daec1b1bc568dfb85ce27ecfab1add2fc4877a14 | def randstr(length=6):
'\n >>> randstr() == randstr()\n False\n\n >>> len(randstr(8))\n 8\n '
ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
return ''.join([choice(ALPHABET) for _ in range(length)]) | >>> randstr() == randstr()
False
>>> len(randstr(8))
8 | gcloud/utils/random.py | randstr | SHUN-YI/bk-sops | 2 | python | def randstr(length=6):
'\n >>> randstr() == randstr()\n False\n\n >>> len(randstr(8))\n 8\n '
ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
return .join([choice(ALPHABET) for _ in range(length)]) | def randstr(length=6):
'\n >>> randstr() == randstr()\n False\n\n >>> len(randstr(8))\n 8\n '
ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
return .join([choice(ALPHABET) for _ in range(length)])<|docstring|>>>> randstr() == randstr()
False
>>> len(randstr(8))
8<|endoftext|> |
ba488ae947072c0e8155208ea11606b4e874c2d725c896e04c0196abdf15bd9b | @abc.abstractmethod
def encode(self):
'Abstract encode function. Children must implement this function.'
pass | Abstract encode function. Children must implement this function. | ring/coder.py | encode | xymz/ring | 450 | python | @abc.abstractmethod
def encode(self):
pass | @abc.abstractmethod
def encode(self):
pass<|docstring|>Abstract encode function. Children must implement this function.<|endoftext|> |
74463a52a9b9766c3456dedeedde8a2b1538fc3cb2cae51f48ce92ef499b0d73 | @abc.abstractmethod
def decode(self):
'Abstract decode function. Children must implement this function.'
pass | Abstract decode function. Children must implement this function. | ring/coder.py | decode | xymz/ring | 450 | python | @abc.abstractmethod
def decode(self):
pass | @abc.abstractmethod
def decode(self):
pass<|docstring|>Abstract decode function. Children must implement this function.<|endoftext|> |
37359c4104446eea6edbd219671135c2bebce50717d67b3800091e0bb0d2b290 | def register(self, coder_name, raw_coder):
'Register `raw_coder` as a new coder with alias `coder_name`.\n\n Coder can be one of next types:\n\n - A :class:`Coder` subclass.\n - A :class:`CoderTuple` object.\n - A tuple of encode and decode functions.\n - An object which has encode and decode methods.\n\n :param str coder_name: A new coder name to register.\n :param object raw_coder: A new coder object.\n '
coder = coderize(raw_coder)
self.coders[coder_name] = coder | Register `raw_coder` as a new coder with alias `coder_name`.
Coder can be one of next types:
- A :class:`Coder` subclass.
- A :class:`CoderTuple` object.
- A tuple of encode and decode functions.
- An object which has encode and decode methods.
:param str coder_name: A new coder name to register.
:param object raw_coder: A new coder object. | ring/coder.py | register | xymz/ring | 450 | python | def register(self, coder_name, raw_coder):
'Register `raw_coder` as a new coder with alias `coder_name`.\n\n Coder can be one of next types:\n\n - A :class:`Coder` subclass.\n - A :class:`CoderTuple` object.\n - A tuple of encode and decode functions.\n - An object which has encode and decode methods.\n\n :param str coder_name: A new coder name to register.\n :param object raw_coder: A new coder object.\n '
coder = coderize(raw_coder)
self.coders[coder_name] = coder | def register(self, coder_name, raw_coder):
'Register `raw_coder` as a new coder with alias `coder_name`.\n\n Coder can be one of next types:\n\n - A :class:`Coder` subclass.\n - A :class:`CoderTuple` object.\n - A tuple of encode and decode functions.\n - An object which has encode and decode methods.\n\n :param str coder_name: A new coder name to register.\n :param object raw_coder: A new coder object.\n '
coder = coderize(raw_coder)
self.coders[coder_name] = coder<|docstring|>Register `raw_coder` as a new coder with alias `coder_name`.
Coder can be one of next types:
- A :class:`Coder` subclass.
- A :class:`CoderTuple` object.
- A tuple of encode and decode functions.
- An object which has encode and decode methods.
:param str coder_name: A new coder name to register.
:param object raw_coder: A new coder object.<|endoftext|> |
99a0e0b307e5963aac1efaf8b06f0a491508ce5adce2ce40565b1e7c235ec4e2 | def get(self, coder_name):
'Get the registered coder for corresponding `coder_name`.\n\n This method is internally called when `coder` parameter is passed to\n ring object factory.\n '
coder = self.coders.get(coder_name)
return coder | Get the registered coder for corresponding `coder_name`.
This method is internally called when `coder` parameter is passed to
ring object factory. | ring/coder.py | get | xymz/ring | 450 | python | def get(self, coder_name):
'Get the registered coder for corresponding `coder_name`.\n\n This method is internally called when `coder` parameter is passed to\n ring object factory.\n '
coder = self.coders.get(coder_name)
return coder | def get(self, coder_name):
'Get the registered coder for corresponding `coder_name`.\n\n This method is internally called when `coder` parameter is passed to\n ring object factory.\n '
coder = self.coders.get(coder_name)
return coder<|docstring|>Get the registered coder for corresponding `coder_name`.
This method is internally called when `coder` parameter is passed to
ring object factory.<|endoftext|> |
b3b8139985ccaeba6479667f26ddb44ffada5b703b2c66d5a3d22636f3be16d5 | @staticmethod
def encode(data):
'Dump data to JSON string and encode it to UTF-8 bytes'
return json_mod.dumps(data).encode('utf-8') | Dump data to JSON string and encode it to UTF-8 bytes | ring/coder.py | encode | xymz/ring | 450 | python | @staticmethod
def encode(data):
return json_mod.dumps(data).encode('utf-8') | @staticmethod
def encode(data):
return json_mod.dumps(data).encode('utf-8')<|docstring|>Dump data to JSON string and encode it to UTF-8 bytes<|endoftext|> |
184a7e07435dbe946a4f62cfa7e636ff50238c93a2ce0efb0dc40cc1402f0f7f | @staticmethod
def decode(binary):
'Decode UTF-8 bytes to JSON string and load it to object'
return json_mod.loads(binary.decode('utf-8')) | Decode UTF-8 bytes to JSON string and load it to object | ring/coder.py | decode | xymz/ring | 450 | python | @staticmethod
def decode(binary):
return json_mod.loads(binary.decode('utf-8')) | @staticmethod
def decode(binary):
return json_mod.loads(binary.decode('utf-8'))<|docstring|>Decode UTF-8 bytes to JSON string and load it to object<|endoftext|> |
124a6055954b75a719bd5f47a0ad10c4b1501c8cb24224069478a97f5d688e43 | @staticmethod
def encode(data):
'Serialize dataclass object to json encoded dictionary'
target_dict = (type(data).__name__, dataclasses.asdict(data))
return JsonCoder.encode(target_dict) | Serialize dataclass object to json encoded dictionary | ring/coder.py | encode | xymz/ring | 450 | python | @staticmethod
def encode(data):
target_dict = (type(data).__name__, dataclasses.asdict(data))
return JsonCoder.encode(target_dict) | @staticmethod
def encode(data):
target_dict = (type(data).__name__, dataclasses.asdict(data))
return JsonCoder.encode(target_dict)<|docstring|>Serialize dataclass object to json encoded dictionary<|endoftext|> |
22caac84f9416ae862446220184af307dbe66d6fc141c18bcb8295218c3ec9c5 | @staticmethod
def decode(binary):
'Deserialize json encoded dictionary to dataclass object'
(name, fields) = JsonCoder.decode(binary)
dataclass = dataclasses.make_dataclass(name, [(key, type(value)) for (key, value) in fields.items()])
instance = dataclass(**fields)
return instance | Deserialize json encoded dictionary to dataclass object | ring/coder.py | decode | xymz/ring | 450 | python | @staticmethod
def decode(binary):
(name, fields) = JsonCoder.decode(binary)
dataclass = dataclasses.make_dataclass(name, [(key, type(value)) for (key, value) in fields.items()])
instance = dataclass(**fields)
return instance | @staticmethod
def decode(binary):
(name, fields) = JsonCoder.decode(binary)
dataclass = dataclasses.make_dataclass(name, [(key, type(value)) for (key, value) in fields.items()])
instance = dataclass(**fields)
return instance<|docstring|>Deserialize json encoded dictionary to dataclass object<|endoftext|> |
def cross_validate(model_provider: BaseModelProvider, X: np.ndarray, y: np.ndarray, seq_lengths: Optional[List[int]]=None, nfolds: int=3, train_length: Union[(int, TrainLength)]=3, bs: int=DEFAULT_BS, trainer: BaseTrainer=None, metric: Metric=None, hp=None, device=None, **train_params) -> CrossValidationStats:
    """Evaluate a model using k-fold cross validation.

    Args:
        model_provider: model provider that initializes the model, optimizer and loss function once per fold.
        X: array of inputs. The size of the first dimension must be equal to the number of examples; the rest of the
            dimensions must adhere to the requirements of the model.
        y: array of targets. The size of the first dimension size must be the number of examples; the rest of the
            dimensions must adhere to the requirements of the model.
        seq_lengths: length of every input sequence, with length ``(X.shape[0],)``. It must be ``None`` when
            ``model_provider`` returns a model that doesn't need them.
        nfolds: number of cross validation folds.
        train_length: if it's an integer, number of epochs to train the model for each fold. Otherwise, it must be an
            instance of a ``TrainLength`` child class that defines a stop criterion.
        bs: batch size used for both the train and validation data loaders of every fold.
        trainer: training session runner that is called once per fold. Defaults to ``DefaultTrainer()``.
        metric: metric function callable that is evaluated once per fold, epoch and set (train/valid). Its results
            are included in the output of ``cross_validate``.
        hp: hyperparameters passed to ``model_provider.create``.
        device: device where the model is trained and the metrics are evaluated, with PyTorch format.
        **train_params: extra keyword arguments forwarded verbatim to ``trainer.train``.

    Returns:
        Statistics (losses, metrics, steps) summarized and per fold.
    """
    stats_by_fold = []
    if trainer is None:
        trainer = DefaultTrainer()
    for (X_train, X_test, y_train, y_test, sl_train, sl_test) in get_kfolds(X, y, seq_lengths=seq_lengths, n=nfolds):
        # Sequence lengths are only batched alongside the data when the model needs them.
        train_arrays = ([X_train, y_train] if (seq_lengths is None) else [X_train, y_train, sl_train])
        valid_arrays = ([X_test, y_test] if (seq_lengths is None) else [X_test, y_test, sl_test])
        train_tensors = [torch.tensor(t) for t in train_arrays]
        valid_tensors = [torch.tensor(t) for t in valid_arrays]
        dls = DataLoaders(get_dl_from_tensors(*train_tensors, bs=bs), get_dl_from_tensors(*valid_tensors, bs=bs))
        (model, opt, loss_func, clip_grad) = model_provider.create(hp=hp, device=device)
        # Deep-copy so a stateful stop criterion (e.g. early stopping) starts fresh each fold.
        train_length_copy = copy.deepcopy(train_length)
        stats = trainer.train(train_length_copy, model, dls, loss_func, opt, metric=metric, device=device, clip_grad=clip_grad, **train_params)
        stats_by_fold.append(stats)
        # Drop the fold's model/optimizer references so their memory can be reclaimed
        # before the next fold allocates a new model.
        del model, opt
        gc.collect()
    return CrossValidationStats(stats_by_fold, (metric.lower_is_better if (metric is not None) else False))
Args:
model_provider: model provider that initializes the model, optimizer and loss function once per fold.
X: array of inputs. The size of the first dimension must be equal to the number of examples; the rest of the
dimensions must adhere to the requirements of the model.
y: array of targets. The size of the first dimension size must be the number of examples; the rest of the
dimensions must adhere to the requirements of the model.
seq_lengths: length of every input sequence, with length `(X.shape[0],)`. It must be `None` when
`model_provider` returns a model that doesn't need them.
nfolds: number of cross validation folds.
train_length: if it's an integer, number of epochs to train the model for each fold. Otherwise, it must be an
instance of a `TrainLength` child class that defines a stop criterion.
trainer: training session runner that is called once per fold.
metric: metric function callable that is evaluated once per fold, epoch and set (train/valid). Its results
are included in the output of `cross_validate`.
hp: hyperparameters passed to `model_provider.create`.
device: device where the model is trained and the metrics are evaluated, with PyTorch format.
Returns:
Statistics (losses, metrics, steps) summarized and per fold. | mininlp/validate.py | cross_validate | davidleonfdez/mini-nlp-framework | 0 | python | def cross_validate(model_provider: BaseModelProvider, X: np.ndarray, y: np.ndarray, seq_lengths: Optional[List[int]]=None, nfolds: int=3, train_length: Union[(int, TrainLength)]=3, bs: int=DEFAULT_BS, trainer: BaseTrainer=None, metric: Metric=None, hp=None, device=None, **train_params) -> CrossValidationStats:
"Evaluate a model using k-fold cross validation.\n \n Args:\n model_provider: model provider that initializes the model, optimizer and loss function once per fold.\n X: array of inputs. The size of the first dimension must be equal to the number of examples; the rest of the \n dimensions must adhere to the requirements of the model.\n y: array of targets. The size of the first dimension size must be the number of examples; the rest of the \n dimensions must adhere to the requirements of the model.\n seq_lengths: length of every input sequence, with length `(X.shape[0],)`. It must be `None` when \n `model_provider` returns a model that doesn't need them.\n nfolds: number of cross validation folds.\n train_length: if it's an integer, number of epochs to train the model for each fold. Otherwise, it must be an\n instance of a `TrainLength` child class that defines a stop criterion.\n trainer: training session runner that is called once per fold.\n metric: metric function callable that is evaluated once per fold, epoch and set (train/valid). Its results\n are included in the output of `cross_validate`.\n hp: hyperparameters passed to `model_provider.create`.\n device: device where the model is trained and the metrics are evaluated, with PyTorch format.\n\n Returns:\n Statistics (losses, metrics, steps) summarized and per fold.\n "
stats_by_fold = []
if (trainer is None):
trainer = DefaultTrainer()
for (X_train, X_test, y_train, y_test, sl_train, sl_test) in get_kfolds(X, y, seq_lengths=seq_lengths, n=nfolds):
train_arrays = ([X_train, y_train] if (seq_lengths is None) else [X_train, y_train, sl_train])
valid_arrays = ([X_test, y_test] if (seq_lengths is None) else [X_test, y_test, sl_test])
train_tensors = [torch.tensor(t) for t in train_arrays]
valid_tensors = [torch.tensor(t) for t in valid_arrays]
dls = DataLoaders(get_dl_from_tensors(*train_tensors, bs=bs), get_dl_from_tensors(*valid_tensors, bs=bs))
(model, opt, loss_func, clip_grad) = model_provider.create(hp=hp, device=device)
train_length_copy = copy.deepcopy(train_length)
stats = trainer.train(train_length_copy, model, dls, loss_func, opt, metric=metric, device=device, clip_grad=clip_grad, **train_params)
stats_by_fold.append(stats)
model = None
opt = None
gc.collect()
return CrossValidationStats(stats_by_fold, (metric.lower_is_better if (metric is not None) else False)) | def cross_validate(model_provider: BaseModelProvider, X: np.ndarray, y: np.ndarray, seq_lengths: Optional[List[int]]=None, nfolds: int=3, train_length: Union[(int, TrainLength)]=3, bs: int=DEFAULT_BS, trainer: BaseTrainer=None, metric: Metric=None, hp=None, device=None, **train_params) -> CrossValidationStats:
"Evaluate a model using k-fold cross validation.\n \n Args:\n model_provider: model provider that initializes the model, optimizer and loss function once per fold.\n X: array of inputs. The size of the first dimension must be equal to the number of examples; the rest of the \n dimensions must adhere to the requirements of the model.\n y: array of targets. The size of the first dimension size must be the number of examples; the rest of the \n dimensions must adhere to the requirements of the model.\n seq_lengths: length of every input sequence, with length `(X.shape[0],)`. It must be `None` when \n `model_provider` returns a model that doesn't need them.\n nfolds: number of cross validation folds.\n train_length: if it's an integer, number of epochs to train the model for each fold. Otherwise, it must be an\n instance of a `TrainLength` child class that defines a stop criterion.\n trainer: training session runner that is called once per fold.\n metric: metric function callable that is evaluated once per fold, epoch and set (train/valid). Its results\n are included in the output of `cross_validate`.\n hp: hyperparameters passed to `model_provider.create`.\n device: device where the model is trained and the metrics are evaluated, with PyTorch format.\n\n Returns:\n Statistics (losses, metrics, steps) summarized and per fold.\n "
stats_by_fold = []
if (trainer is None):
trainer = DefaultTrainer()
for (X_train, X_test, y_train, y_test, sl_train, sl_test) in get_kfolds(X, y, seq_lengths=seq_lengths, n=nfolds):
train_arrays = ([X_train, y_train] if (seq_lengths is None) else [X_train, y_train, sl_train])
valid_arrays = ([X_test, y_test] if (seq_lengths is None) else [X_test, y_test, sl_test])
train_tensors = [torch.tensor(t) for t in train_arrays]
valid_tensors = [torch.tensor(t) for t in valid_arrays]
dls = DataLoaders(get_dl_from_tensors(*train_tensors, bs=bs), get_dl_from_tensors(*valid_tensors, bs=bs))
(model, opt, loss_func, clip_grad) = model_provider.create(hp=hp, device=device)
train_length_copy = copy.deepcopy(train_length)
stats = trainer.train(train_length_copy, model, dls, loss_func, opt, metric=metric, device=device, clip_grad=clip_grad, **train_params)
stats_by_fold.append(stats)
model = None
opt = None
gc.collect()
return CrossValidationStats(stats_by_fold, (metric.lower_is_better if (metric is not None) else False))<|docstring|>Evaluate a model using k-fold cross validation.
Args:
model_provider: model provider that initializes the model, optimizer and loss function once per fold.
X: array of inputs. The size of the first dimension must be equal to the number of examples; the rest of the
dimensions must adhere to the requirements of the model.
y: array of targets. The size of the first dimension size must be the number of examples; the rest of the
dimensions must adhere to the requirements of the model.
seq_lengths: length of every input sequence, with length `(X.shape[0],)`. It must be `None` when
`model_provider` returns a model that doesn't need them.
nfolds: number of cross validation folds.
train_length: if it's an integer, number of epochs to train the model for each fold. Otherwise, it must be an
instance of a `TrainLength` child class that defines a stop criterion.
trainer: training session runner that is called once per fold.
metric: metric function callable that is evaluated once per fold, epoch and set (train/valid). Its results
are included in the output of `cross_validate`.
hp: hyperparameters passed to `model_provider.create`.
device: device where the model is trained and the metrics are evaluated, with PyTorch format.
Returns:
Statistics (losses, metrics, steps) summarized and per fold.<|endoftext|> |
@property
def avg_best_metric_and_epoch(self) -> Tuple[(float, float)]:
    """Average of the per-fold (best metric, best epoch) pairs, element-wise."""
    per_fold = np.array(self.best_metric_and_epoch_by_fold)
    return tuple(per_fold.mean(axis=0))
def avg_best_metric_and_epoch(self) -> Tuple[(float, float)]:
return tuple(np.array(self.best_metric_and_epoch_by_fold).mean(axis=0)) | @property
def avg_best_metric_and_epoch(self) -> Tuple[(float, float)]:
return tuple(np.array(self.best_metric_and_epoch_by_fold).mean(axis=0))<|docstring|>Calculate the average between the best metric/epoch of each fold<|endoftext|> |
def setId(self, id):
    """Store the given id on this request object.

    :param id: (Optional)
    """
    self.id = id
'\n \n '
self.id = id | def setId(self, id):
'\n \n '
self.id = id<|docstring|>:param id: (Optional)<|endoftext|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.