hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7300bb85137e40f00493456c34280037c0a2f36 | 273 | py | Python | src/oscar/apps/offer/config.py | sainjusajan/django-oscar | 466e8edc807be689b0a28c9e525c8323cc48b8e1 | [
"BSD-3-Clause"
] | null | null | null | src/oscar/apps/offer/config.py | sainjusajan/django-oscar | 466e8edc807be689b0a28c9e525c8323cc48b8e1 | [
"BSD-3-Clause"
] | null | null | null | src/oscar/apps/offer/config.py | sainjusajan/django-oscar | 466e8edc807be689b0a28c9e525c8323cc48b8e1 | [
"BSD-3-Clause"
] | null | null | null | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class OfferConfig(AppConfig):
label = 'offer'
name = 'oscar.apps.offer'
verbose_name = _('Offer')
def ready(self):
from . import signals # noqa
| 22.75 | 56 | 0.663004 | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class OfferConfig(AppConfig):
label = 'offer'
name = 'oscar.apps.offer'
verbose_name = _('Offer')
def ready(self):
from . import signals
| true | true |
f7300c7f3d0d042ee232156f7fea0be53fb5f268 | 18,769 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/quicktest/passcriteria_1568efcb71d423db7b9caee1463792cd.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 20 | 2019-05-07T01:59:14.000Z | 2022-02-11T05:24:47.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/quicktest/passcriteria_1568efcb71d423db7b9caee1463792cd.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 60 | 2019-04-03T18:59:35.000Z | 2022-02-22T12:05:05.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/quicktest/passcriteria_1568efcb71d423db7b9caee1463792cd.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 13 | 2019-05-20T10:48:31.000Z | 2021-10-06T07:45:44.000Z | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class PassCriteria(Base):
"""This applies the Pass Criteria to each trial in the test and determines whether the trial passed or failed.
The PassCriteria class encapsulates a required passCriteria resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'passCriteria'
_SDM_ATT_MAP = {
'EnableLatencyPassFail': 'enableLatencyPassFail',
'EnablePassFail': 'enablePassFail',
'EnableRatePassFail': 'enableRatePassFail',
'LatencyThresholdMode': 'latencyThresholdMode',
'LatencyThresholdScale': 'latencyThresholdScale',
'LatencyThresholdValue': 'latencyThresholdValue',
'PassCriteriaLoadRateMode': 'passCriteriaLoadRateMode',
'PassCriteriaLoadRateScale': 'passCriteriaLoadRateScale',
'PassCriteriaLoadRateValue': 'passCriteriaLoadRateValue',
'PassFailFrequency': 'passFailFrequency',
}
_SDM_ENUM_MAP = {
'latencyThresholdMode': ['average', 'maximum'],
'latencyThresholdScale': ['ms', 'ns', 'us'],
'passCriteriaLoadRateMode': ['average', 'minimum'],
'passCriteriaLoadRateScale': ['fps', 'gbps', 'kbps', 'mbps', 'percent'],
'passFailFrequency': ['framesizes', 'trials'],
}
def __init__(self, parent, list_op=False):
super(PassCriteria, self).__init__(parent, list_op)
@property
def EnableLatencyPassFail(self):
# type: () -> bool
"""
Returns
-------
- bool: If true, the latency pass fail criteria is set.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableLatencyPassFail'])
@EnableLatencyPassFail.setter
def EnableLatencyPassFail(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['EnableLatencyPassFail'], value)
@property
def EnablePassFail(self):
# type: () -> bool
"""
Returns
-------
- bool: If true, the pass fail criteria is set.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnablePassFail'])
@EnablePassFail.setter
def EnablePassFail(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['EnablePassFail'], value)
@property
def EnableRatePassFail(self):
# type: () -> bool
"""
Returns
-------
- bool: If true, the rate of pass and fail criteria is set.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableRatePassFail'])
@EnableRatePassFail.setter
def EnableRatePassFail(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['EnableRatePassFail'], value)
@property
def LatencyThresholdMode(self):
# type: () -> str
"""
Returns
-------
- str(average | maximum): The threshold mode for the latency.
"""
return self._get_attribute(self._SDM_ATT_MAP['LatencyThresholdMode'])
@LatencyThresholdMode.setter
def LatencyThresholdMode(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LatencyThresholdMode'], value)
@property
def LatencyThresholdScale(self):
# type: () -> str
"""
Returns
-------
- str(ms | ns | us): The scale by which the latency threshold is measured.
"""
return self._get_attribute(self._SDM_ATT_MAP['LatencyThresholdScale'])
@LatencyThresholdScale.setter
def LatencyThresholdScale(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['LatencyThresholdScale'], value)
@property
def LatencyThresholdValue(self):
# type: () -> int
"""
Returns
-------
- number: The value by which legacy threshold value is to be measured.
"""
return self._get_attribute(self._SDM_ATT_MAP['LatencyThresholdValue'])
@LatencyThresholdValue.setter
def LatencyThresholdValue(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['LatencyThresholdValue'], value)
@property
def PassCriteriaLoadRateMode(self):
# type: () -> str
"""
Returns
-------
- str(average | minimum): The pass criteria set for the load rate mode.
"""
return self._get_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateMode'])
@PassCriteriaLoadRateMode.setter
def PassCriteriaLoadRateMode(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateMode'], value)
@property
def PassCriteriaLoadRateScale(self):
# type: () -> str
"""
Returns
-------
- str(fps | gbps | kbps | mbps | percent): The pass criteria scale in which the load rate is to be measured.
"""
return self._get_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateScale'])
@PassCriteriaLoadRateScale.setter
def PassCriteriaLoadRateScale(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateScale'], value)
@property
def PassCriteriaLoadRateValue(self):
# type: () -> int
"""
Returns
-------
- number: The pass criteria for the Value of the load rate.
"""
return self._get_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateValue'])
@PassCriteriaLoadRateValue.setter
def PassCriteriaLoadRateValue(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateValue'], value)
@property
def PassFailFrequency(self):
# type: () -> str
"""
Returns
-------
- str(framesizes | trials): NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['PassFailFrequency'])
@PassFailFrequency.setter
def PassFailFrequency(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['PassFailFrequency'], value)
def update(self, EnableLatencyPassFail=None, EnablePassFail=None, EnableRatePassFail=None, LatencyThresholdMode=None, LatencyThresholdScale=None, LatencyThresholdValue=None, PassCriteriaLoadRateMode=None, PassCriteriaLoadRateScale=None, PassCriteriaLoadRateValue=None, PassFailFrequency=None):
# type: (bool, bool, bool, str, str, int, str, str, int, str) -> PassCriteria
"""Updates passCriteria resource on the server.
Args
----
- EnableLatencyPassFail (bool): If true, the latency pass fail criteria is set.
- EnablePassFail (bool): If true, the pass fail criteria is set.
- EnableRatePassFail (bool): If true, the rate of pass and fail criteria is set.
- LatencyThresholdMode (str(average | maximum)): The threshold mode for the latency.
- LatencyThresholdScale (str(ms | ns | us)): The scale by which the latency threshold is measured.
- LatencyThresholdValue (number): The value by which legacy threshold value is to be measured.
- PassCriteriaLoadRateMode (str(average | minimum)): The pass criteria set for the load rate mode.
- PassCriteriaLoadRateScale (str(fps | gbps | kbps | mbps | percent)): The pass criteria scale in which the load rate is to be measured.
- PassCriteriaLoadRateValue (number): The pass criteria for the Value of the load rate.
- PassFailFrequency (str(framesizes | trials)): NOT DEFINED
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def Apply(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the apply operation on the server.
Applies the specified Quick Test.
apply(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('apply', payload=payload, response_object=None)
def ApplyAsync(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the applyAsync operation on the server.
applyAsync(async_operation=bool)
--------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('applyAsync', payload=payload, response_object=None)
def ApplyAsyncResult(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[bool, None]
"""Executes the applyAsyncResult operation on the server.
applyAsyncResult(async_operation=bool)bool
------------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns bool:
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('applyAsyncResult', payload=payload, response_object=None)
def ApplyITWizardConfiguration(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the applyITWizardConfiguration operation on the server.
Applies the specified Quick Test.
applyITWizardConfiguration(async_operation=bool)
------------------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)
def GenerateReport(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the generateReport operation on the server.
Generate a PDF report for the last succesfull test run.
generateReport(async_operation=bool)string
------------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: This method is asynchronous and has no return value.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('generateReport', payload=payload, response_object=None)
def Run(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[List[str], None]
"""Executes the run operation on the server.
Starts the specified Quick Test and waits for its execution to finish.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
run(async_operation=bool)list
-----------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns list(str): This method is synchronous and returns the result of the test.
run(InputParameters=string, async_operation=bool)list
-----------------------------------------------------
- InputParameters (str): The input arguments of the test.
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns list(str): This method is synchronous and returns the result of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('run', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the start operation on the server.
Starts the specified Quick Test.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
start(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
start(InputParameters=string, async_operation=bool)
---------------------------------------------------
- InputParameters (str): The input arguments of the test.
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the stop operation on the server.
Stops the currently running Quick Test.
stop(async_operation=bool)
--------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None)
def WaitForTest(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[List[str], None]
"""Executes the waitForTest operation on the server.
Waits for the execution of the specified Quick Test to be completed.
waitForTest(async_operation=bool)list
-------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns list(str): This method is synchronous and returns the result of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('waitForTest', payload=payload, response_object=None)
| 45.335749 | 297 | 0.644414 |
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class PassCriteria(Base):
__slots__ = ()
_SDM_NAME = 'passCriteria'
_SDM_ATT_MAP = {
'EnableLatencyPassFail': 'enableLatencyPassFail',
'EnablePassFail': 'enablePassFail',
'EnableRatePassFail': 'enableRatePassFail',
'LatencyThresholdMode': 'latencyThresholdMode',
'LatencyThresholdScale': 'latencyThresholdScale',
'LatencyThresholdValue': 'latencyThresholdValue',
'PassCriteriaLoadRateMode': 'passCriteriaLoadRateMode',
'PassCriteriaLoadRateScale': 'passCriteriaLoadRateScale',
'PassCriteriaLoadRateValue': 'passCriteriaLoadRateValue',
'PassFailFrequency': 'passFailFrequency',
}
_SDM_ENUM_MAP = {
'latencyThresholdMode': ['average', 'maximum'],
'latencyThresholdScale': ['ms', 'ns', 'us'],
'passCriteriaLoadRateMode': ['average', 'minimum'],
'passCriteriaLoadRateScale': ['fps', 'gbps', 'kbps', 'mbps', 'percent'],
'passFailFrequency': ['framesizes', 'trials'],
}
def __init__(self, parent, list_op=False):
super(PassCriteria, self).__init__(parent, list_op)
@property
def EnableLatencyPassFail(self):
return self._get_attribute(self._SDM_ATT_MAP['EnableLatencyPassFail'])
@EnableLatencyPassFail.setter
def EnableLatencyPassFail(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableLatencyPassFail'], value)
@property
def EnablePassFail(self):
return self._get_attribute(self._SDM_ATT_MAP['EnablePassFail'])
@EnablePassFail.setter
def EnablePassFail(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnablePassFail'], value)
@property
def EnableRatePassFail(self):
return self._get_attribute(self._SDM_ATT_MAP['EnableRatePassFail'])
@EnableRatePassFail.setter
def EnableRatePassFail(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableRatePassFail'], value)
@property
def LatencyThresholdMode(self):
return self._get_attribute(self._SDM_ATT_MAP['LatencyThresholdMode'])
@LatencyThresholdMode.setter
def LatencyThresholdMode(self, value):
self._set_attribute(self._SDM_ATT_MAP['LatencyThresholdMode'], value)
@property
def LatencyThresholdScale(self):
return self._get_attribute(self._SDM_ATT_MAP['LatencyThresholdScale'])
@LatencyThresholdScale.setter
def LatencyThresholdScale(self, value):
self._set_attribute(self._SDM_ATT_MAP['LatencyThresholdScale'], value)
@property
def LatencyThresholdValue(self):
return self._get_attribute(self._SDM_ATT_MAP['LatencyThresholdValue'])
@LatencyThresholdValue.setter
def LatencyThresholdValue(self, value):
self._set_attribute(self._SDM_ATT_MAP['LatencyThresholdValue'], value)
@property
def PassCriteriaLoadRateMode(self):
return self._get_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateMode'])
@PassCriteriaLoadRateMode.setter
def PassCriteriaLoadRateMode(self, value):
self._set_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateMode'], value)
@property
def PassCriteriaLoadRateScale(self):
return self._get_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateScale'])
@PassCriteriaLoadRateScale.setter
def PassCriteriaLoadRateScale(self, value):
self._set_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateScale'], value)
@property
def PassCriteriaLoadRateValue(self):
return self._get_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateValue'])
@PassCriteriaLoadRateValue.setter
def PassCriteriaLoadRateValue(self, value):
self._set_attribute(self._SDM_ATT_MAP['PassCriteriaLoadRateValue'], value)
@property
def PassFailFrequency(self):
return self._get_attribute(self._SDM_ATT_MAP['PassFailFrequency'])
@PassFailFrequency.setter
def PassFailFrequency(self, value):
self._set_attribute(self._SDM_ATT_MAP['PassFailFrequency'], value)
def update(self, EnableLatencyPassFail=None, EnablePassFail=None, EnableRatePassFail=None, LatencyThresholdMode=None, LatencyThresholdScale=None, LatencyThresholdValue=None, PassCriteriaLoadRateMode=None, PassCriteriaLoadRateScale=None, PassCriteriaLoadRateValue=None, PassFailFrequency=None):
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def Apply(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('apply', payload=payload, response_object=None)
def ApplyAsync(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('applyAsync', payload=payload, response_object=None)
def ApplyAsyncResult(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('applyAsyncResult', payload=payload, response_object=None)
def ApplyITWizardConfiguration(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)
def GenerateReport(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('generateReport', payload=payload, response_object=None)
def Run(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('run', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None)
def WaitForTest(self, *args, **kwargs):
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('waitForTest', payload=payload, response_object=None)
| true | true |
f7300d0ecc7c55bf1cf645207acdfe80dd0197b9 | 40 | py | Python | GAN-man/__init__.py | shauray8/GameGAN-stuff | 54eec30e839279b697166d7ad1bbcf0d342f62b3 | [
"MIT"
] | null | null | null | GAN-man/__init__.py | shauray8/GameGAN-stuff | 54eec30e839279b697166d7ad1bbcf0d342f62b3 | [
"MIT"
] | null | null | null | GAN-man/__init__.py | shauray8/GameGAN-stuff | 54eec30e839279b697166d7ad1bbcf0d342f62b3 | [
"MIT"
] | null | null | null | ## right now i know nothing about this
| 20 | 39 | 0.725 | true | true | |
f7300e0e1a30b53883b40f11ba8186a2c62c128e | 2,827 | py | Python | gw2/gw2_authserver.py | Mixaill/galaxy-integration-gw2 | 3727c90e340763e61738f8edd97025242ff34946 | [
"MIT"
] | 20 | 2019-07-26T10:38:26.000Z | 2021-01-31T17:16:45.000Z | gw2/gw2_authserver.py | FriendsOfGalaxy/galaxy-integration-gw2 | dbb5cd082f4ebeef502e2185773e1ab36ead7c74 | [
"MIT"
] | 18 | 2019-08-01T10:18:00.000Z | 2022-03-01T08:10:56.000Z | gw2/gw2_authserver.py | Mixaill/galaxy-integration-gw2 | 3727c90e340763e61738f8edd97025242ff34946 | [
"MIT"
] | 4 | 2019-08-08T16:39:53.000Z | 2020-10-17T09:01:47.000Z | # (c) 2019-2020 Mikhail Paulyshka
# SPDX-License-Identifier: MIT
import os.path
import aiohttp
import common.mglx_webserver
from .gw2_constants import GW2AuthorizationResult
class Gw2AuthServer(common.mglx_webserver.MglxWebserver):
def __init__(self, gw2api = None):
super(Gw2AuthServer, self).__init__()
self.__gw2api = gw2api
self.add_route('GET', '/', self.handle_login_get)
self.add_route('GET', '/login', self.handle_login_get)
self.add_route('GET', '/login_baddata', self.handle_login_baddata_get)
self.add_route('GET', '/login_failed', self.handle_login_baddata_get)
self.add_route('GET', '/login_noaccount', self.handle_login_noaccount_get)
self.add_route('GET', '/finished', self.handle_finished_get)
self.add_route('POST', '/', self.handle_login_post)
self.add_route('POST', '/login', self.handle_login_post)
#
# Handlers
#
async def handle_login_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login.html'))
async def handle_login_baddata_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_baddata.html'))
async def handle_login_failed_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_failed.html'))
async def handle_login_noaccount_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_noaccount.html'))
async def handle_finished_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_noaccount.html'))
async def handle_login_post(self, request):
data = await request.post()
#check for apikey field
if 'apikey' not in data:
raise aiohttp.web.HTTPFound('/login_baddata')
#process authentication
auth_result = None
try:
auth_result = await self.__gw2api.do_auth_apikey(data['apikey'])
except Exception:
self._logger.exception("exception on doing auth:")
raise aiohttp.web.HTTPFound('/login_baddata')
if auth_result == GW2AuthorizationResult.FINISHED:
raise aiohttp.web.HTTPFound('/finished')
elif auth_result == GW2AuthorizationResult.FAILED_NO_ACCOUNT:
raise aiohttp.web.HTTPFound('/login_noaccount')
elif auth_result == GW2AuthorizationResult.FAILED_BAD_DATA:
raise aiohttp.web.HTTPFound('/login_baddata')
else:
raise aiohttp.web.HTTPFound('/login_failed')
raise aiohttp.web.HTTPFound('/login_failed')
| 39.263889 | 126 | 0.696498 |
import os.path
import aiohttp
import common.mglx_webserver
from .gw2_constants import GW2AuthorizationResult
class Gw2AuthServer(common.mglx_webserver.MglxWebserver):
def __init__(self, gw2api = None):
super(Gw2AuthServer, self).__init__()
self.__gw2api = gw2api
self.add_route('GET', '/', self.handle_login_get)
self.add_route('GET', '/login', self.handle_login_get)
self.add_route('GET', '/login_baddata', self.handle_login_baddata_get)
self.add_route('GET', '/login_failed', self.handle_login_baddata_get)
self.add_route('GET', '/login_noaccount', self.handle_login_noaccount_get)
self.add_route('GET', '/finished', self.handle_finished_get)
self.add_route('POST', '/', self.handle_login_post)
self.add_route('POST', '/login', self.handle_login_post)
async def handle_login_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login.html'))
async def handle_login_baddata_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_baddata.html'))
async def handle_login_failed_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_failed.html'))
async def handle_login_noaccount_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_noaccount.html'))
async def handle_finished_get(self, request):
return aiohttp.web.FileResponse(os.path.join(os.path.dirname(os.path.realpath(__file__)),'html/login_noaccount.html'))
async def handle_login_post(self, request):
data = await request.post()
if 'apikey' not in data:
raise aiohttp.web.HTTPFound('/login_baddata')
auth_result = None
try:
auth_result = await self.__gw2api.do_auth_apikey(data['apikey'])
except Exception:
self._logger.exception("exception on doing auth:")
raise aiohttp.web.HTTPFound('/login_baddata')
if auth_result == GW2AuthorizationResult.FINISHED:
raise aiohttp.web.HTTPFound('/finished')
elif auth_result == GW2AuthorizationResult.FAILED_NO_ACCOUNT:
raise aiohttp.web.HTTPFound('/login_noaccount')
elif auth_result == GW2AuthorizationResult.FAILED_BAD_DATA:
raise aiohttp.web.HTTPFound('/login_baddata')
else:
raise aiohttp.web.HTTPFound('/login_failed')
raise aiohttp.web.HTTPFound('/login_failed')
| true | true |
f7301020113d711cc94ea89e1f61b1588ee24669 | 30,958 | py | Python | func.py | jhe8281/openSW | 6f3bc5bb34996616a6e862b48e5d164da12344a7 | [
"BSD-3-Clause"
] | 2 | 2019-01-16T02:03:41.000Z | 2019-03-07T04:43:08.000Z | func.py | jhe8281/openSW | 6f3bc5bb34996616a6e862b48e5d164da12344a7 | [
"BSD-3-Clause"
] | null | null | null | func.py | jhe8281/openSW | 6f3bc5bb34996616a6e862b48e5d164da12344a7 | [
"BSD-3-Clause"
] | null | null | null | import email.mime.text
import urllib.request
import sqlite3
import hashlib
import smtplib
import bcrypt
import flask
import json
import html
import sys
import re
import os
try:
import css_html_js_minify
except:
pass
if sys.version_info < (3, 6):
import sha3
from set_mark.tool import *
from mark import *
def load_conn(data):
    """Install the sqlite3 connection into this module's globals.

    data -- an open sqlite3 connection; a cursor is created from it and both
    are shared module-wide (the DB-access functions below rely on them).
    """
    global conn
    global curs
    conn = data
    curs = conn.cursor()
    # Propagate the same connection to the star-imported mark helpers.
    load_conn2(data)
def send_email(who, title, data):
    """Send a plain-text mail via Gmail SMTP-over-SSL.

    Credentials come from the "g_email"/"g_pass" rows of the "other" table;
    if they are missing the mail is silently skipped.
    """
    smtp = smtplib.SMTP_SSL('smtp.gmail.com', 465)
    try:
        curs.execute('select name, data from other where name = "g_email" or name = "g_pass"')
        rep_data = curs.fetchall()
        if rep_data:
            g_email = ''
            g_pass = ''
            for i in rep_data:
                if i[0] == 'g_email':
                    g_email = i[1]
                else:
                    g_pass = i[1]
            smtp.login(g_email, g_pass)
            msg = email.mime.text.MIMEText(data)
            msg['Subject'] = title
            smtp.sendmail(g_email, who, msg.as_string())
            smtp.quit()
    except:
        # NOTE(review): every failure (login, send, DB) lands here, and the
        # SMTP connection is not closed on this path.
        print('error : email login error')
def easy_minify(data, tool = None):
    """Minify markup with css_html_js_minify when available.

    tool selects the minifier: 'css', 'js', or falsy for HTML. If the
    library is missing or raises, fall back to a regex whitespace squeeze.
    """
    try:
        if tool == 'css':
            data = css_html_js_minify.css_minify(data)
        elif tool == 'js':
            data = css_html_js_minify.js_minify(data)
        elif not tool:
            data = css_html_js_minify.html_minify(data)
    except:
        # Cheap fallback: drop indentation before tags, collapse gaps between tags.
        data = re.sub('\n +<', '\n<', data)
        data = re.sub('>(\n| )+<', '> <', data)
    return data
def render_set(title = '', data = '', num = 0):
    """Render namumark text for *title*, unless the page's view ACL denies it."""
    # acl_check(..., 'render') == 1 means this client fails the view ACL.
    if acl_check(title, 'render') != 1:
        return namumark(title, data, num)
    return 'http request 401.3'
def captcha_get():
    """Return reCAPTCHA widget HTML for anonymous visitors, or '' when disabled.

    Emitted only when the client is not logged in (custom()[2] == 0) and both
    the widget HTML ("recaptcha") and the secret key ("sec_re") are present
    in the "other" table.
    """
    data = ''
    if custom()[2] == 0:
        curs.execute('select data from other where name = "recaptcha"')
        recaptcha = curs.fetchall()
        if recaptcha and recaptcha[0][0] != '':
            curs.execute('select data from other where name = "sec_re"')
            sec_re = curs.fetchall()
            if sec_re and sec_re[0][0] != '':
                data += recaptcha[0][0] + '<hr class=\"main_hr\">'
    return data
def update():
    """One-off data migrations applied at startup; every step is best-effort."""
    # v3.0.5: rename Korean namespaces to English across all tables
    # ('사용자:' -> 'user:', '파일:' -> 'file:', '분류:' -> 'category:').
    try:
        all_rep = [['사용자:', 'user:'], ['파일:', 'file:'], ['분류:', 'category:']]
        all_rep2 = ['data', 'history', 'acl', 'topic', 'back']
        test = 0
        for i in range(3):
            for j in range(6):
                # j == 5 revisits the "back" table, but on its "link" column.
                if not j == 5:
                    curs.execute('select title from ' + all_rep2[j] + ' where title like ?', [all_rep[i][0] + '%'])
                else:
                    curs.execute('select link from back where link like ?', [all_rep[i][0] + '%'])
                user_rep = curs.fetchall()
                if user_rep:
                    for user_rep2 in user_rep:
                        test = 1
                        first = re.sub('^' + all_rep[i][0], all_rep[i][1], user_rep2[0])
                        if j == 0:
                            curs.execute("update data set title = ? where title = ?", [first, user_rep2[0]])
                        elif j == 1:
                            curs.execute("update history set title = ? where title = ?", [first, user_rep2[0]])
                        elif j == 2:
                            curs.execute("update acl set title = ? where title = ?", [first, user_rep2[0]])
                        elif j == 3:
                            curs.execute("update topic set title = ? where title = ?", [first, user_rep2[0]])
                        elif j == 4:
                            curs.execute("update back set title = ? where title = ?", [first, user_rep2[0]])
                        elif j == 5:
                            curs.execute("update back set link = ? where link = ?", [first, user_rep2[0]])
        if test == 1:
            print('사용자 to user, 파일 to file, 분류 to category')
    except:
        pass
    # v3.0.8: fold the old stop/agreedis tables into rd's stop/agree columns.
    try:
        curs.execute("select title, sub, close from stop")
        for i in curs.fetchall():
            if i[2] == '':
                curs.execute("update rd set stop = 'S' where title = ? and sub = ?", [i[0], i[1]])
            else:
                curs.execute("update rd set stop = 'O' where title = ? and sub = ?", [i[0], i[1]])
    except:
        pass
    try:
        curs.execute("select title, sub from agreedis")
        for i in curs.fetchall():
            curs.execute("update rd set agree = 'O' where title = ? and sub = ?", [i[0], i[1]])
    except:
        pass
    try:
        curs.execute("drop table if exists stop")
        curs.execute("drop table if exists agreedis")
    except:
        pass
def pw_encode(data, data2 = '', type_d = ''):
    """Hash password *data* with the selected scheme; return the hex/bcrypt digest.

    data2  -- bcrypt salt (or full stored hash) to reuse for verification;
              ignored by the sha schemes
    type_d -- 'sha256', 'sha3', anything else for bcrypt; '' reads the
              site-wide scheme from the "encode" row of the "other" table
    """
    if type_d == '':
        curs.execute('select data from other where name = "encode"')
        set_data = curs.fetchall()
        type_d = set_data[0][0]
    if type_d == 'sha256':
        return hashlib.sha256(bytes(data, 'utf-8')).hexdigest()
    elif type_d == 'sha3':
        # Python < 3.6 lacks hashlib.sha3_256; the sha3 backport is imported above.
        if sys.version_info < (3, 6):
            return sha3.sha3_256(bytes(data, 'utf-8')).hexdigest()
        else:
            return hashlib.sha3_256(bytes(data, 'utf-8')).hexdigest()
    else:
        if data2 != '':
            salt_data = bytes(data2, 'utf-8')
        else:
            salt_data = bcrypt.gensalt(11)
        return bcrypt.hashpw(bytes(data, 'utf-8'), salt_data).decode()
def pw_check(data, data2, type_d = 'no', id_d = ''):
    """Verify password *data* against stored hash *data2*; return 1 on match else 0.

    type_d -- 'no' (default): use the site-wide scheme from "other"; else the
              scheme the hash was created with ('' is treated as bcrypt).
    id_d   -- when given and the scheme differs from the current site-wide
              one, a successful check re-hashes and stores the password.
    """
    curs.execute('select data from other where name = "encode"')
    db_data = curs.fetchall()
    if type_d != 'no':
        if type_d == '':
            set_data = 'bcrypt'
        else:
            set_data = type_d
    else:
        set_data = db_data[0][0]
    while 1:
        if set_data in ['sha256', 'sha3']:
            data3 = pw_encode(data = data, type_d = set_data)
            if data3 == data2:
                re_data = 1
            else:
                re_data = 0
            break
        else:
            try:
                # bcrypt check: hashing with the stored hash as the salt must
                # reproduce the stored hash exactly.
                if pw_encode(data, data2, 'bcrypt') == data2:
                    re_data = 1
                else:
                    re_data = 0
                break
            except:
                # data2 is not a valid bcrypt hash; retry with the site scheme.
                # NOTE(review): if the site scheme is also bcrypt this retries
                # forever on a malformed hash — confirm and guard upstream.
                set_data = db_data[0][0]
    if db_data[0][0] != set_data and re_data == 1 and id_d != '':
        # Transparent upgrade: re-store the hash under the current scheme.
        curs.execute("update user set pw = ?, encode = ? where id = ?", [pw_encode(data), db_data[0][0], id_d])
    return re_data
def captcha_post(re_data, num = 1):
    """Server-side reCAPTCHA verification; return 0 = pass, 1 = fail.

    re_data -- the g-recaptcha-response token posted by the client
    num     -- only 1 performs the check (and only for anonymous users with
               captcha configured); any other value is a no-op returning None
    """
    if num == 1:
        if custom()[2] == 0 and captcha_get() != '':
            curs.execute('select data from other where name = "sec_re"')
            sec_re = curs.fetchall()
            if sec_re and sec_re[0][0] != '':
                # siteverify call; secret and token travel in the query string.
                data = urllib.request.urlopen('https://www.google.com/recaptcha/api/siteverify?secret=' + sec_re[0][0] + '&response=' + re_data)
                if not data:
                    return 0
                else:
                    json_data = data.read().decode(data.headers.get_content_charset())
                    json_data = json.loads(json_data)
                    if data.getcode() == 200 and json_data['success'] == True:
                        return 0
                    else:
                        return 1
            else:
                return 0
        else:
            return 0
    else:
        pass
def load_lang(data, num = 2):
    """Translate UI string key *data* via the JSON files in language/.

    num == 1: use the site-wide language (the "language" row of "other");
              missing keys come back as "<key> (missing)".
    num == 2 (default): prefer the requesting user's "lang" setting, falling
              back to the site-wide language for missing files or keys.
    """
    if num == 1:
        curs.execute("select data from other where name = 'language'")
        rep_data = curs.fetchall()
        json_data = open(os.path.join('language', rep_data[0][0] + '.json'), 'rt', encoding='utf-8').read()
        lang = json.loads(json_data)
        if data in lang:
            return lang[data]
        else:
            return data + ' (missing)'
    else:
        curs.execute('select data from user_set where name = "lang" and id = ?', [ip_check()])
        rep_data = curs.fetchall()
        if rep_data:
            try:
                json_data = open(os.path.join('language', rep_data[0][0] + '.json'), 'rt', encoding='utf-8').read()
                lang = json.loads(json_data)
            except:
                # Unknown/corrupt user language file: use the site language.
                return load_lang(data, 1)
            if data in lang:
                return lang[data]
            else:
                return load_lang(data, 1)
        else:
            return load_lang(data, 1)
def load_oauth(provider):
    """Return the OAuth settings dict for *provider* from oauthsettings.json.

    Raises KeyError for an unknown provider, as before.
    """
    # Use a context manager so the handle is closed deterministically
    # (the original open(...).read() leaked it until GC).
    with open('oauthsettings.json', encoding='utf-8') as f:
        return json.load(f)[provider]
def update_oauth(provider, target, content):
    """Set oauthsettings.json[provider][target] = content, persist, return 'Done'."""
    # Read via a context manager (original leaked the read handle).
    with open('oauthsettings.json', encoding='utf-8') as f:
        oauth = json.load(f)
    oauth[provider][target] = content
    with open('oauthsettings.json', 'w', encoding='utf-8') as f:
        json.dump(oauth, f)
    return 'Done'
def ip_or_user(data):
    """Classify *data*: 1 when it looks like an IP (contains '.' or ':'), else 0."""
    return 1 if re.search('(\.|:)', data) else 0
def edit_help_button():
    """Return [script_html, toolbar_html] for the edit page's markup shortcuts.

    toolbar_html is a row of links that insert namumark snippets into the
    #content textarea at the caret position via the insert_data() script.
    """
    # Caret-position insertion technique:
    # https://stackoverflow.com/questions/11076975/insert-text-into-textarea-at-cursor-position-javascript
    js_data = '''
    <script>
        function insert_data(name, data) {
            if(document.selection) {
                document.getElementById(name).focus();
                sel = document.selection.createRange();
                sel.text = data;
            } else if(document.getElementById(name).selectionStart || document.getElementById(name).selectionStart == '0') {
                var startPos = document.getElementById(name).selectionStart;
                var endPos = document.getElementById(name).selectionEnd;
                document.getElementById(name).value = document.getElementById(name).value.substring(0, startPos) + data + document.getElementById(name).value.substring(endPos, document.getElementById(name).value.length);
            } else {
                document.getElementById(name).value += data;
            }
        }
    </script>
    '''
    # Pairs of [text inserted into the textarea, link label].
    insert_list = [['[[|]]', '[[|]]'], ['[*()]', '[*()]'], ['{{{#!}}}', '{{{#!}}}'], ['||<>||', '||<>||'], ["\\'\\'\\'", "\'\'\'"]]
    data = ''
    # NOTE: the loop variable shadows the JS function name; harmless here.
    for insert_data in insert_list:
        data += '<a href="javascript:void(0);" onclick="insert_data(\'content\', \'' + insert_data[0] + '\');">(' + insert_data[1] + ')</a> '
    return [js_data, data + '<hr class=\"main_hr\">']
def ip_warring():
    """Return the warning banner HTML shown to anonymous editors ('' for logged-in users)."""
    if custom()[2] == 0:
        # Custom banner text from settings, else the localized default.
        curs.execute('select data from other where name = "no_login_warring"')
        data = curs.fetchall()
        if data and data[0][0] != '':
            text_data = '<span>' + data[0][0] + '</span><hr class=\"main_hr\">'
        else:
            text_data = '<span>' + load_lang('no_login_warring') + '</span><hr class=\"main_hr\">'
    else:
        text_data = ''
    return text_data
def skin_check():
    """Resolve the active template path: site default skin, then the user's override.

    A configured skin is honored only if its index.html exists; otherwise the
    built-in 'neo_yousoro' skin is used.
    """
    skin = './views/neo_yousoro/'
    curs.execute('select data from other where name = "skin"')
    skin_exist = curs.fetchall()
    if skin_exist and skin_exist[0][0] != '':
        if os.path.exists(os.path.abspath('./views/' + skin_exist[0][0] + '/index.html')) == 1:
            skin = './views/' + skin_exist[0][0] + '/'
    # Per-user skin preference wins over the site default.
    curs.execute('select data from user_set where name = "skin" and id = ?', [ip_check()])
    skin_exist = curs.fetchall()
    if skin_exist and skin_exist[0][0] != '':
        if os.path.exists(os.path.abspath('./views/' + skin_exist[0][0] + '/index.html')) == 1:
            skin = './views/' + skin_exist[0][0] + '/'
    return skin + 'index.html'
def next_fix(link, num, page, end = 50):
    """Build previous/next pagination link HTML.

    link -- URL prefix the page number is appended to
    num  -- current page number (1-based)
    page -- rows fetched for this page; a full page (len == end) implies more
    """
    list_data = ''
    if num == 1:
        # First page: only a "next" link, and only when the page is full.
        if len(page) == end:
            list_data += '<hr class=\"main_hr\"><a href="' + link + str(num + 1) + '">(' + load_lang('next') + ')</a>'
    elif len(page) != end:
        # Short page beyond the first: this is the last page, "previous" only.
        list_data += '<hr class=\"main_hr\"><a href="' + link + str(num - 1) + '">(' + load_lang('previous') + ')</a>'
    else:
        list_data += '<hr class=\"main_hr\"><a href="' + link + str(num - 1) + '">(' + load_lang('previous') + ')</a> <a href="' + link + str(num + 1) + '">(' + load_lang('next') + ')</a>'
    return list_data
def other2(data):
    """Return *data* with one empty-string element appended (template filler slot)."""
    return [*data, '']
def wiki_set(num = 1):
    """Fetch site-wide settings from the "other" table.

    num == 1: [name, license, '', '', logo, head] with defaults
              ('wiki', 'CC 0', logo defaulting to the wiki name, '' head).
    num == 2: front page title (default 'FrontPage').
    num == 3: upload size limit (default '2').
    """
    if num == 1:
        data_list = []
        curs.execute('select data from other where name = ?', ['name'])
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += ['wiki']
        curs.execute('select data from other where name = "license"')
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += ['CC 0']
        # Two reserved filler slots used by the templates.
        data_list += ['', '']
        curs.execute('select data from other where name = "logo"')
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += [data_list[0]]
        curs.execute("select data from other where name = 'head'")
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += ['']
        return data_list
    # num 2/3 share the fetch-or-default tail below.
    if num == 2:
        var_data = 'FrontPage'
        curs.execute('select data from other where name = "frontpage"')
    elif num == 3:
        var_data = '2'
        curs.execute('select data from other where name = "upload"')
    db_data = curs.fetchall()
    if db_data and db_data[0][0] != '':
        return db_data[0][0]
    else:
        return var_data
def diff(seqm):
    """Render a difflib.SequenceMatcher as line-numbered HTML diff output.

    Insertions are wrapped in green spans (#CFC), deletions/replacements in
    red (#FDD). Only lines that contain — or fall inside — a changed span are
    emitted, each as "<lineno> : <text><br>".
    """
    output = []
    for opcode, a0, a1, b0, b1 in seqm.get_opcodes():
        if opcode == 'equal':
            output += [seqm.a[a0:a1]]
        elif opcode == 'insert':
            output += ["<span style='background:#CFC;'>" + seqm.b[b0:b1] + "</span>"]
        elif opcode == 'delete':
            output += ["<span style='background:#FDD;'>" + seqm.a[a0:a1] + "</span>"]
        elif opcode == 'replace':
            output += ["<span style='background:#FDD;'>" + seqm.a[a0:a1] + "</span>"]
            output += ["<span style='background:#CFC;'>" + seqm.b[b0:b1] + "</span>"]
    end = ''.join(output)
    end = end.replace('\r\n', '\n')
    sub = ''
    if not re.search('\n', end):
        end += '\n'
    num = 0   # span nesting depth carried over across line breaks
    left = 1  # line counter; incremented before use, so labels start at 2
    while 1:
        # Peel off the first line of `end` each iteration.
        data = re.search('((?:(?!\n).)*)\n', end)
        if data:
            data = data.groups()[0]
            left += 1
            if re.search('<span style=\'(?:(?:(?!\').)+)\'>', data):
                # Line opens a span: emit it; track whether the span closes here.
                num += 1
                if re.search('<\/span>', data):
                    num -= 1
                sub += str(left) + ' : ' + re.sub('(?P<in>(?:(?!\n).)*)\n', '\g<in>', data, 1) + '<br>'
            else:
                if re.search('<\/span>', data):
                    # Line closes a span opened on an earlier line.
                    num -= 1
                    sub += str(left) + ' : ' + re.sub('(?P<in>(?:(?!\n).)*)\n', '\g<in>', data, 1) + '<br>'
                else:
                    # Emit unchanged lines only while inside an open span.
                    if num > 0:
                        sub += str(left) + ' : ' + re.sub('(?P<in>.*)\n', '\g<in>', data, 1) + '<br>'
            end = re.sub('((?:(?!\n).)*)\n', '', end, 1)
        else:
            break
    return sub
def admin_check(num = None, what = None):
    """Return 1 when the requester holds the admin right for action *num*, else 0.

    num  -- 1 'ban', 3 'toron', 4 'check', 5 'acl', 6 'hidel', 7 'give';
            any other value checks the 'owner' right directly.
    what -- when given, a successful check is logged to the re_admin table.
    The loop runs at most twice: the specific right first, then an 'owner'
    fallback (owners implicitly hold every right).
    """
    ip = ip_check()
    curs.execute("select acl from user where id = ?", [ip])
    user = curs.fetchall()
    if user:
        reset = 0
        while 1:
            if num == 1 and reset == 0:
                check = 'ban'
            elif num == 3 and reset == 0:
                check = 'toron'
            elif num == 4 and reset == 0:
                check = 'check'
            elif num == 5 and reset == 0:
                check = 'acl'
            elif num == 6 and reset == 0:
                check = 'hidel'
            elif num == 7 and reset == 0:
                check = 'give'
            else:
                check = 'owner'
            curs.execute('select name from alist where name = ? and acl = ?', [user[0][0], check])
            if curs.fetchall():
                if what:
                    curs.execute("insert into re_admin (who, what, time) values (?, ?, ?)", [ip, what, get_time()])
                    conn.commit()
                return 1
            else:
                if reset == 0:
                    reset = 1
                else:
                    break
    return 0
def ip_pas(raw_ip):
    """Format an IP address or user name for display in history/discussion lists.

    IPs are MD5-hashed for display when the "ip_view" setting is on (visible
    in full to ban admins); user names become links to their user page. A
    "(tool)" link is appended unless the entry is hidden.
    """
    hide = 0
    if re.search("(\.|:)", raw_ip):
        # Entries prefixed "<tool>:" are system actors, shown verbatim, no tool link.
        if not re.search("^" + load_lang('tool', 1) + ":", raw_ip):
            curs.execute("select data from other where name = 'ip_view'")
            data = curs.fetchall()
            if data and data[0][0] != '':
                # Display-only obfuscation of the raw IP.
                ip = '<span style="font-size: 75%;">' + hashlib.md5(bytes(raw_ip, 'utf-8')).hexdigest() + '</span>'
                if not admin_check('ban', None):
                    # NOTE(review): other call sites pass an int num; 'ban'
                    # here falls through to the 'owner' check — confirm intent.
                    hide = 1
            else:
                ip = raw_ip
        else:
            ip = raw_ip
            hide = 1
    else:
        # User name: link to the user page (styled differently if it doesn't exist).
        curs.execute("select title from data where title = ?", ['user:' + raw_ip])
        if curs.fetchall():
            ip = '<a href="/w/' + url_pas('user:' + raw_ip) + '">' + raw_ip + '</a>'
        else:
            ip = '<a id="not_thing" href="/w/' + url_pas('user:' + raw_ip) + '">' + raw_ip + '</a>'
    if hide == 0:
        ip += ' <a href="/tool/' + url_pas(raw_ip) + '">(' + load_lang('tool') + ')</a>'
    return ip
def custom():
    """Collect per-request template context.

    Returns ['', '', user_icon, user_head, email, user_name, lang_default]:
    user_icon -- 0 anonymous, 1 logged in, 2 logged in with pending alarms.
    """
    if 'head' in flask.session:
        user_head = flask.session['head']
    else:
        user_head = ''
    if 'state' in flask.session and flask.session['state'] == 1:
        curs.execute('select name from alarm where name = ? limit 1', [ip_check()])
        if curs.fetchall():
            user_icon = 2
        else:
            user_icon = 1
    else:
        user_icon = 0
    if user_icon != 0:
        # NOTE: local `email` shadows the imported email module in this scope.
        curs.execute('select data from user_set where name = "email" and id = ?', [ip_check()])
        data = curs.fetchall()
        if data:
            email = data[0][0]
        else:
            email = ''
    else:
        email = ''
    if user_icon != 0:
        user_name = ip_check()
    else:
        user_name = load_lang('user')
    return ['', '', user_icon, user_head, email, user_name, load_lang(data = '', num = 2)]
def load_skin(data = ''):
    """Build <option> HTML listing available skins, with the selected one first.

    data -- '' to use the requesting user's saved skin (falling back to the
            site default), or an explicit skin name to pre-select.
    """
    div2 = ''
    # Entries under views/ that are not skins.
    system_file = ['main_css', 'easter_egg.html']
    if data == '':
        ip = ip_check()
        curs.execute('select data from user_set where name = "skin" and id = ?', [ip])
        data = curs.fetchall()
        for skin_data in os.listdir(os.path.abspath('views')):
            if not skin_data in system_file:
                if not data:
                    # No user preference: promote the site default to the front.
                    curs.execute('select data from other where name = "skin"')
                    sql_data = curs.fetchall()
                    if sql_data and sql_data[0][0] == skin_data:
                        div2 = '<option value="' + skin_data + '">' + skin_data + '</option>' + div2
                    else:
                        div2 += '<option value="' + skin_data + '">' + skin_data + '</option>'
                elif data[0][0] == skin_data:
                    div2 = '<option value="' + skin_data + '">' + skin_data + '</option>' + div2
                else:
                    div2 += '<option value="' + skin_data + '">' + skin_data + '</option>'
    else:
        for skin_data in os.listdir(os.path.abspath('views')):
            if not skin_data in system_file:
                if data == skin_data:
                    div2 = '<option value="' + skin_data + '">' + skin_data + '</option>' + div2
                else:
                    div2 += '<option value="' + skin_data + '">' + skin_data + '</option>'
    return div2
def acl_check(name, tool = ''):
    """Access-control check for document *name*; return 1 = denied, 0 = allowed.

    tool == 'render' checks only the page's view ACL; anything else checks
    edit rights: ban status, user-page ownership, file pages, the per-page
    ACL and the site-wide "edit" policy.
    """
    ip = ip_check()
    if tool == 'render':
        curs.execute("select view from acl where title = ?", [name])
        acl_data = curs.fetchall()
        if acl_data:
            # Bug fix: user_data was referenced below without ever being
            # queried in this branch (NameError whenever a view ACL row
            # existed); fetch the requester's account row first.
            curs.execute("select acl from user where id = ?", [ip])
            user_data = curs.fetchall()
            if acl_data[0][0] == 'user':
                if not user_data:
                    return 1
            if acl_data[0][0] == 'admin':
                if not user_data:
                    return 1
                if not admin_check(5, 'view (' + name + ')') == 1:
                    return 1
        return 0
    else:
        if ban_check() == 1:
            return 1
        # user: pages may only be edited by their owner (or an acl admin).
        acl_c = re.search("^user:([^/]*)", name)
        if acl_c:
            acl_n = acl_c.groups()
            if admin_check(5, None) == 1:
                return 0
            curs.execute("select dec from acl where title = ?", ['user:' + acl_n[0]])
            acl_data = curs.fetchall()
            if acl_data:
                if acl_data[0][0] == 'all':
                    return 0
                if acl_data[0][0] == 'user' and not re.search("(\.|:)", ip):
                    return 0
            if ip != acl_n[0] or re.search("(\.|:)", ip):
                return 1
            if ip == acl_n[0] and not re.search("(\.|:)", ip) and not re.search("(\.|:)", acl_n[0]):
                return 0
            else:
                return 1
        # file: pages are editable by acl admins only.
        file_c = re.search("^file:(.*)", name)
        if file_c and admin_check(5, 'edit (' + name + ')') != 1:
            return 1
        curs.execute("select acl from user where id = ?", [ip])
        user_data = curs.fetchall()
        # Per-page edit ACL.
        curs.execute("select dec from acl where title = ?", [name])
        acl_data = curs.fetchall()
        if acl_data:
            if acl_data[0][0] == 'user':
                if not user_data:
                    return 1
            if acl_data[0][0] == 'admin':
                if not user_data:
                    return 1
                if not admin_check(5, 'edit (' + name + ')') == 1:
                    return 1
        # Site-wide edit policy ('login' or 'admin').
        curs.execute('select data from other where name = "edit"')
        set_data = curs.fetchall()
        if set_data:
            if set_data[0][0] == 'login':
                if not user_data:
                    return 1
            if set_data[0][0] == 'admin':
                if not user_data:
                    return 1
                if not admin_check(5, None) == 1:
                    return 1
        return 0
def ban_check(ip = None, tool = None):
    """Return 1 when *ip* (default: the requester) is banned, 0 otherwise.

    Both the exact address and its first-two-octet band are checked. An empty
    "end" means an indefinite ban. tool == 'login' lets through bans whose
    "login" flag is 'O' (ban that still permits logging in).
    """
    if not ip:
        ip = ip_check()
    band = re.search("^([0-9]{1,3}\.[0-9]{1,3})", ip)
    if band:
        band_it = band.groups()[0]
    else:
        band_it = '-'
    curs.execute("select end, login from ban where block = ?", [band_it])
    band_d = curs.fetchall()
    curs.execute("select end, login from ban where block = ?", [ip])
    ban_d = curs.fetchall()
    # Band blocks take precedence over exact-address blocks.
    data = band_d or ban_d
    if data and (data[0][0] == '' or data[0][0] > get_time()):
        if tool and tool == 'login':
            if data[0][1] == 'O':
                return 0
        return 1
    return 0
def topic_check(name, sub):
    """Can the requester post in discussion (name, sub)? Return 1 = denied, 0 = allowed."""
    ip = ip_check()
    if ban_check() == 1:
        return 1
    curs.execute("select acl from user where id = ?", [ip])
    user_data = curs.fetchall()
    # Site-wide discussion policy ('login' or 'admin').
    curs.execute('select data from other where name = "discussion"')
    acl_data = curs.fetchall()
    if acl_data:
        if acl_data[0][0] == 'login':
            if not user_data:
                return 1
        if acl_data[0][0] == 'admin':
            if not user_data:
                return 1
            if not admin_check(3, 'topic (' + name + ')') == 1:
                return 1
    # Per-document discussion ACL.
    curs.execute("select dis from acl where title = ?", [name])
    acl_data = curs.fetchall()
    if acl_data:
        if acl_data[0][0] == 'user':
            if not user_data:
                return 1
        if acl_data[0][0] == 'admin':
            if not user_data:
                return 1
            if not admin_check(3, 'topic (' + name + ')') == 1:
                return 1
    # Threads with a non-empty stop flag accept posts from discussion admins only.
    curs.execute("select title from rd where title = ? and sub = ? and not stop = ''", [name, sub])
    if curs.fetchall():
        if not admin_check(3, 'topic (' + name + ')') == 1:
            return 1
    return 0
def ban_insert(name, end, why, login, blocker):
    """Ban *name* (IP, two-octet band, or user), or release an existing ban.

    end  -- ban length in seconds as a string; '0' stores an empty end date,
            which ban_check treats as indefinite.
    login-- any non-empty value lets the target keep logging in ('O' flag).
    Every action is mirrored into the rb audit table.
    """
    # get_time/datetime are provided by the star imports at the top of the file.
    now_time = get_time()
    if re.search("^([0-9]{1,3}\.[0-9]{1,3})$", name):
        band = 'O'  # a bare two-octet prefix blocks the whole band
    else:
        band = ''
    curs.execute("select block from ban where block = ?", [name])
    if curs.fetchall():
        # Already blocked: log a release and lift the ban.
        curs.execute("insert into rb (block, end, today, blocker, why, band) values (?, ?, ?, ?, ?, ?)", [name, load_lang('release', 1), now_time, blocker, '', band])
        curs.execute("delete from ban where block = ?", [name])
    else:
        if login != '':
            login = 'O'
        else:
            login = ''
        if end != '0':
            time = datetime.datetime.now()
            plus = datetime.timedelta(seconds = int(end))
            r_time = (time + plus).strftime("%Y-%m-%d %H:%M:%S")
        else:
            r_time = ''
        curs.execute("insert into rb (block, end, today, blocker, why, band) values (?, ?, ?, ?, ?, ?)", [name, r_time, now_time, blocker, why, band])
        curs.execute("insert into ban (block, end, why, band, login) values (?, ?, ?, ?, ?)", [name, r_time, why, band, login])
    conn.commit()
def rd_plus(title, sub, date):
    """Upsert the last-activity date of discussion (title, sub) in the rd table."""
    curs.execute("select title from rd where title = ? and sub = ?", [title, sub])
    exists = curs.fetchall()
    if exists:
        curs.execute("update rd set date = ? where title = ? and sub = ?", [date, title, sub])
    else:
        curs.execute("insert into rd (title, sub, date) values (?, ?, ?)", [title, sub, date])
def history_plus(title, data, date, ip, send, leng):
    """Append a revision row for *title*, numbered last id + 1 ('1' for a new page)."""
    curs.execute("select id from history where title = ? order by id + 0 desc limit 1", [title])
    last = curs.fetchall()
    new_id = str(int(last[0][0]) + 1) if last else '1'
    curs.execute(
        "insert into history (id, title, data, date, ip, send, leng, hide) values (?, ?, ?, ?, ?, ?, ?, '')",
        [new_id, title, data, date, ip, send, leng]
    )
def leng_check(first, second):
    """Return the signed size delta from *first* to *second* as a display string."""
    delta = second - first
    if delta > 0:
        return '+' + str(delta)
    if delta < 0:
        return '-' + str(-delta)
    return '0'
def edit_filter_do(data):
    """Match *data* against every edit-filter regex; ban the editor and return 1 on a hit.

    Users holding the ban admin right bypass filtering. Filter rows are
    (regex, sub) where sub is a ban length in seconds, or 'X' which maps to
    '0' (stored with an empty end date — see ban_insert).
    """
    if admin_check(1, 'edit_filter pass') != 1:
        curs.execute("select regex, sub from filter")
        for data_list in curs.fetchall():
            # Filters are case-insensitive.
            match = re.compile(data_list[0], re.I)
            if match.search(data):
                ban_insert(
                    ip_check(),
                    '0' if data_list[1] == 'X' else data_list[1],
                    load_lang('edit', 1) + ' ' + load_lang('filter', 1),
                    None,
                    load_lang('tool', 1) + ':' + load_lang('edit', 1) + ' ' + load_lang('filter', 1)
                )
                return 1
    return 0
def redirect(data):
    """Thin wrapper over flask.redirect, kept for call-site brevity."""
    return flask.redirect(data)
def re_error(data):
    """Render the error page for *data*: '/ban' or '/error/<num>'.

    '/ban' shows the requester's ban state (and auto-lifts expired bans);
    '/error/<num>' maps the number to a localized message. Anything else
    redirects to the front page.
    """
    conn.commit()
    if data == '/ban':
        ip = ip_check()
        end = '<li>' + load_lang('why') + ' : ' + load_lang('authority_error') + '</li>'
        if ban_check() == 1:
            curs.execute("select end, why from ban where block = ?", [ip])
            end_data = curs.fetchall()
            if not end_data:
                # Not blocked directly; look up the two-octet band block.
                match = re.search("^([0-9]{1,3}\.[0-9]{1,3})", ip)
                if match:
                    curs.execute("select end, why from ban where block = ?", [match.groups()[0]])
                    end_data = curs.fetchall()
            if end_data:
                end = '<li>' + load_lang('state') + ' : ' + load_lang('ban') + '</li><li>'
                if end_data[0][0]:
                    # Compare timestamps as digit strings turned into ints.
                    now = int(re.sub('(\-| |:)', '', get_time()))
                    day = int(re.sub('(\-| |:)', '', end_data[0][0]))
                    if now >= day:
                        # Ban expired: lift it and reload the page client-side.
                        curs.execute("delete from ban where block = ?", [ip])
                        conn.commit()
                        end += '<script>location.reload();</script>'
                    else:
                        end += 'end : ' + end_data[0][0]
                else:
                    end += load_lang('limitless')
                end += '</li>'
                if end_data[0][1] != '':
                    end += '<li>' + load_lang('why') + ' : ' + end_data[0][1] + '</li>'
        return easy_minify(flask.render_template(skin_check(),
            imp = ['error', wiki_set(1), custom(), other2([0, 0])],
            data = '<h2>error</h2><ul>' + end + '</ul>',
            menu = 0
        ))
    else:
        error_data = re.search('\/error\/([0-9]+)', data)
        if error_data:
            num = int(error_data.groups()[0])
            # Error-number -> localized message key.
            if num == 1:
                data = load_lang('no_login_error')
            elif num == 2:
                data = load_lang('no_exist_user_error')
            elif num == 3:
                data = load_lang('authority_error')
            elif num == 4:
                data = load_lang('no_admin_block_error')
            elif num == 5:
                data = load_lang('skin_error')
            elif num == 6:
                data = load_lang('same_id_exist_error')
            elif num == 7:
                data = load_lang('long_id_error')
            elif num == 8:
                data = load_lang('id_char_error') + ' <a href="/name_filter">(' + load_lang('id') + ' ' + load_lang('filter') + ')</a>'
            elif num == 9:
                data = load_lang('file_exist_error')
            elif num == 10:
                data = load_lang('password_error')
            elif num == 13:
                data = load_lang('recaptcha_error')
            elif num == 14:
                data = load_lang('file_extension_error')
            elif num == 15:
                data = load_lang('edit_record_error')
            elif num == 16:
                data = load_lang('same_file_error')
            elif num == 17:
                data = load_lang('file_capacity_error') + ' ' + wiki_set(3)
            elif num == 19:
                data = load_lang('decument_exist_error')
            elif num == 20:
                data = load_lang('password_diffrent_error')
            elif num == 21:
                data = load_lang('edit_filter_error')
            elif num == 22:
                data = load_lang('file_name_error')
            else:
                data = '???'
            return easy_minify(flask.render_template(skin_check(),
                imp = ['error', wiki_set(1), custom(), other2([0, 0])],
                data = '<h2>error</h2><ul><li>' + data + '</li></ul>',
                menu = 0
            ))
        else:
            return redirect('/')
import urllib.request
import sqlite3
import hashlib
import smtplib
import bcrypt
import flask
import json
import html
import sys
import re
import os
try:
import css_html_js_minify
except:
pass
if sys.version_info < (3, 6):
import sha3
from set_mark.tool import *
from mark import *
def load_conn(data):
global conn
global curs
conn = data
curs = conn.cursor()
load_conn2(data)
def send_email(who, title, data):
smtp = smtplib.SMTP_SSL('smtp.gmail.com', 465)
try:
curs.execute('select name, data from other where name = "g_email" or name = "g_pass"')
rep_data = curs.fetchall()
if rep_data:
g_email = ''
g_pass = ''
for i in rep_data:
if i[0] == 'g_email':
g_email = i[1]
else:
g_pass = i[1]
smtp.login(g_email, g_pass)
msg = email.mime.text.MIMEText(data)
msg['Subject'] = title
smtp.sendmail(g_email, who, msg.as_string())
smtp.quit()
except:
print('error : email login error')
def easy_minify(data, tool = None):
try:
if not tool:
data = css_html_js_minify.html_minify(data)
else:
if tool == 'css':
data = css_html_js_minify.css_minify(data)
elif tool == 'js':
data = css_html_js_minify.js_minify(data)
except:
data = re.sub('\n +<', '\n<', data)
data = re.sub('>(\n| )+<', '> <', data)
return data
def render_set(title = '', data = '', num = 0):
if acl_check(title, 'render') == 1:
return 'http request 401.3'
else:
return namumark(title, data, num)
def captcha_get():
data = ''
if custom()[2] == 0:
curs.execute('select data from other where name = "recaptcha"')
recaptcha = curs.fetchall()
if recaptcha and recaptcha[0][0] != '':
curs.execute('select data from other where name = "sec_re"')
sec_re = curs.fetchall()
if sec_re and sec_re[0][0] != '':
data += recaptcha[0][0] + '<hr class=\"main_hr\">'
return data
def update():
try:
all_rep = [['사용자:', 'user:'], ['파일:', 'file:'], ['분류:', 'category:']]
all_rep2 = ['data', 'history', 'acl', 'topic', 'back']
test = 0
for i in range(3):
for j in range(6):
if not j == 5:
curs.execute('select title from ' + all_rep2[j] + ' where title like ?', [all_rep[i][0] + '%'])
else:
curs.execute('select link from back where link like ?', [all_rep[i][0] + '%'])
user_rep = curs.fetchall()
if user_rep:
for user_rep2 in user_rep:
test = 1
first = re.sub('^' + all_rep[i][0], all_rep[i][1], user_rep2[0])
if j == 0:
curs.execute("update data set title = ? where title = ?", [first, user_rep2[0]])
elif j == 1:
curs.execute("update history set title = ? where title = ?", [first, user_rep2[0]])
elif j == 2:
curs.execute("update acl set title = ? where title = ?", [first, user_rep2[0]])
elif j == 3:
curs.execute("update topic set title = ? where title = ?", [first, user_rep2[0]])
elif j == 4:
curs.execute("update back set title = ? where title = ?", [first, user_rep2[0]])
elif j == 5:
curs.execute("update back set link = ? where link = ?", [first, user_rep2[0]])
if test == 1:
print('사용자 to user, 파일 to file, 분류 to category')
except:
pass
try:
curs.execute("select title, sub, close from stop")
for i in curs.fetchall():
if i[2] == '':
curs.execute("update rd set stop = 'S' where title = ? and sub = ?", [i[0], i[1]])
else:
curs.execute("update rd set stop = 'O' where title = ? and sub = ?", [i[0], i[1]])
except:
pass
try:
curs.execute("select title, sub from agreedis")
for i in curs.fetchall():
curs.execute("update rd set agree = 'O' where title = ? and sub = ?", [i[0], i[1]])
except:
pass
try:
curs.execute("drop table if exists stop")
curs.execute("drop table if exists agreedis")
except:
pass
def pw_encode(data, data2 = '', type_d = ''):
if type_d == '':
curs.execute('select data from other where name = "encode"')
set_data = curs.fetchall()
type_d = set_data[0][0]
if type_d == 'sha256':
return hashlib.sha256(bytes(data, 'utf-8')).hexdigest()
elif type_d == 'sha3':
if sys.version_info < (3, 6):
return sha3.sha3_256(bytes(data, 'utf-8')).hexdigest()
else:
return hashlib.sha3_256(bytes(data, 'utf-8')).hexdigest()
else:
if data2 != '':
salt_data = bytes(data2, 'utf-8')
else:
salt_data = bcrypt.gensalt(11)
return bcrypt.hashpw(bytes(data, 'utf-8'), salt_data).decode()
def pw_check(data, data2, type_d = 'no', id_d = ''):
curs.execute('select data from other where name = "encode"')
db_data = curs.fetchall()
if type_d != 'no':
if type_d == '':
set_data = 'bcrypt'
else:
set_data = type_d
else:
set_data = db_data[0][0]
while 1:
if set_data in ['sha256', 'sha3']:
data3 = pw_encode(data = data, type_d = set_data)
if data3 == data2:
re_data = 1
else:
re_data = 0
break
else:
try:
if pw_encode(data, data2, 'bcrypt') == data2:
re_data = 1
else:
re_data = 0
break
except:
set_data = db_data[0][0]
if db_data[0][0] != set_data and re_data == 1 and id_d != '':
curs.execute("update user set pw = ?, encode = ? where id = ?", [pw_encode(data), db_data[0][0], id_d])
return re_data
def captcha_post(re_data, num = 1):
if num == 1:
if custom()[2] == 0 and captcha_get() != '':
curs.execute('select data from other where name = "sec_re"')
sec_re = curs.fetchall()
if sec_re and sec_re[0][0] != '':
data = urllib.request.urlopen('https://www.google.com/recaptcha/api/siteverify?secret=' + sec_re[0][0] + '&response=' + re_data)
if not data:
return 0
else:
json_data = data.read().decode(data.headers.get_content_charset())
json_data = json.loads(json_data)
if data.getcode() == 200 and json_data['success'] == True:
return 0
else:
return 1
else:
return 0
else:
return 0
else:
pass
def load_lang(data, num = 2):
if num == 1:
curs.execute("select data from other where name = 'language'")
rep_data = curs.fetchall()
json_data = open(os.path.join('language', rep_data[0][0] + '.json'), 'rt', encoding='utf-8').read()
lang = json.loads(json_data)
if data in lang:
return lang[data]
else:
return data + ' (missing)'
else:
curs.execute('select data from user_set where name = "lang" and id = ?', [ip_check()])
rep_data = curs.fetchall()
if rep_data:
try:
json_data = open(os.path.join('language', rep_data[0][0] + '.json'), 'rt', encoding='utf-8').read()
lang = json.loads(json_data)
except:
return load_lang(data, 1)
if data in lang:
return lang[data]
else:
return load_lang(data, 1)
else:
return load_lang(data, 1)
def load_oauth(provider):
oauth = json.loads(open('oauthsettings.json', encoding='utf-8').read())
return oauth[provider]
def update_oauth(provider, target, content):
oauth = json.loads(open('oauthsettings.json', encoding='utf-8').read())
oauth[provider][target] = content
with open('oauthsettings.json', 'w', encoding='utf-8') as f:
json.dump(oauth, f)
return 'Done'
def ip_or_user(data):
if re.search('(\.|:)', data):
return 1
else:
return 0
def edit_help_button():
js_data = '''
<script>
function insert_data(name, data) {
if(document.selection) {
document.getElementById(name).focus();
sel = document.selection.createRange();
sel.text = data;
} else if(document.getElementById(name).selectionStart || document.getElementById(name).selectionStart == '0') {
var startPos = document.getElementById(name).selectionStart;
var endPos = document.getElementById(name).selectionEnd;
document.getElementById(name).value = document.getElementById(name).value.substring(0, startPos) + data + document.getElementById(name).value.substring(endPos, document.getElementById(name).value.length);
} else {
document.getElementById(name).value += data;
}
}
</script>
'''
insert_list = [['[[|]]', '[[|]]'], ['[*()]', '[*()]'], ['{{{#!}}}', '{{{#!}}}'], ['||<>||', '||<>||'], ["\\'\\'\\'", "\'\'\'"]]
data = ''
for insert_data in insert_list:
data += '<a href="javascript:void(0);" onclick="insert_data(\'content\', \'' + insert_data[0] + '\');">(' + insert_data[1] + ')</a> '
return [js_data, data + '<hr class=\"main_hr\">']
def ip_warring():
if custom()[2] == 0:
curs.execute('select data from other where name = "no_login_warring"')
data = curs.fetchall()
if data and data[0][0] != '':
text_data = '<span>' + data[0][0] + '</span><hr class=\"main_hr\">'
else:
text_data = '<span>' + load_lang('no_login_warring') + '</span><hr class=\"main_hr\">'
else:
text_data = ''
return text_data
def skin_check():
skin = './views/neo_yousoro/'
curs.execute('select data from other where name = "skin"')
skin_exist = curs.fetchall()
if skin_exist and skin_exist[0][0] != '':
if os.path.exists(os.path.abspath('./views/' + skin_exist[0][0] + '/index.html')) == 1:
skin = './views/' + skin_exist[0][0] + '/'
curs.execute('select data from user_set where name = "skin" and id = ?', [ip_check()])
skin_exist = curs.fetchall()
if skin_exist and skin_exist[0][0] != '':
if os.path.exists(os.path.abspath('./views/' + skin_exist[0][0] + '/index.html')) == 1:
skin = './views/' + skin_exist[0][0] + '/'
return skin + 'index.html'
def next_fix(link, num, page, end = 50):
list_data = ''
if num == 1:
if len(page) == end:
list_data += '<hr class=\"main_hr\"><a href="' + link + str(num + 1) + '">(' + load_lang('next') + ')</a>'
elif len(page) != end:
list_data += '<hr class=\"main_hr\"><a href="' + link + str(num - 1) + '">(' + load_lang('previous') + ')</a>'
else:
list_data += '<hr class=\"main_hr\"><a href="' + link + str(num - 1) + '">(' + load_lang('previous') + ')</a> <a href="' + link + str(num + 1) + '">(' + load_lang('next') + ')</a>'
return list_data
def other2(data):
return data + ['']
def wiki_set(num = 1):
    """Read wiki-wide settings from the ``other`` table.

    num == 1: return [name, license, '', '', logo, head] with defaults
    applied (name -> 'wiki', license -> 'CC 0', logo -> the wiki name,
    head -> '').
    num == 2: return the front page title ('FrontPage' by default).
    num == 3: return the upload size limit ('2' by default).
    NOTE(review): any other num reaches the shared tail with var_data
    unbound and would raise NameError — confirm callers only pass 1-3.
    """
    if num == 1:
        data_list = []
        curs.execute('select data from other where name = ?', ['name'])
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += ['wiki']
        curs.execute('select data from other where name = "license"')
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += ['CC 0']
        # Two reserved (currently unused) slots kept for template compatibility.
        data_list += ['', '']
        curs.execute('select data from other where name = "logo"')
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            # No logo configured: reuse the wiki name (first slot).
            data_list += [data_list[0]]
        curs.execute("select data from other where name = 'head'")
        db_data = curs.fetchall()
        if db_data and db_data[0][0] != '':
            data_list += [db_data[0][0]]
        else:
            data_list += ['']
        return data_list
    if num == 2:
        var_data = 'FrontPage'
        curs.execute('select data from other where name = "frontpage"')
    elif num == 3:
        var_data = '2'
        curs.execute('select data from other where name = "upload"')
    # Shared tail for num 2/3: use the stored value unless it is missing/empty.
    db_data = curs.fetchall()
    if db_data and db_data[0][0] != '':
        return db_data[0][0]
    else:
        return var_data
def diff(seqm):
    """Render a difflib.SequenceMatcher as numbered HTML diff lines.

    Inserted text is wrapped in a green span (#CFC), deleted text in a
    red span (#FDD).  The joined result is then re-split line by line
    and only lines that open, close, or sit inside a changed span are
    emitted, each prefixed with its line number.
    """
    output = []
    for opcode, a0, a1, b0, b1 in seqm.get_opcodes():
        if opcode == 'equal':
            output += [seqm.a[a0:a1]]
        elif opcode == 'insert':
            output += ["<span style='background:#CFC;'>" + seqm.b[b0:b1] + "</span>"]
        elif opcode == 'delete':
            output += ["<span style='background:#FDD;'>" + seqm.a[a0:a1] + "</span>"]
        elif opcode == 'replace':
            # A replace shows both the removed and the inserted text.
            output += ["<span style='background:#FDD;'>" + seqm.a[a0:a1] + "</span>"]
            output += ["<span style='background:#CFC;'>" + seqm.b[b0:b1] + "</span>"]
    end = ''.join(output)
    end = end.replace('\r\n', '\n')
    sub = ''
    # Guarantee at least one '\n' so the line-splitting loop below runs once.
    if not re.search('\n', end):
        end += '\n'
    # num counts spans that are still open across line boundaries;
    # left is the 1-based line number within the combined text.
    num = 0
    left = 1
    while 1:
        data = re.search('((?:(?!\n).)*)\n', end)
        if data:
            data = data.groups()[0]
            left += 1
            if re.search('<span style=\'(?:(?:(?!\').)+)\'>', data):
                # Line opens a changed span; it may also close one.
                num += 1
                if re.search('<\/span>', data):
                    num -= 1
                sub += str(left) + ' : ' + re.sub('(?P<in>(?:(?!\n).)*)\n', '\g<in>', data, 1) + '<br>'
            else:
                if re.search('<\/span>', data):
                    # Line closes a span opened on an earlier line.
                    num -= 1
                    sub += str(left) + ' : ' + re.sub('(?P<in>(?:(?!\n).)*)\n', '\g<in>', data, 1) + '<br>'
                else:
                    # No markup on this line: only keep it while inside an open span.
                    if num > 0:
                        sub += str(left) + ' : ' + re.sub('(?P<in>.*)\n', '\g<in>', data, 1) + '<br>'
            # Consume the processed line from the working buffer.
            end = re.sub('((?:(?!\n).)*)\n', '', end, 1)
        else:
            break
    return sub
def admin_check(num = None, what = None):
    """Return 1 when the current user holds the required admin authority.

    num selects the authority name to test (1=ban, 3=toron, 4=check,
    5=acl, 6=hidel, 7=give); any other value tests 'owner' directly.
    After a failed specific check the loop retries once with 'owner',
    so the owner authority implicitly grants every other one.  When
    *what* is given, a successful check is logged to re_admin.
    """
    ip = ip_check()
    curs.execute("select acl from user where id = ?", [ip])
    user = curs.fetchall()
    if user:
        # reset == 0: first pass (specific authority); reset == 1: owner fallback.
        reset = 0
        while 1:
            if num == 1 and reset == 0:
                check = 'ban'
            elif num == 3 and reset == 0:
                check = 'toron'
            elif num == 4 and reset == 0:
                check = 'check'
            elif num == 5 and reset == 0:
                check = 'acl'
            elif num == 6 and reset == 0:
                check = 'hidel'
            elif num == 7 and reset == 0:
                check = 'give'
            else:
                check = 'owner'
            curs.execute('select name from alist where name = ? and acl = ?', [user[0][0], check])
            if curs.fetchall():
                if what:
                    # Audit trail of which admin used which power, and when.
                    curs.execute("insert into re_admin (who, what, time) values (?, ?, ?)", [ip, what, get_time()])
                    conn.commit()
                return 1
            else:
                if reset == 0:
                    reset = 1
                else:
                    break
    return 0
def ip_pas(raw_ip):
    """Render an IP address or user name for display in history lists.

    IPs may be replaced by an MD5 digest when the ip_view setting is on;
    user names become links to their user page.  A '(tool)' link is
    appended unless the identity is hidden from the current viewer.
    """
    hide = 0
    if re.search("(\.|:)", raw_ip):
        # Contains '.' or ':' -> an IPv4/IPv6 address (or a tool pseudo-user).
        if not re.search("^" + load_lang('tool', 1) + ":", raw_ip):
            curs.execute("select data from other where name = 'ip_view'")
            data = curs.fetchall()
            if data and data[0][0] != '':
                # ip_view enabled: show a digest instead of the raw address.
                ip = '<span style="font-size: 75%;">' + hashlib.md5(bytes(raw_ip, 'utf-8')).hexdigest() + '</span>'
                # Bug fix: admin_check takes a numeric authority code
                # (1 == 'ban'); the original passed the string 'ban',
                # which silently fell through to the owner-only check.
                if not admin_check(1, None):
                    hide = 1
            else:
                ip = raw_ip
        else:
            # Tool pseudo-users are shown as-is and never get a tool link.
            ip = raw_ip
            hide = 1
    else:
        # A registered user name: link it, dimmed if the user page is missing.
        curs.execute("select title from data where title = ?", ['user:' + raw_ip])
        if curs.fetchall():
            ip = '<a href="/w/' + url_pas('user:' + raw_ip) + '">' + raw_ip + '</a>'
        else:
            ip = '<a id="not_thing" href="/w/' + url_pas('user:' + raw_ip) + '">' + raw_ip + '</a>'
    if hide == 0:
        ip += ' <a href="/tool/' + url_pas(raw_ip) + '">(' + load_lang('tool') + ')</a>'
    return ip
def custom():
    """Collect the per-request user context used by every template.

    Returns ['', '', user_icon, user_head, email, user_name, lang]
    where user_icon is 0 (anonymous), 1 (logged in) or 2 (logged in
    with pending alarms).  The two leading slots are reserved.
    """
    if 'head' in flask.session:
        user_head = flask.session['head']
    else:
        user_head = ''
    if 'state' in flask.session and flask.session['state'] == 1:
        # Logged in; check whether any alarm rows are waiting.
        curs.execute('select name from alarm where name = ? limit 1', [ip_check()])
        if curs.fetchall():
            user_icon = 2
        else:
            user_icon = 1
    else:
        user_icon = 0
    if user_icon != 0:
        curs.execute('select data from user_set where name = "email" and id = ?', [ip_check()])
        data = curs.fetchall()
        if data:
            email = data[0][0]
        else:
            email = ''
    else:
        email = ''
    if user_icon != 0:
        user_name = ip_check()
    else:
        user_name = load_lang('user')
    return ['', '', user_icon, user_head, email, user_name, load_lang(data = '', num = 2)]
def load_skin(data = ''):
    """Build the <option> list for the skin chooser.

    data may be an explicit skin name; when empty, the user's saved
    skin (falling back to the wiki-wide default) is looked up.  The
    selected skin is moved to the front of the list so it renders as
    the pre-selected <option>.
    """
    system_file = ['main_css', 'easter_egg.html']
    if data == '':
        curs.execute('select data from user_set where name = "skin" and id = ?', [ip_check()])
        row = curs.fetchall()
        if row:
            data = row[0][0]
        else:
            curs.execute('select data from other where name = "skin"')
            row = curs.fetchall()
            data = row[0][0] if row else None
    div2 = ''
    for skin_name in os.listdir(os.path.abspath('views')):
        if skin_name in system_file:
            continue
        option = '<option value="' + skin_name + '">' + skin_name + '</option>'
        if data == skin_name:
            div2 = option + div2
        else:
            div2 += option
    return div2
def acl_check(name, tool = ''):
    """Return 1 when the current user is denied access to *name*, else 0.

    tool == 'render' checks the per-page *view* ACL only; every other
    call checks edit access (ban state, user-page ownership, file-page
    admin requirement, per-page ACL and the wiki-wide edit setting).
    """
    ip = ip_check()
    if tool == 'render':
        # Bug fix: the original referenced user_data in this branch
        # before it was ever assigned (NameError whenever a view ACL
        # row existed); fetch the viewer's account row up front.
        curs.execute("select acl from user where id = ?", [ip])
        user_data = curs.fetchall()
        curs.execute("select view from acl where title = ?", [name])
        acl_data = curs.fetchall()
        if acl_data:
            if acl_data[0][0] == 'user':
                if not user_data:
                    return 1
            if acl_data[0][0] == 'admin':
                if not user_data:
                    return 1
                if not admin_check(5, 'view (' + name + ')') == 1:
                    return 1
        return 0
    else:
        if ban_check() == 1:
            return 1
        # User pages: only the owner (or an acl admin) may edit.
        acl_c = re.search("^user:([^/]*)", name)
        if acl_c:
            acl_n = acl_c.groups()
            if admin_check(5, None) == 1:
                return 0
            curs.execute("select dec from acl where title = ?", ['user:' + acl_n[0]])
            acl_data = curs.fetchall()
            if acl_data:
                if acl_data[0][0] == 'all':
                    return 0
                if acl_data[0][0] == 'user' and not re.search("(\.|:)", ip):
                    return 0
                if ip != acl_n[0] or re.search("(\.|:)", ip):
                    return 1
            if ip == acl_n[0] and not re.search("(\.|:)", ip) and not re.search("(\.|:)", acl_n[0]):
                return 0
            else:
                return 1
        # File pages always require the acl authority.
        file_c = re.search("^file:(.*)", name)
        if file_c and admin_check(5, 'edit (' + name + ')') != 1:
            return 1
        curs.execute("select acl from user where id = ?", [ip])
        user_data = curs.fetchall()
        # Per-page edit ACL.
        curs.execute("select dec from acl where title = ?", [name])
        acl_data = curs.fetchall()
        if acl_data:
            if acl_data[0][0] == 'user':
                if not user_data:
                    return 1
            if acl_data[0][0] == 'admin':
                if not user_data:
                    return 1
                if not admin_check(5, 'edit (' + name + ')') == 1:
                    return 1
        # Wiki-wide edit restriction.
        curs.execute('select data from other where name = "edit"')
        set_data = curs.fetchall()
        if set_data:
            if set_data[0][0] == 'login':
                if not user_data:
                    return 1
            if set_data[0][0] == 'admin':
                if not user_data:
                    return 1
                if not admin_check(5, None) == 1:
                    return 1
        return 0
def ban_check(ip = None, tool = None):
    """Return 1 when *ip* (default: the current visitor) is blocked, else 0.

    A block row may match the exact address or its "band" (first two
    IPv4 octets); band rows take priority.  When tool == 'login', a
    block flagged login == 'O' still permits logging in.
    """
    if not ip:
        ip = ip_check()
    band_match = re.search("^([0-9]{1,3}\.[0-9]{1,3})", ip)
    band_key = band_match.groups()[0] if band_match else '-'
    curs.execute("select end, login from ban where block = ?", [band_key])
    band_rows = curs.fetchall()
    curs.execute("select end, login from ban where block = ?", [ip])
    exact_rows = curs.fetchall()
    rows = band_rows or exact_rows
    if not rows:
        return 0
    end_time = rows[0][0]
    login_ok = rows[0][1]
    # An empty end time means an indefinite block; otherwise the block
    # only applies while its expiry is still in the future.
    if end_time != '' and end_time <= get_time():
        return 0
    if tool and tool == 'login' and login_ok == 'O':
        return 0
    return 1
def topic_check(name, sub):
    """Return 1 when the current user may NOT post to thread (name, sub), else 0.

    Checks, in order: ban state, the wiki-wide discussion setting, the
    per-page discussion ACL, and whether the thread has been stopped
    (stopped threads accept posts from toron admins only).
    """
    ip = ip_check()
    if ban_check() == 1:
        return 1
    curs.execute("select acl from user where id = ?", [ip])
    user_data = curs.fetchall()
    # Wiki-wide discussion restriction.
    curs.execute('select data from other where name = "discussion"')
    acl_data = curs.fetchall()
    if acl_data:
        if acl_data[0][0] == 'login':
            if not user_data:
                return 1
        if acl_data[0][0] == 'admin':
            if not user_data:
                return 1
            if not admin_check(3, 'topic (' + name + ')') == 1:
                return 1
    # Per-page discussion ACL.
    curs.execute("select dis from acl where title = ?", [name])
    acl_data = curs.fetchall()
    if acl_data:
        if acl_data[0][0] == 'user':
            if not user_data:
                return 1
        if acl_data[0][0] == 'admin':
            if not user_data:
                return 1
            if not admin_check(3, 'topic (' + name + ')') == 1:
                return 1
    # Stopped threads: only toron admins may still post.
    curs.execute("select title from rd where title = ? and sub = ? and not stop = ''", [name, sub])
    if curs.fetchall():
        if not admin_check(3, 'topic (' + name + ')') == 1:
            return 1
    return 0
def ban_insert(name, end, why, login, blocker):
    """Toggle a block on *name* and record the action in the rb log.

    If a ban row already exists, the block is lifted; otherwise a new
    block is created expiring after *end* seconds ('0' = indefinite).
    NOTE(review): login is compared with != '', so passing None also
    marks the block as login-allowed ('O') — confirm that is intended.
    """
    now_time = get_time()
    # A bare "a.b" pattern is an IP band block (first two IPv4 octets).
    if re.search("^([0-9]{1,3}\.[0-9]{1,3})$", name):
        band = 'O'
    else:
        band = ''
    curs.execute("select block from ban where block = ?", [name])
    if curs.fetchall():
        # Already blocked -> unblock, logging the release in rb.
        curs.execute("insert into rb (block, end, today, blocker, why, band) values (?, ?, ?, ?, ?, ?)", [name, load_lang('release', 1), now_time, blocker, '', band])
        curs.execute("delete from ban where block = ?", [name])
    else:
        if login != '':
            login = 'O'
        else:
            login = ''
        if end != '0':
            # Convert the duration in seconds into an absolute expiry time.
            time = datetime.datetime.now()
            plus = datetime.timedelta(seconds = int(end))
            r_time = (time + plus).strftime("%Y-%m-%d %H:%M:%S")
        else:
            r_time = ''
        curs.execute("insert into rb (block, end, today, blocker, why, band) values (?, ?, ?, ?, ?, ?)", [name, r_time, now_time, blocker, why, band])
        curs.execute("insert into ban (block, end, why, band, login) values (?, ?, ?, ?, ?)", [name, r_time, why, band, login])
    conn.commit()
def rd_plus(title, sub, date):
    """Upsert the last-activity date of the (title, sub) discussion thread."""
    curs.execute("select title from rd where title = ? and sub = ?", [title, sub])
    already = curs.fetchall()
    if not already:
        curs.execute("insert into rd (title, sub, date) values (?, ?, ?)", [title, sub, date])
    else:
        curs.execute("update rd set date = ? where title = ? and sub = ?", [date, title, sub])
def history_plus(title, data, date, ip, send, leng):
    """Append a new revision row for *title*, auto-incrementing its id."""
    curs.execute("select id from history where title = ? order by id + 0 desc limit 1", [title])
    newest = curs.fetchall()
    # ids are stored as text, so cast through int for the increment.
    rev_id = str(int(newest[0][0]) + 1) if newest else '1'
    curs.execute("insert into history (id, title, data, date, ip, send, leng, hide) values (?, ?, ?, ?, ?, ?, ?, '')", [rev_id, title, data, date, ip, send, leng])
def leng_check(first, second):
    """Return the size delta between two revisions as a signed string ('+n', '-n' or '0')."""
    delta = second - first
    if delta > 0:
        return '+' + str(delta)
    if delta < 0:
        return '-' + str(-delta)
    return '0'
def edit_filter_do(data):
    """Run edit text through the regex filter list; return 1 when blocked.

    Admins with the ban authority bypass the filter.  On the first
    matching pattern the author is blocked (data_list[1] is the block
    duration in seconds, 'X' meaning indefinite) and 1 is returned;
    0 means the edit passed.
    """
    if admin_check(1, 'edit_filter pass') != 1:
        curs.execute("select regex, sub from filter")
        for data_list in curs.fetchall():
            # Patterns are matched case-insensitively against the edit text.
            match = re.compile(data_list[0], re.I)
            if match.search(data):
                ban_insert(
                    ip_check(),
                    '0' if data_list[1] == 'X' else data_list[1],
                    load_lang('edit', 1) + ' ' + load_lang('filter', 1),
                    None,
                    load_lang('tool', 1) + ':' + load_lang('edit', 1) + ' ' + load_lang('filter', 1)
                )
                return 1
    return 0
def redirect(data):
    """Thin wrapper around flask.redirect so call sites stay short."""
    return flask.redirect(data)
def re_error(data):
    """Render an error page, or the ban-notice page for '/ban'.

    data is either '/ban' or a path like '/error/<num>'; any other
    value falls back to a redirect to the front page.
    """
    conn.commit()
    if data == '/ban':
        ip = ip_check()
        end = '<li>' + load_lang('why') + ' : ' + load_lang('authority_error') + '</li>'
        if ban_check() == 1:
            curs.execute("select end, why from ban where block = ?", [ip])
            end_data = curs.fetchall()
            if not end_data:
                # No exact match: the block may target the IP band instead.
                match = re.search("^([0-9]{1,3}\.[0-9]{1,3})", ip)
                if match:
                    curs.execute("select end, why from ban where block = ?", [match.groups()[0]])
                    end_data = curs.fetchall()
            if end_data:
                end = '<li>' + load_lang('state') + ' : ' + load_lang('ban') + '</li><li>'
                if end_data[0][0]:
                    # Compare timestamps numerically by stripping '-', ' ' and ':'.
                    now = int(re.sub('(\-| |:)', '', get_time()))
                    day = int(re.sub('(\-| |:)', '', end_data[0][0]))
                    if now >= day:
                        # Block already expired: drop it and reload the page.
                        curs.execute("delete from ban where block = ?", [ip])
                        conn.commit()
                        end += '<script>location.reload();</script>'
                    else:
                        end += 'end : ' + end_data[0][0]
                else:
                    end += load_lang('limitless')
                end += '</li>'
                if end_data[0][1] != '':
                    end += '<li>' + load_lang('why') + ' : ' + end_data[0][1] + '</li>'
        return easy_minify(flask.render_template(skin_check(),
            imp = ['error', wiki_set(1), custom(), other2([0, 0])],
            data = '<h2>error</h2><ul>' + end + '</ul>',
            menu = 0
        ))
    else:
        error_data = re.search('\/error\/([0-9]+)', data)
        if error_data:
            # Map the numeric error code to its localized message.
            num = int(error_data.groups()[0])
            if num == 1:
                data = load_lang('no_login_error')
            elif num == 2:
                data = load_lang('no_exist_user_error')
            elif num == 3:
                data = load_lang('authority_error')
            elif num == 4:
                data = load_lang('no_admin_block_error')
            elif num == 5:
                data = load_lang('skin_error')
            elif num == 6:
                data = load_lang('same_id_exist_error')
            elif num == 7:
                data = load_lang('long_id_error')
            elif num == 8:
                data = load_lang('id_char_error') + ' <a href="/name_filter">(' + load_lang('id') + ' ' + load_lang('filter') + ')</a>'
            elif num == 9:
                data = load_lang('file_exist_error')
            elif num == 10:
                data = load_lang('password_error')
            elif num == 13:
                data = load_lang('recaptcha_error')
            elif num == 14:
                data = load_lang('file_extension_error')
            elif num == 15:
                data = load_lang('edit_record_error')
            elif num == 16:
                data = load_lang('same_file_error')
            elif num == 17:
                data = load_lang('file_capacity_error') + ' ' + wiki_set(3)
            elif num == 19:
                data = load_lang('decument_exist_error')
            elif num == 20:
                data = load_lang('password_diffrent_error')
            elif num == 21:
                data = load_lang('edit_filter_error')
            elif num == 22:
                data = load_lang('file_name_error')
            else:
                data = '???'
            return easy_minify(flask.render_template(skin_check(),
                imp = ['error', wiki_set(1), custom(), other2([0, 0])],
                data = '<h2>error</h2><ul><li>' + data + '</li></ul>',
                menu = 0
            ))
        else:
            # Fix: removed stray '| true | true' extraction residue that
            # had been fused onto this line (it would raise NameError).
            return redirect('/')
f73010c48c4a643298d574ef9fdbe70bed0b28c8 | 20,916 | py | Python | fitapp/tests/test_integration.py | thesignalcenter/django-fitbit | aa17ee5dacbbf4ad1edea85f480829185e6f39f9 | [
"Apache-2.0"
] | null | null | null | fitapp/tests/test_integration.py | thesignalcenter/django-fitbit | aa17ee5dacbbf4ad1edea85f480829185e6f39f9 | [
"Apache-2.0"
] | null | null | null | fitapp/tests/test_integration.py | thesignalcenter/django-fitbit | aa17ee5dacbbf4ad1edea85f480829185e6f39f9 | [
"Apache-2.0"
] | 2 | 2018-06-21T20:12:01.000Z | 2019-06-11T23:32:07.000Z | import json
import time
from collections import OrderedDict
from datetime import datetime
import requests_mock
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest
from django.test.utils import override_settings
from django.urls import reverse
from fitbit.exceptions import HTTPConflict
from freezegun import freeze_time
from mock import patch
from requests.auth import _basic_auth_str
from fitapp import utils
from fitapp.decorators import fitbit_integration_warning
from fitapp.models import TimeSeriesDataType, UserFitbit
from fitapp.tasks import subscribe, unsubscribe
from .base import FitappTestBase
class TestIntegrationUtility(FitappTestBase):
def test_is_integrated(self):
"""Users with stored OAuth information are integrated."""
self.assertTrue(utils.is_integrated(self.user))
def test_is_not_integrated(self):
"""User is not integrated if we have no OAuth data for them"""
UserFitbit.objects.all().delete()
self.assertFalse(utils.is_integrated(self.user))
def test_unauthenticated(self):
"""User is not integrated if they aren't logged in."""
user = AnonymousUser()
self.assertFalse(utils.is_integrated(user))
class TestIntegrationDecorator(FitappTestBase):
def setUp(self):
super(TestIntegrationDecorator, self).setUp()
self.fake_request = HttpRequest()
self.fake_request.user = self.user
self.fake_view = lambda request: "hello"
self.messages = []
def _mock_decorator(self, msg=None):
def mock_error(request, message, *args, **kwargs):
self.messages.append(message)
with patch.object(messages, 'error', mock_error) as error:
return fitbit_integration_warning(msg=msg)(self.fake_view)(
self.fake_request)
def test_unauthenticated(self):
"""Message should be added if user is not logged in."""
self.fake_request.user = AnonymousUser()
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(
self.messages[0], utils.get_setting('FITAPP_DECORATOR_MESSAGE'))
def test_is_integrated(self):
"""Decorator should have no effect if user is integrated."""
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 0)
def test_is_not_integrated(self):
"""Message should be added if user is not integrated."""
UserFitbit.objects.all().delete()
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(
self.messages[0], utils.get_setting('FITAPP_DECORATOR_MESSAGE'))
def test_custom_msg(self):
"""Decorator should support a custom message string."""
UserFitbit.objects.all().delete()
msg = "customized"
results = self._mock_decorator(msg)
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(self.messages[0], "customized")
def test_custom_msg_func(self):
"""Decorator should support a custom message function."""
UserFitbit.objects.all().delete()
msg = lambda request: "message to {0}".format(request.user)
results = self._mock_decorator(msg)
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(self.messages[0], msg(self.fake_request))
class TestLoginView(FitappTestBase):
url_name = 'fitbit-login'
def test_get(self):
"""
Login view should generate a token_url and then
redirect to an authorization URL.
"""
response = self._mock_client()
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unauthenticated(self):
"""User must be logged in to access Login view."""
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unintegrated(self):
"""Fitbit credentials not required to access Login view."""
self.fbuser.delete()
response = self._mock_client()
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(UserFitbit.objects.count(), 0)
def test_next(self):
response = self._mock_client(get_kwargs={'next': '/next'})
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(self.client.session.get('fitbit_next', None), '/next')
self.assertEqual(UserFitbit.objects.count(), 1)
class TestCompleteView(FitappTestBase):
url_name = 'fitbit-complete'
user_id = 'userid'
token = {
'access_token': 'AccessToken123',
'refresh_token': 'RefreshToken123',
'expires_at': time.time() + 300,
'user_id': user_id
}
code = 'Code123'
def setUp(self):
super(TestCompleteView, self).setUp()
self.fbuser.delete()
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete(self, tsd_apply_async, sub_apply_async):
"""Complete view should fetch & store user's access credentials."""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_once_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
tsdts = TimeSeriesDataType.objects.all()
self.assertEqual(tsd_apply_async.call_count, tsdts.count())
for i, _type in enumerate(tsdts):
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, _type.category, _type.resource,),
countdown=10 + (i * 5))
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
@override_settings(FITAPP_HISTORICAL_INIT_DELAY=11)
@override_settings(FITAPP_BETWEEN_DELAY=6)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_different_delays(self, tsd_apply_async, sub_apply_async):
"""Complete view should use configured delays"""
tsdts = TimeSeriesDataType.objects.all()
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
for i, _type in enumerate(tsdts):
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, _type.category, _type.resource,),
countdown=11 + (i * 6))
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_empty_subs(self, tsd_apply_async, sub_apply_async):
"""Complete view should not import data if subs dict is empty"""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([('foods', [])]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_no_res(self, tsd_apply_async, sub_apply_async):
"""Complete view shouldn't import data if subs dict has no resources"""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([
('foods', ['steps'])
]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_bad_resources(self, tsd_apply_async, sub_apply_async):
"""
Complete view shouldn't import data if subs dict has invalid resources
"""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertContains(
response,
"['steps'] resources are invalid for the foods category",
status_code=500
)
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([
('activities', ['steps', 'calories', 'distance', 'activityCalories']),
('foods', ['log/water']),
]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_sub_list(self, tsd_apply_async, sub_apply_async):
"""
Complete view should only import the listed subscriptions, in the right
order
"""
activities = TimeSeriesDataType.activities
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'steps',), countdown=10)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'calories',), countdown=15)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'distance',), countdown=20)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'activityCalories'), countdown=25)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, TimeSeriesDataType.foods, 'log/water',),
countdown=30)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_already_integrated(self, tsd_apply_async, sub_apply_async):
"""
Complete view redirect to the error view if a user attempts to connect
an already integrated fitbit user to a second user.
"""
self.create_userfitbit(user=self.user, fitbit_user=self.user_id)
username = '{0}2'.format(self.username)
self.create_user(username=username, password=self.password)
self.client.logout()
self.client.login(username=username, password=self.password)
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.all().count(), 1)
self.assertEqual(sub_apply_async.call_count, 0)
self.assertEqual(tsd_apply_async.call_count, 0)
def test_unauthenticated(self):
"""User must be logged in to access Complete view."""
self.client.logout()
response = self._mock_client()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_next(self, tsd_apply_async, sub_apply_async):
"""
Complete view should redirect to session['fitbit_next'] if available.
"""
self._set_session_vars(fitbit_next='/test')
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, '/test')
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_once_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
self.assertEqual(
tsd_apply_async.call_count, TimeSeriesDataType.objects.count())
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.expires_at, self.token['expires_at'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
def test_access_error(self):
"""
Complete view should redirect to error if access token is
inaccessible.
"""
response = self._mock_client(client_kwargs={'error': Exception})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
def test_no_code(self):
"""
Complete view should redirect to error if `code` param is not
present.
"""
response = self._mock_client()
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
def test_no_access_token(self):
"""
Complete view should redirect to error if there isn't an access_token.
"""
token = self.token.copy()
token.pop('access_token')
response = self._mock_client(
client_kwargs=token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_integrated(self, tsd_apply_async, sub_apply_async):
"""Complete view should overwrite existing credentials for this user.
"""
self.fbuser = self.create_userfitbit(user=self.user)
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
self.assertEqual(tsd_apply_async.call_count,
TimeSeriesDataType.objects.count())
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
class TestErrorView(FitappTestBase):
url_name = 'fitbit-error'
def test_get(self):
"""Should be able to retrieve Error page."""
response = self._get()
self.assertEqual(response.status_code, 200)
def test_unauthenticated(self):
"""User must be logged in to access Error view."""
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
def test_unintegrated(self):
"""No Fitbit credentials required to access Error view."""
self.fbuser.delete()
response = self._get()
self.assertEqual(response.status_code, 200)
class TestLogoutView(FitappTestBase):
url_name = 'fitbit-logout'
@patch('fitapp.tasks.unsubscribe.apply_async')
def test_get(self, apply_async):
"""Logout view should remove associated UserFitbit and redirect."""
response = self._get()
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
apply_async.assert_called_once_with(kwargs=kwargs, countdown=5)
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(UserFitbit.objects.count(), 0)
@freeze_time(datetime.fromtimestamp(1483500000))
@patch('fitbit.Fitbit.subscription')
def test_get_token_expired(self, subscription):
subs_url = 'https://api.fitbit.com/1/user/-/apiSubscriptions.json'
self.fbuser.expires_at = 1483400000
self.fbuser.save()
sub = {
'ownerId': self.fbuser.fitbit_user,
'subscriberId': '1',
'subscriptionId': str(self.user.id),
'collectionType': 'user',
'ownerType': 'user'
}
subs = {'apiSubscriptions': [sub]}
tok = {
'access_token': 'fake_return_access_token',
'refresh_token': 'fake_return_refresh_token',
'expires_at': 1483600000,
}
with requests_mock.mock() as m:
m.get(subs_url, text=json.dumps(subs), status_code=200)
m.post('https://api.fitbit.com/oauth2/token', text=json.dumps(tok))
response = self._get()
mock_requests = m.request_history
assert mock_requests[0].path == '/oauth2/token'
assert mock_requests[0].headers['Authorization'] == _basic_auth_str(
settings.FITAPP_CONSUMER_KEY,
settings.FITAPP_CONSUMER_SECRET
)
assert mock_requests[1].path == '/1/user/-/apisubscriptions.json'
assert mock_requests[1].headers['Authorization'] == 'Bearer {}'.format(
tok['access_token']
)
subscription.assert_called_once_with(
sub['subscriptionId'], sub['subscriberId'], method="DELETE")
def test_unauthenticated(self):
"""User must be logged in to access Logout view."""
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unintegrated(self):
"""No Fitbit credentials required to access Logout view."""
self.fbuser.delete()
response = self._get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.unsubscribe.apply_async')
def test_next(self, apply_async):
"""Logout view should redirect to GET['next'] if available."""
response = self._get(get_kwargs={'next': '/test'})
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
apply_async.assert_called_with(kwargs=kwargs, countdown=5)
self.assertRedirectsNoFollow(response, '/test')
self.assertEqual(UserFitbit.objects.count(), 0)
class TestSubscription(FitappTestBase):
@patch('fitbit.Fitbit.subscription')
def test_subscribe(self, subscription):
subscribe.apply_async((self.fbuser.fitbit_user, 1,))
subscription.assert_called_once_with(self.user.id, 1, )
@patch('fitbit.Fitbit.subscription')
def test_subscribe_error(self, subscription):
subscription.side_effect = HTTPConflict
apply_result = subscribe.apply_async((self.fbuser.fitbit_user, 1,))
self.assertEqual(apply_result.status, 'REJECTED')
subscription.assert_called_once_with(self.user.id, 1, )
@patch('fitbit.Fitbit.subscription')
@patch('fitbit.Fitbit.list_subscriptions')
def test_unsubscribe(self, list_subscriptions, subscription):
sub = {
'ownerId': self.fbuser.fitbit_user,
'subscriberId': '1',
'subscriptionId': str(self.user.id).encode('utf8'),
'collectionType': 'user',
'ownerType': 'user'
}
list_subscriptions.return_value = {'apiSubscriptions': [sub]}
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
unsubscribe.apply_async(kwargs=kwargs)
list_subscriptions.assert_called_once_with()
subscription.assert_called_once_with(
sub['subscriptionId'], sub['subscriberId'], method="DELETE")
@patch('fitbit.Fitbit.subscription')
@patch('fitbit.Fitbit.list_subscriptions')
def test_unsubscribe_error(self, list_subscriptions, subscription):
list_subscriptions.side_effect = HTTPConflict
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
result = unsubscribe.apply_async(kwargs=kwargs)
self.assertEqual(result.status, 'REJECTED')
list_subscriptions.assert_called_once_with()
self.assertEqual(subscription.call_count, 0)
| 41.335968 | 81 | 0.672117 | import json
import time
from collections import OrderedDict
from datetime import datetime
import requests_mock
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest
from django.test.utils import override_settings
from django.urls import reverse
from fitbit.exceptions import HTTPConflict
from freezegun import freeze_time
from mock import patch
from requests.auth import _basic_auth_str
from fitapp import utils
from fitapp.decorators import fitbit_integration_warning
from fitapp.models import TimeSeriesDataType, UserFitbit
from fitapp.tasks import subscribe, unsubscribe
from .base import FitappTestBase
class TestIntegrationUtility(FitappTestBase):
def test_is_integrated(self):
self.assertTrue(utils.is_integrated(self.user))
def test_is_not_integrated(self):
UserFitbit.objects.all().delete()
self.assertFalse(utils.is_integrated(self.user))
def test_unauthenticated(self):
user = AnonymousUser()
self.assertFalse(utils.is_integrated(user))
class TestIntegrationDecorator(FitappTestBase):
def setUp(self):
super(TestIntegrationDecorator, self).setUp()
self.fake_request = HttpRequest()
self.fake_request.user = self.user
self.fake_view = lambda request: "hello"
self.messages = []
def _mock_decorator(self, msg=None):
def mock_error(request, message, *args, **kwargs):
self.messages.append(message)
with patch.object(messages, 'error', mock_error) as error:
return fitbit_integration_warning(msg=msg)(self.fake_view)(
self.fake_request)
def test_unauthenticated(self):
self.fake_request.user = AnonymousUser()
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(
self.messages[0], utils.get_setting('FITAPP_DECORATOR_MESSAGE'))
def test_is_integrated(self):
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 0)
def test_is_not_integrated(self):
UserFitbit.objects.all().delete()
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(
self.messages[0], utils.get_setting('FITAPP_DECORATOR_MESSAGE'))
def test_custom_msg(self):
UserFitbit.objects.all().delete()
msg = "customized"
results = self._mock_decorator(msg)
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(self.messages[0], "customized")
def test_custom_msg_func(self):
UserFitbit.objects.all().delete()
msg = lambda request: "message to {0}".format(request.user)
results = self._mock_decorator(msg)
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(self.messages[0], msg(self.fake_request))
class TestLoginView(FitappTestBase):
url_name = 'fitbit-login'
def test_get(self):
response = self._mock_client()
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unauthenticated(self):
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unintegrated(self):
self.fbuser.delete()
response = self._mock_client()
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(UserFitbit.objects.count(), 0)
def test_next(self):
response = self._mock_client(get_kwargs={'next': '/next'})
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(self.client.session.get('fitbit_next', None), '/next')
self.assertEqual(UserFitbit.objects.count(), 1)
class TestCompleteView(FitappTestBase):
url_name = 'fitbit-complete'
user_id = 'userid'
token = {
'access_token': 'AccessToken123',
'refresh_token': 'RefreshToken123',
'expires_at': time.time() + 300,
'user_id': user_id
}
code = 'Code123'
def setUp(self):
super(TestCompleteView, self).setUp()
self.fbuser.delete()
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete(self, tsd_apply_async, sub_apply_async):
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_once_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
tsdts = TimeSeriesDataType.objects.all()
self.assertEqual(tsd_apply_async.call_count, tsdts.count())
for i, _type in enumerate(tsdts):
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, _type.category, _type.resource,),
countdown=10 + (i * 5))
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
@override_settings(FITAPP_HISTORICAL_INIT_DELAY=11)
@override_settings(FITAPP_BETWEEN_DELAY=6)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_different_delays(self, tsd_apply_async, sub_apply_async):
tsdts = TimeSeriesDataType.objects.all()
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
for i, _type in enumerate(tsdts):
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, _type.category, _type.resource,),
countdown=11 + (i * 6))
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_empty_subs(self, tsd_apply_async, sub_apply_async):
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([('foods', [])]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_no_res(self, tsd_apply_async, sub_apply_async):
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([
('foods', ['steps'])
]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_bad_resources(self, tsd_apply_async, sub_apply_async):
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertContains(
response,
"['steps'] resources are invalid for the foods category",
status_code=500
)
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([
('activities', ['steps', 'calories', 'distance', 'activityCalories']),
('foods', ['log/water']),
]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_sub_list(self, tsd_apply_async, sub_apply_async):
activities = TimeSeriesDataType.activities
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'steps',), countdown=10)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'calories',), countdown=15)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'distance',), countdown=20)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'activityCalories'), countdown=25)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, TimeSeriesDataType.foods, 'log/water',),
countdown=30)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_already_integrated(self, tsd_apply_async, sub_apply_async):
self.create_userfitbit(user=self.user, fitbit_user=self.user_id)
username = '{0}2'.format(self.username)
self.create_user(username=username, password=self.password)
self.client.logout()
self.client.login(username=username, password=self.password)
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.all().count(), 1)
self.assertEqual(sub_apply_async.call_count, 0)
self.assertEqual(tsd_apply_async.call_count, 0)
def test_unauthenticated(self):
self.client.logout()
response = self._mock_client()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_next(self, tsd_apply_async, sub_apply_async):
self._set_session_vars(fitbit_next='/test')
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, '/test')
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_once_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
self.assertEqual(
tsd_apply_async.call_count, TimeSeriesDataType.objects.count())
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.expires_at, self.token['expires_at'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
def test_access_error(self):
response = self._mock_client(client_kwargs={'error': Exception})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
def test_no_code(self):
response = self._mock_client()
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
def test_no_access_token(self):
token = self.token.copy()
token.pop('access_token')
response = self._mock_client(
client_kwargs=token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_integrated(self, tsd_apply_async, sub_apply_async):
self.fbuser = self.create_userfitbit(user=self.user)
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
self.assertEqual(tsd_apply_async.call_count,
TimeSeriesDataType.objects.count())
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
class TestErrorView(FitappTestBase):
url_name = 'fitbit-error'
def test_get(self):
response = self._get()
self.assertEqual(response.status_code, 200)
def test_unauthenticated(self):
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
def test_unintegrated(self):
self.fbuser.delete()
response = self._get()
self.assertEqual(response.status_code, 200)
class TestLogoutView(FitappTestBase):
url_name = 'fitbit-logout'
@patch('fitapp.tasks.unsubscribe.apply_async')
def test_get(self, apply_async):
response = self._get()
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
apply_async.assert_called_once_with(kwargs=kwargs, countdown=5)
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(UserFitbit.objects.count(), 0)
@freeze_time(datetime.fromtimestamp(1483500000))
@patch('fitbit.Fitbit.subscription')
def test_get_token_expired(self, subscription):
subs_url = 'https://api.fitbit.com/1/user/-/apiSubscriptions.json'
self.fbuser.expires_at = 1483400000
self.fbuser.save()
sub = {
'ownerId': self.fbuser.fitbit_user,
'subscriberId': '1',
'subscriptionId': str(self.user.id),
'collectionType': 'user',
'ownerType': 'user'
}
subs = {'apiSubscriptions': [sub]}
tok = {
'access_token': 'fake_return_access_token',
'refresh_token': 'fake_return_refresh_token',
'expires_at': 1483600000,
}
with requests_mock.mock() as m:
m.get(subs_url, text=json.dumps(subs), status_code=200)
m.post('https://api.fitbit.com/oauth2/token', text=json.dumps(tok))
response = self._get()
mock_requests = m.request_history
assert mock_requests[0].path == '/oauth2/token'
assert mock_requests[0].headers['Authorization'] == _basic_auth_str(
settings.FITAPP_CONSUMER_KEY,
settings.FITAPP_CONSUMER_SECRET
)
assert mock_requests[1].path == '/1/user/-/apisubscriptions.json'
assert mock_requests[1].headers['Authorization'] == 'Bearer {}'.format(
tok['access_token']
)
subscription.assert_called_once_with(
sub['subscriptionId'], sub['subscriberId'], method="DELETE")
def test_unauthenticated(self):
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unintegrated(self):
self.fbuser.delete()
response = self._get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.unsubscribe.apply_async')
def test_next(self, apply_async):
response = self._get(get_kwargs={'next': '/test'})
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
apply_async.assert_called_with(kwargs=kwargs, countdown=5)
self.assertRedirectsNoFollow(response, '/test')
self.assertEqual(UserFitbit.objects.count(), 0)
class TestSubscription(FitappTestBase):
@patch('fitbit.Fitbit.subscription')
def test_subscribe(self, subscription):
subscribe.apply_async((self.fbuser.fitbit_user, 1,))
subscription.assert_called_once_with(self.user.id, 1, )
@patch('fitbit.Fitbit.subscription')
def test_subscribe_error(self, subscription):
subscription.side_effect = HTTPConflict
apply_result = subscribe.apply_async((self.fbuser.fitbit_user, 1,))
self.assertEqual(apply_result.status, 'REJECTED')
subscription.assert_called_once_with(self.user.id, 1, )
@patch('fitbit.Fitbit.subscription')
@patch('fitbit.Fitbit.list_subscriptions')
def test_unsubscribe(self, list_subscriptions, subscription):
sub = {
'ownerId': self.fbuser.fitbit_user,
'subscriberId': '1',
'subscriptionId': str(self.user.id).encode('utf8'),
'collectionType': 'user',
'ownerType': 'user'
}
list_subscriptions.return_value = {'apiSubscriptions': [sub]}
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
unsubscribe.apply_async(kwargs=kwargs)
list_subscriptions.assert_called_once_with()
subscription.assert_called_once_with(
sub['subscriptionId'], sub['subscriberId'], method="DELETE")
@patch('fitbit.Fitbit.subscription')
@patch('fitbit.Fitbit.list_subscriptions')
def test_unsubscribe_error(self, list_subscriptions, subscription):
list_subscriptions.side_effect = HTTPConflict
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
result = unsubscribe.apply_async(kwargs=kwargs)
self.assertEqual(result.status, 'REJECTED')
list_subscriptions.assert_called_once_with()
self.assertEqual(subscription.call_count, 0)
| true | true |
f730117870c82072f11be34d8f41060542937d2d | 2,060 | py | Python | otcextensions/tests/functional/sdk/vpc/v1/test_vpc.py | artem-lifshits/python-otcextensions | 2021da124f393e0429dd5913a3bc635e6143ba1e | [
"Apache-2.0"
] | 10 | 2018-03-03T17:59:59.000Z | 2020-01-08T10:03:00.000Z | otcextensions/tests/functional/sdk/vpc/v1/test_vpc.py | artem-lifshits/python-otcextensions | 2021da124f393e0429dd5913a3bc635e6143ba1e | [
"Apache-2.0"
] | 39 | 2018-03-26T14:43:23.000Z | 2020-02-07T16:42:53.000Z | otcextensions/tests/functional/sdk/vpc/v1/test_vpc.py | artem-lifshits/python-otcextensions | 2021da124f393e0429dd5913a3bc635e6143ba1e | [
"Apache-2.0"
] | 9 | 2018-03-27T09:17:40.000Z | 2019-08-07T12:53:49.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack import _log
from otcextensions.sdk.vpc.v1 import vpc
from otcextensions.tests.functional import base
_logger = _log.setup_logging('openstack')
class TestService(base.BaseFunctionalTest):
    """Functional CRUD tests for the OTC VPC service (create, find, get,
    list, update; deletion happens via the registered cleanup)."""
    # ID of the VPC created in setUp; shared by the tests below.
    ID = None
    # Random suffix so concurrent test runs do not collide on VPC names.
    uuid = uuid.uuid4().hex[:8]
    def setUp(self):
        """Create a fresh VPC and register its deletion for cleanup."""
        super(TestService, self).setUp()
        attrs = {
            'name': "test-vpc-" + self.uuid,
            'cidr': '192.168.0.0/24'
        }
        self.NAME = "test-vpc-" + self.uuid
        self.UPDATE_NAME = "test-vpc-upd-" + self.uuid
        self.vpc = self.conn.vpc.create_vpc(**attrs)
        assert isinstance(self.vpc, vpc.Vpc)
        self.assertEqual(self.NAME, self.vpc.name)
        self.ID = self.vpc.id
        self.addCleanup(self.conn.vpc.delete_vpc, self.vpc)
    def test_find_vpc(self):
        """find_vpc by name should locate the VPC created in setUp."""
        found = self.conn.vpc.find_vpc(self.NAME)
        self.assertEqual(found.id, self.ID)
    def test_get_vpc(self):
        """get_vpc by id should return the matching name and id."""
        found = self.conn.vpc.get_vpc(self.ID)
        self.assertEqual(found.name, self.NAME)
        self.assertEqual(found.id, self.ID)
    def test_list_vpcs(self):
        """The VPC listing should include the VPC created in setUp."""
        vpcs = [o.name for o in self.conn.vpc.vpcs()]
        self.assertIn(self.NAME, vpcs)
    def test_update_vpc(self):
        """Updating name and CIDR should be reflected in the returned VPC."""
        new_attrs = {
            'name': self.UPDATE_NAME,
            'cidr': '192.168.0.0/16'
        }
        updated = self.conn.vpc.update_vpc(self.ID, **new_attrs)
        self.assertEqual(updated.name, new_attrs['name'])
        self.assertEqual(updated.cidr, new_attrs['cidr'])
| 32.1875 | 75 | 0.655825 |
import uuid
from openstack import _log
from otcextensions.sdk.vpc.v1 import vpc
from otcextensions.tests.functional import base
_logger = _log.setup_logging('openstack')
class TestService(base.BaseFunctionalTest):
ID = None
uuid = uuid.uuid4().hex[:8]
def setUp(self):
super(TestService, self).setUp()
attrs = {
'name': "test-vpc-" + self.uuid,
'cidr': '192.168.0.0/24'
}
self.NAME = "test-vpc-" + self.uuid
self.UPDATE_NAME = "test-vpc-upd-" + self.uuid
self.vpc = self.conn.vpc.create_vpc(**attrs)
assert isinstance(self.vpc, vpc.Vpc)
self.assertEqual(self.NAME, self.vpc.name)
self.ID = self.vpc.id
self.addCleanup(self.conn.vpc.delete_vpc, self.vpc)
def test_find_vpc(self):
found = self.conn.vpc.find_vpc(self.NAME)
self.assertEqual(found.id, self.ID)
def test_get_vpc(self):
found = self.conn.vpc.get_vpc(self.ID)
self.assertEqual(found.name, self.NAME)
self.assertEqual(found.id, self.ID)
def test_list_vpcs(self):
vpcs = [o.name for o in self.conn.vpc.vpcs()]
self.assertIn(self.NAME, vpcs)
def test_update_vpc(self):
new_attrs = {
'name': self.UPDATE_NAME,
'cidr': '192.168.0.0/16'
}
updated = self.conn.vpc.update_vpc(self.ID, **new_attrs)
self.assertEqual(updated.name, new_attrs['name'])
self.assertEqual(updated.cidr, new_attrs['cidr'])
| true | true |
f73012160ab63e97ce0cba976dae618df8d31d23 | 15,096 | py | Python | src/python/org/cassandra/geo_maps/geo_maps.py | cassandra/geo_maps | 0257bd73456f9312070e3f7627effee30b73fdea | [
"MIT"
] | null | null | null | src/python/org/cassandra/geo_maps/geo_maps.py | cassandra/geo_maps | 0257bd73456f9312070e3f7627effee30b73fdea | [
"MIT"
] | null | null | null | src/python/org/cassandra/geo_maps/geo_maps.py | cassandra/geo_maps | 0257bd73456f9312070e3f7627effee30b73fdea | [
"MIT"
] | null | null | null | from dataclasses import dataclass
import math
from typing import List
from .display_bounds import DisplayBounds
from .geo_bounds import GeoBounds
from .view_box import ViewBox
from . import utils
@dataclass
class AlbersMapProjection:
    """Albers equal-area conic projection between geographic coordinates
    in degrees and planar (x, y) coordinates in miles.

    Ref: https://en.wikipedia.org/wiki/Albers_projection
    """
    # The center of the displayed map
    #
    reference_longitude_deg : float
    reference_latitude_deg : float
    # The two standard parallels that parameterize the cone (see ref above).
    standard_parallel_1_deg : float
    standard_parallel_2_deg : float
    # Ref: https://spatialreference.org/ref/esri/usa-contiguous-albers-equal-area-conic/prettywkt/
    #
    # SPHEROID["GRS_1980",6378137,298.257222101]] -> 6378137 meters = 3963.190592 miles
    #
    radius_miles = utils.EARTH_RADIUS_AT_EQUATOR_MILES
    # For zero comparisons
    EPSILON = 0.000000001
    @property
    def reference_longitude_radians(self):
        """Reference longitude converted to radians."""
        return math.radians( self.reference_longitude_deg )
    @property
    def reference_latitude_radians(self):
        """Reference latitude converted to radians."""
        return math.radians( self.reference_latitude_deg )
    @property
    def standard_parallel_1_radians(self):
        """First standard parallel converted to radians."""
        return math.radians( self.standard_parallel_1_deg )
    @property
    def standard_parallel_2_radians(self):
        """Second standard parallel converted to radians."""
        return math.radians( self.standard_parallel_2_deg )
    def __post_init__(self):
        """Precompute the projection constants n, C and rho_0.

        They depend only on the standard parallels and the reference
        latitude, so they are derived once per projection instance.
        """
        # Common for all projections
        self.n = 0.5 * ( math.sin( self.standard_parallel_1_radians )
                         + math.sin( self.standard_parallel_2_radians ) )
        self.C = ( math.cos( self.standard_parallel_1_radians ) ** 2 ) \
            + 2 * self.n * math.sin( self.standard_parallel_1_radians )
        self.rho_0 = ( self.radius_miles / self.n ) \
            * math.sqrt( self.C - ( 2 * self.n * math.sin( self.reference_latitude_radians ) ))
        return
    def x_y_from_deg( self, longitude_deg : float, latitude_deg : float ):
        """Project (longitude_deg, latitude_deg) to planar (x, y) in miles.

        Points for which the radial term would be negative (outside the
        projectable domain) collapse to the origin (0, 0).
        """
        # Ref: https://en.wikipedia.org/wiki/Albers_projection#Formulas
        longitude = math.radians( longitude_deg )
        latitude = math.radians( latitude_deg )
        theta = self.n * ( longitude - self.reference_longitude_radians )
        rho_basis = self.C - ( 2 * self.n * math.sin( latitude ))
        if rho_basis < 0.0:
            return ( 0, 0 )
        rho = ( self.radius_miles / self.n ) * math.sqrt( rho_basis )
        x = rho * math.sin( theta )
        y = self.rho_0 - ( rho * math.cos( theta ))
        return ( x, y )
    def deg_from_x_y( self, x : float, y : float ):
        """Inverse projection: planar (x, y) miles back to
        (longitude_deg, latitude_deg)."""
        # Ref: https://mathworld.wolfram.com/AlbersEqual-AreaConicProjection.html
        rho_0_minus_y = self.rho_0 - y
        rho = math.sqrt( x**2 + rho_0_minus_y**2 )
        if abs(rho) > self.EPSILON:
            if self.n < 0.0:
                # Negative-n cone: flip signs so asin/atan2 below
                # operate in the correct quadrant.
                rho *= -1.0
                x *= -1.0
                rho_0_minus_y *= -1.0
            rho_adjusted = rho * self.n / self.radius_miles
            latitude_operand = ( self.C - ( rho_adjusted * rho_adjusted ) ) / ( 2 * self.n )
            if abs(latitude_operand) <= 1.0:
                latitude_radians = math.asin( latitude_operand )
            elif latitude_operand < 0.0:
                # Clamp an out-of-range asin operand to the south pole.
                latitude_radians = -1.0 * math.pi / 2.0
            else:
                # Clamp an out-of-range asin operand to the north pole.
                latitude_radians = math.pi / 2.0
            theta = math.atan2( x, rho_0_minus_y )
        else:
            # At the cone apex: longitude is the reference meridian and
            # latitude is the pole matching the sign of n.
            theta = 0.0
            if self.n > 0:
                latitude_radians = math.pi / 2.0
            else:
                latitude_radians = -1.0 * math.pi / 2.0
        longitude_radians = self.reference_longitude_radians + ( theta / self.n )
        longitude_deg = math.degrees( longitude_radians )
        latitude_deg = math.degrees( latitude_radians )
        return ( longitude_deg, latitude_deg )
@dataclass
class GeoMap:
    """ Defines how a map projection lines up with an SVG file of a map with that projection. """
    projection : AlbersMapProjection
    geo_bounds : GeoBounds
    svg_template_name : str
    view_box : ViewBox
    # To adjust for the placement of the image (in SVG view box scale units)
    display_x_offset : float = None
    display_y_offset : float = None
    display_x_scale : float = None
    display_y_scale : float = None
    # Optional rotation (degrees) applied to projected points before
    # scaling/offsetting; used by the Alaska/Hawaii inset maps below.
    rotation_angle_deg : float = None
    calibration_points : List = None
    def __post_init__(self):
        # Precompute rotation trig terms once when an angle is configured;
        # left as None when no rotation applies.
        self._rotation_angle_radians = None
        self._sine_angle = None
        self._cosine_angle = None
        if self.rotation_angle_deg:
            self._rotation_angle_radians = math.radians( self.rotation_angle_deg )
            self._sine_angle = math.sin( self._rotation_angle_radians )
            self._cosine_angle = math.cos( self._rotation_angle_radians )
        return
    @property
    def aspect_ratio(self):
        """Width/height ratio of the SVG view box."""
        return self.view_box.width / self.view_box.height
    def long_lat_deg_to_coords( self, longitude_deg, latitude_deg ):
        """Map a geographic point to SVG view-box coordinates.

        Pipeline: project -> (optional) rotate -> scale -> offset.
        The y axis is inverted (offset minus scaled y), consistent with
        SVG's downward-growing y axis.
        """
        projected_x, projected_y = self.projection.x_y_from_deg( longitude_deg = longitude_deg,
                                                                 latitude_deg = latitude_deg )
        if self._rotation_angle_radians:
            rotated_x = ( projected_x * self._cosine_angle ) - ( projected_y * self._sine_angle )
            rotated_y = ( projected_x * self._sine_angle ) + ( projected_y * self._cosine_angle )
            scaled_x = rotated_x * self.display_x_scale
            scaled_y = rotated_y * self.display_y_scale
        else:
            scaled_x = projected_x * self.display_x_scale
            scaled_y = projected_y * self.display_y_scale
        offset_x = scaled_x + self.display_x_offset
        offset_y = self.display_y_offset - scaled_y
        return ( offset_x , offset_y )
    def coords_to_long_lat_deg( self, x, y ):
        """Inverse of long_lat_deg_to_coords: SVG view-box coordinates back
        to (longitude_deg, latitude_deg).

        Applies the inverse steps in reverse order: un-offset -> un-scale
        -> inverse-rotate -> unproject.
        """
        offset_x = x - self.display_x_offset
        offset_y = self.display_y_offset - y
        scaled_x = offset_x / self.display_x_scale
        scaled_y = offset_y / self.display_y_scale
        if self._rotation_angle_radians:
            # Inverse rotation (rotation matrix transposed).
            rotated_x = ( scaled_x * self._cosine_angle ) + ( scaled_y * self._sine_angle )
            rotated_y = ( -1.0 * scaled_x * self._sine_angle ) + ( scaled_y * self._cosine_angle )
            longitude, latitude = self.projection.deg_from_x_y( x = rotated_x, y = rotated_y )
        else:
            longitude, latitude = self.projection.deg_from_x_y( x = scaled_x, y = scaled_y )
        return ( longitude, latitude )
# Projection parameters for the conterminous 48 states.
USA_CONTINENTAL_PROJECTION = AlbersMapProjection(
    # References:
    #
    # https://gis.stackexchange.com/questions/141580/which-projection-is-best-for-mapping-the-contiguous-united-states
    # https://spatialreference.org/ref/esri/usa-contiguous-albers-equal-area-conic/html/
    #
    # From: https://pubs.usgs.gov/bul/1532/report.pdf, p. 94
    #
    # Albers Equal-Area Conic projection, with standard parallels 20° and 60° N.
    # This illustration includes all of North America to show the change in spacing of the
    # parallels. When used for maps of the 48 conterminous States standard parallels
    # are 29.5° and 45.5° N.
    #
    # For maps of Alaska, the chosen standard parallels are lats. 55° and
    # 65° N., and for Hawaii, lats. 8° and 18° N. In the latter case,
    # both parallels are south of the islands, but they were chosen to
    # include maps of the more southerly Canal Zone and especially the
    # Philippine Islands.
    reference_longitude_deg = -96.0,
    reference_latitude_deg = 37.5,
    standard_parallel_1_deg = 29.5,
    standard_parallel_2_deg = 45.5,
)
# Projection parameters for Alaska.
ALASKA_PROJECTION = AlbersMapProjection(
    # References:
    # https://epsg.io/3338
    reference_longitude_deg = -154.0,
    reference_latitude_deg = 50.0,
    standard_parallel_1_deg = 55.0,
    standard_parallel_2_deg = 65.0,
)
# Projection parameters for Hawaii.
HAWAII_PROJECTION = AlbersMapProjection(
    # References:
    # https://epsg.io/102007
    reference_longitude_deg = -157.0,
    reference_latitude_deg = 13.0,
    standard_parallel_1_deg = 8.0,
    standard_parallel_2_deg = 18.0,
)
# Lower-48 map aligned to the shared usa_continental.svg template.
USA_CONTINENTAL_GEO_MAP = GeoMap(
    # Values arrived at by trial and error via map calibration testing page.
    projection = USA_CONTINENTAL_PROJECTION,
    geo_bounds = GeoBounds(
        longitude_min = -124.8679,
        longitude_max = -66.8628,
        latitude_min = 24.3959,
        latitude_max = 49.3877,
    ),
    svg_template_name = "usa_continental.svg",
    view_box = ViewBox(
        x = 0.0,
        y = 0.0,
        width = 958.0,
        height = 602.0,
    ),
    display_x_scale = 0.3332,
    display_y_scale = 0.3318,
    display_x_offset = 491.0249,
    display_y_offset = 323.6935,
)
# Alaska inset: same SVG template, smaller scale and its own rotation.
ALASKA_CONTINENTAL_GEO_MAP = GeoMap(
    # Values arrived at by trial and error via map calibration testing page.
    projection = ALASKA_PROJECTION,
    geo_bounds = GeoBounds(
        longitude_min = -180.0,
        longitude_max = -129.993,
        latitude_min = 50.5,
        latitude_max = 71.5232,
    ),
    svg_template_name = "usa_continental.svg",
    view_box = ViewBox(
        x = 0.0,
        y = 0.0,
        width = 958.0,
        height = 602.0,
    ),
    display_x_scale = 0.1301,
    display_y_scale = 0.1311,
    display_x_offset = 132.4555,
    display_y_offset = 638.5017,
    rotation_angle_deg = -11.0,
)
# Hawaii inset: same SVG template with a slight rotation.
HAWAII_CONTINENTAL_GEO_MAP = GeoMap(
    # Values arrived at by trial and error via map calibration testing page.
    projection = HAWAII_PROJECTION,
    geo_bounds = GeoBounds(
        longitude_min = -160.3922,
        longitude_max = -154.6271,
        latitude_min = 18.71,
        latitude_max = 22.3386,
    ),
    svg_template_name = "usa_continental.svg",
    view_box = ViewBox(
        x = 0.0,
        y = 0.0,
        width = 958.0,
        height = 602.0,
    ),
    display_x_scale = 0.3279,
    display_y_scale = 0.3371,
    display_x_offset = 325.5313,
    display_y_offset = 729.5,
    rotation_angle_deg = -0.5,
)
class CompositeGeoMap:
    """A set of GeoMaps rendered together on one shared SVG image.

    The canonical example is the USA map that draws Alaska and Hawaii as
    insets below the continental states.  The insets are not in the same
    geographic coordinate space as the lower 48 *and* they use different
    projection parameters (same Albers projection type, different
    reference points), so a single composite can represent several
    disjoint geographic areas.  The composite therefore exposes both a
    merged GeoBounds and a per-map list of bounds.
    """

    def __init__( self, map_id : int, geo_map_list : List[GeoMap] ):
        """ First one in list is considered default. List cannot be empty. """
        assert geo_map_list
        self._map_id = map_id
        self._geo_map_list = geo_map_list
        self._default_geo_map = geo_map_list[0]
        self._geo_bounds = GeoBounds()
        self._geo_bounds_list = []
        svg_template_names = set()  # GeoMaps often share the same SVG file
        for geo_map in geo_map_list:
            # Tag each view box with this composite map's id.
            geo_map.view_box.map_id = map_id
            self._geo_bounds.add_bounds( geo_map.geo_bounds )
            self._geo_bounds_list.append( geo_map.geo_bounds )
            svg_template_names.add( geo_map.svg_template_name )
        self._svg_template_name_list = list( svg_template_names )

    @property
    def map_id(self):
        return self._map_id

    @property
    def geo_bounds(self):
        """ A single view of the union of bounds for all contained GeoMap """
        return self._geo_bounds

    @property
    def geo_bounds_list(self):
        """ A list of the individual bounds for each contained GeoMap """
        return self._geo_bounds_list

    @property
    def default_view_box(self):
        return self._default_geo_map.view_box

    @property
    def default_reference_longitude_deg(self):
        return self._default_geo_map.projection.reference_longitude_deg

    @property
    def default_reference_latitude_deg(self):
        return self._default_geo_map.projection.reference_latitude_deg

    @property
    def default_aspect_ratio(self):
        return self._default_geo_map.aspect_ratio

    @property
    def svg_template_name_list(self):
        return self._svg_template_name_list

    def contains_bounds( self, geo_bounds : GeoBounds ):
        """True if any contained map's bounds fully contain the given bounds."""
        return any(
            map_bounds.contains_bounds( other_geo_bounds = geo_bounds )
            for map_bounds in self._geo_bounds_list
        )

    def get_geo_map_for_point( self,
                               longitude_deg : float,
                               latitude_deg : float ):
        """Return the first contained GeoMap whose bounds include the
        point, falling back to the default map when none match."""
        candidates = (
            geo_map for geo_map in self._geo_map_list
            if geo_map.geo_bounds.contains_point( longitude_deg = longitude_deg,
                                                  latitude_deg = latitude_deg )
        )
        return next( candidates, self._default_geo_map )

    def geo_bounds_to_display_bounds( self, geo_bounds : GeoBounds ):
        """Project the geographic bounds into display coordinates by
        accumulating the corners of its overlap with each contained map."""
        display_bounds = DisplayBounds()
        for geo_map in self._geo_map_list:
            overlap = geo_map.geo_bounds.intersect( geo_bounds )
            if not overlap:
                continue
            for corner_longitude, corner_latitude in overlap.corner_points():
                x, y = geo_map.long_lat_deg_to_coords(
                    longitude_deg = corner_longitude,
                    latitude_deg = corner_latitude )
                display_bounds.add_point( x = x, y = y )
        return display_bounds

    def view_box_to_geo_bounds_list( self, view_box : ViewBox ):
        """Unproject a view box through each contained map, returning a
        GeoBounds per map whose known area the result intersects."""
        geo_bounds_list = []
        for geo_map in self._geo_map_list:
            candidate_bounds = GeoBounds()
            for x, y in view_box.corner_points():
                longitude, latitude = geo_map.coords_to_long_lat_deg( x = x, y = y )
                candidate_bounds.add_point( longitude = longitude, latitude = latitude )
            # If the unprojected long/lat falls entirely outside this map's
            # known bounds, the view box is not showing that map: skip it.
            if not geo_map.geo_bounds.intersects( candidate_bounds ):
                continue
            geo_bounds_list.append( candidate_bounds )
        return geo_bounds_list
# The standard USA composite map: continental 48 states plus the Alaska
# and Hawaii insets, all sharing the usa_continental.svg template.
UsaContinentalCompositeGeoMap = CompositeGeoMap(
    map_id = 1,
    geo_map_list = [
        USA_CONTINENTAL_GEO_MAP,  # first entry acts as the default map
        ALASKA_CONTINENTAL_GEO_MAP,
        HAWAII_CONTINENTAL_GEO_MAP,
    ]
)
| 31.58159 | 118 | 0.624338 | from dataclasses import dataclass
import math
from typing import List
from .display_bounds import DisplayBounds
from .geo_bounds import GeoBounds
from .view_box import ViewBox
from . import utils
@dataclass
class AlbersMapProjection:
    """Albers equal-area conic projection (spherical form).

    Converts longitude/latitude in degrees to planar x/y in miles and
    back.  The derived constants ``n``, ``C`` and ``rho_0`` follow the
    standard Albers formulation and are computed once in
    ``__post_init__``.
    """
    # Projection origin (degrees).
    reference_longitude_deg : float
    reference_latitude_deg : float
    # The two standard parallels that define the cone (degrees).
    standard_parallel_1_deg : float
    standard_parallel_2_deg : float
    # Spherical Earth radius used for the projection, in miles.
    radius_miles = utils.EARTH_RADIUS_AT_EQUATOR_MILES
    # Threshold below which rho is treated as zero (pole case) in the inverse.
    EPSILON = 0.000000001
    @property
    def reference_longitude_radians(self):
        return math.radians( self.reference_longitude_deg )
    @property
    def reference_latitude_radians(self):
        return math.radians( self.reference_latitude_deg )
    @property
    def standard_parallel_1_radians(self):
        return math.radians( self.standard_parallel_1_deg )
    @property
    def standard_parallel_2_radians(self):
        return math.radians( self.standard_parallel_2_deg )
    def __post_init__(self):
        # Pre-compute the Albers constants from the standard parallels.
        self.n = 0.5 * ( math.sin( self.standard_parallel_1_radians )
                         + math.sin( self.standard_parallel_2_radians ) )
        self.C = ( math.cos( self.standard_parallel_1_radians ) ** 2 ) \
            + 2 * self.n * math.sin( self.standard_parallel_1_radians )
        self.rho_0 = ( self.radius_miles / self.n ) \
            * math.sqrt( self.C - ( 2 * self.n * math.sin( self.reference_latitude_radians ) ))
        return
    def x_y_from_deg( self, longitude_deg : float, latitude_deg : float ):
        """Forward projection: degrees -> planar (x, y) in miles."""
        longitude = math.radians( longitude_deg )
        latitude = math.radians( latitude_deg )
        theta = self.n * ( longitude - self.reference_longitude_radians )
        rho_basis = self.C - ( 2 * self.n * math.sin( latitude ))
        if rho_basis < 0.0:
            # Latitude outside the projectable range; (0, 0) is returned
            # as a sentinel rather than raising.
            return ( 0, 0 )
        rho = ( self.radius_miles / self.n ) * math.sqrt( rho_basis )
        x = rho * math.sin( theta )
        y = self.rho_0 - ( rho * math.cos( theta ))
        return ( x, y )
    def deg_from_x_y( self, x : float, y : float ):
        """Inverse projection: planar (x, y) in miles -> (longitude, latitude) degrees."""
        rho_0_minus_y = self.rho_0 - y
        rho = math.sqrt( x**2 + rho_0_minus_y**2 )
        if abs(rho) > self.EPSILON:
            if self.n < 0.0:
                # Southern-cone case: flip signs so atan2/asin stay in range.
                rho *= -1.0
                x *= -1.0
                rho_0_minus_y *= -1.0
            rho_adjusted = rho * self.n / self.radius_miles
            latitude_operand = ( self.C - ( rho_adjusted * rho_adjusted ) ) / ( 2 * self.n )
            if abs(latitude_operand) <= 1.0:
                latitude_radians = math.asin( latitude_operand )
            elif latitude_operand < 0.0:
                # Clamp numerically out-of-range operands to the poles.
                latitude_radians = -1.0 * math.pi / 2.0
            else:
                latitude_radians = math.pi / 2.0
            theta = math.atan2( x, rho_0_minus_y )
        else:
            # rho ~ 0: the point is at a pole of the projection.
            theta = 0.0
            if self.n > 0:
                latitude_radians = math.pi / 2.0
            else:
                latitude_radians = -1.0 * math.pi / 2.0
        longitude_radians = self.reference_longitude_radians + ( theta / self.n )
        longitude_deg = math.degrees( longitude_radians )
        latitude_deg = math.degrees( latitude_radians )
        return ( longitude_deg, latitude_deg )
@dataclass
class GeoMap:
    """One projected map: a projection plus the affine calibration
    (scale, offset, optional rotation) that maps projected miles onto
    SVG display coordinates, and the geographic bounds it covers.
    """
    projection : AlbersMapProjection
    geo_bounds : GeoBounds
    svg_template_name : str
    view_box : ViewBox
    # Affine calibration from projected coordinates to display pixels.
    display_x_offset : float = None
    display_y_offset : float = None
    display_x_scale : float = None
    display_y_scale : float = None
    # Optional rotation applied between projection and scaling.
    rotation_angle_deg : float = None
    # presumably reference points used to derive the calibration -- unused here; TODO confirm
    calibration_points : List = None
    def __post_init__(self):
        # Cache the rotation trig terms once; None disables rotation.
        self._rotation_angle_radians = None
        self._sine_angle = None
        self._cosine_angle = None
        if self.rotation_angle_deg:
            self._rotation_angle_radians = math.radians( self.rotation_angle_deg )
            self._sine_angle = math.sin( self._rotation_angle_radians )
            self._cosine_angle = math.cos( self._rotation_angle_radians )
        return
    @property
    def aspect_ratio(self):
        # Width/height of the SVG view box.
        return self.view_box.width / self.view_box.height
    def long_lat_deg_to_coords( self, longitude_deg, latitude_deg ):
        """Degrees -> display coordinates: project, rotate (optional), scale, offset."""
        projected_x, projected_y = self.projection.x_y_from_deg( longitude_deg = longitude_deg,
                                                                 latitude_deg = latitude_deg )
        if self._rotation_angle_radians:
            rotated_x = ( projected_x * self._cosine_angle ) - ( projected_y * self._sine_angle )
            rotated_y = ( projected_x * self._sine_angle ) + ( projected_y * self._cosine_angle )
            scaled_x = rotated_x * self.display_x_scale
            scaled_y = rotated_y * self.display_y_scale
        else:
            scaled_x = projected_x * self.display_x_scale
            scaled_y = projected_y * self.display_y_scale
        offset_x = scaled_x + self.display_x_offset
        # Display y grows downward, so y is subtracted from the offset.
        offset_y = self.display_y_offset - scaled_y
        return ( offset_x , offset_y )
    def coords_to_long_lat_deg( self, x, y ):
        """Display coordinates -> degrees: undo offset, scale, rotation, then inverse-project."""
        offset_x = x - self.display_x_offset
        offset_y = self.display_y_offset - y
        scaled_x = offset_x / self.display_x_scale
        scaled_y = offset_y / self.display_y_scale
        if self._rotation_angle_radians:
            # Inverse rotation (transpose of the forward rotation matrix).
            rotated_x = ( scaled_x * self._cosine_angle ) + ( scaled_y * self._sine_angle )
            rotated_y = ( -1.0 * scaled_x * self._sine_angle ) + ( scaled_y * self._cosine_angle )
            longitude, latitude = self.projection.deg_from_x_y( x = rotated_x, y = rotated_y )
        else:
            longitude, latitude = self.projection.deg_from_x_y( x = scaled_x, y = scaled_y )
        return ( longitude, latitude )
# Albers projection for the contiguous United States
# (conventional 29.5N / 45.5N standard parallels).
USA_CONTINENTAL_PROJECTION = AlbersMapProjection(
    reference_longitude_deg = -96.0,
    reference_latitude_deg = 37.5,
    standard_parallel_1_deg = 29.5,
    standard_parallel_2_deg = 45.5,
)
# Albers projection tuned for Alaska.
ALASKA_PROJECTION = AlbersMapProjection(
    reference_longitude_deg = -154.0,
    reference_latitude_deg = 50.0,
    standard_parallel_1_deg = 55.0,
    standard_parallel_2_deg = 65.0,
)
# Albers projection tuned for Hawaii.
HAWAII_PROJECTION = AlbersMapProjection(
    reference_longitude_deg = -157.0,
    reference_latitude_deg = 13.0,
    standard_parallel_1_deg = 8.0,
    standard_parallel_2_deg = 18.0,
)
USA_CONTINENTAL_GEO_MAP = GeoMap(
projection = USA_CONTINENTAL_PROJECTION,
geo_bounds = GeoBounds(
longitude_min = -124.8679,
longitude_max = -66.8628,
latitude_min = 24.3959,
latitude_max = 49.3877,
),
svg_template_name = "usa_continental.svg",
view_box = ViewBox(
x = 0.0,
y = 0.0,
width = 958.0,
height = 602.0,
),
display_x_scale = 0.3332,
display_y_scale = 0.3318,
display_x_offset = 491.0249,
display_y_offset = 323.6935,
)
ALASKA_CONTINENTAL_GEO_MAP = GeoMap(
projection = ALASKA_PROJECTION,
geo_bounds = GeoBounds(
longitude_min = -180.0,
longitude_max = -129.993,
latitude_min = 50.5,
latitude_max = 71.5232,
),
svg_template_name = "usa_continental.svg",
view_box = ViewBox(
x = 0.0,
y = 0.0,
width = 958.0,
height = 602.0,
),
display_x_scale = 0.1301,
display_y_scale = 0.1311,
display_x_offset = 132.4555,
display_y_offset = 638.5017,
rotation_angle_deg = -11.0,
)
HAWAII_CONTINENTAL_GEO_MAP = GeoMap(
projection = HAWAII_PROJECTION,
geo_bounds = GeoBounds(
longitude_min = -160.3922,
longitude_max = -154.6271,
latitude_min = 18.71,
latitude_max = 22.3386,
),
svg_template_name = "usa_continental.svg",
view_box = ViewBox(
x = 0.0,
y = 0.0,
width = 958.0,
height = 602.0,
),
display_x_scale = 0.3279,
display_y_scale = 0.3371,
display_x_offset = 325.5313,
display_y_offset = 729.5,
rotation_angle_deg = -0.5,
)
class CompositeGeoMap:
    """A map stitched together from several GeoMap insets (e.g. the
    continental USA plus Alaska and Hawaii insets on one SVG).

    The first GeoMap in ``geo_map_list`` is the default used when a
    point or view box does not fall inside any inset's bounds.
    """
    def __init__( self, map_id : int, geo_map_list : List[GeoMap] ):
        # An empty list would leave no default projection to fall back on.
        # (Was an `assert`, which is stripped under `python -O`.)
        if not geo_map_list:
            raise ValueError("geo_map_list must contain at least one GeoMap")
        self._map_id = map_id
        self._geo_map_list = geo_map_list
        self._default_geo_map = self._geo_map_list[0]
        # Union of all inset bounds, plus the per-inset bounds list.
        self._geo_bounds = GeoBounds()
        self._geo_bounds_list = list()
        svg_template_name_set = set()
        for geo_map in self._geo_map_list:
            # Tag each inset's view box with this composite's id.
            geo_map.view_box.map_id = self._map_id
            self._geo_bounds.add_bounds( geo_map.geo_bounds )
            self._geo_bounds_list.append( geo_map.geo_bounds )
            svg_template_name_set.add( geo_map.svg_template_name )
        # De-duplicated SVG templates backing the insets.
        self._svg_template_name_list = list(svg_template_name_set)
    @property
    def map_id(self):
        return self._map_id
    @property
    def geo_bounds(self):
        """Union of all inset geo bounds."""
        return self._geo_bounds
    @property
    def geo_bounds_list(self):
        return self._geo_bounds_list
    @property
    def default_view_box(self):
        return self._default_geo_map.view_box
    @property
    def default_reference_longitude_deg(self):
        return self._default_geo_map.projection.reference_longitude_deg
    @property
    def default_reference_latitude_deg(self):
        return self._default_geo_map.projection.reference_latitude_deg
    @property
    def default_aspect_ratio(self):
        return self._default_geo_map.aspect_ratio
    @property
    def svg_template_name_list(self):
        return self._svg_template_name_list
    def contains_bounds( self, geo_bounds : GeoBounds ):
        """Return True if any single inset fully contains ``geo_bounds``."""
        for geo_map_bounds in self._geo_bounds_list:
            if geo_map_bounds.contains_bounds( other_geo_bounds = geo_bounds ):
                return True
        return False
    def get_geo_map_for_point( self,
                               longitude_deg : float,
                               latitude_deg : float ):
        """Return the first inset containing the point, else the default inset."""
        for geo_map in self._geo_map_list:
            if geo_map.geo_bounds.contains_point( longitude_deg = longitude_deg,
                                                  latitude_deg = latitude_deg ):
                return geo_map
        return self._default_geo_map
    def geo_bounds_to_display_bounds( self, geo_bounds : GeoBounds ):
        """Project ``geo_bounds`` into display coordinates across all insets.

        Each inset contributes the projected corners of its intersection
        with ``geo_bounds``; the returned DisplayBounds covers them all.
        """
        display_bounds = DisplayBounds()
        for geo_map in self._geo_map_list:
            intersection_geo_bounds = geo_map.geo_bounds.intersect( geo_bounds )
            if not intersection_geo_bounds:
                continue
            for longitude, latitude in intersection_geo_bounds.corner_points():
                x, y = geo_map.long_lat_deg_to_coords( longitude_deg = longitude,
                                                       latitude_deg = latitude )
                display_bounds.add_point( x = x, y = y )
        return display_bounds
    def view_box_to_geo_bounds_list( self, view_box : ViewBox ):
        """Inverse-project a display view box through every inset.

        Returns one GeoBounds per inset whose known bounds intersect the
        inverse-projected corners; other insets are skipped.
        """
        geo_bounds_list = list()
        for geo_map in self._geo_map_list:
            geo_bounds = GeoBounds()
            for x, y in view_box.corner_points():
                longitude, latitude = geo_map.coords_to_long_lat_deg( x = x, y = y )
                geo_bounds.add_point( longitude = longitude, latitude = latitude )
            # If the long/lat from the projection do not fall inside the
            # inset's known bounds, then we can ignore it.
            if not geo_map.geo_bounds.intersects( geo_bounds ):
                continue
            geo_bounds_list.append( geo_bounds )
        return geo_bounds_list
UsaContinentalCompositeGeoMap = CompositeGeoMap(
map_id = 1,
geo_map_list = [
USA_CONTINENTAL_GEO_MAP,
ALASKA_CONTINENTAL_GEO_MAP,
HAWAII_CONTINENTAL_GEO_MAP,
]
)
| true | true |
f7301234057765dcbe4ab1b5008caded738d56b6 | 5,879 | py | Python | src/sadie/airr/igblast/germline.py | jwillis0720/sadie | d289ae68f06f5698ee40ffc1757e1b8aa85f1175 | [
"MIT"
] | 9 | 2020-12-22T19:14:01.000Z | 2022-03-17T04:34:06.000Z | src/sadie/airr/igblast/germline.py | jwillis0720/sadie | d289ae68f06f5698ee40ffc1757e1b8aa85f1175 | [
"MIT"
] | 32 | 2020-12-28T07:46:44.000Z | 2022-03-31T01:25:01.000Z | src/sadie/airr/igblast/germline.py | jwillis0720/sadie | d289ae68f06f5698ee40ffc1757e1b8aa85f1175 | [
"MIT"
] | 2 | 2021-07-30T16:44:46.000Z | 2022-01-12T20:15:17.000Z | import os
import warnings
from pathlib import Path
# package/module level
from sadie.reference.reference import YamlRef
from sadie.airr.igblast.igblast import ensure_prefix_to
class GermlineData:
"""
The germline data paths are extremely cumbersome to workwith. This class will abstract away their paths to make it easier to fold into IgBLAST
Examples
--------
>>> gd = GermlineData('human')
>>> gd.base_dir
/Users/jwillis/repos/sadie/airr/data/germlines
>>> gd.v_gene_dir
/Users/jwillis/repos/sadie/airr/data/germlines/blastdb/Ig/human/human_V'
>>> gd.aux_path
/Users/jwillis/repos/sadie/airr/data/germlines/aux_data/human_gl.aux
"""
def __init__(
self,
species: str,
database: str = "imgt",
receptor: str = "Ig",
database_dir: str = None,
):
"""
Parameters
----------
species : str
The species of interest, e.g. human
receptor : str, optional
the receptor type, by default "Ig"
"""
self.species = species
if database_dir:
self.base_dir = Path(database_dir).absolute()
else:
self.base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../data/germlines"))
self.blast_dir = os.path.join(self.base_dir, f"{database}/{receptor}/blastdb/{species}_")
self.v_gene_dir = self.blast_dir + "V"
self.d_gene_dir = self.blast_dir + "D"
self.j_gene_dir = self.blast_dir + "J"
self.aux_path = os.path.join(self.base_dir, f"{database}/aux_db/{species}_gl.aux")
self.igdata = os.path.join(self.base_dir, f"{database}/{receptor}/")
@property
def base_dir(self) -> Path:
"""The base dir
Returns
-------
Path
The base directory path that contains all the germline data
"""
return self._base_dir
@base_dir.setter
def base_dir(self, directory: str):
_path = Path(directory)
if not _path.exists():
raise FileNotFoundError(f"Base directory, {directory} not found")
self._base_dir = directory
@property
def blast_dir(self) -> Path:
return self._blast_dir
@blast_dir.setter
def blast_dir(self, directory: str):
# Must be a parent since this is not a valid path yet
_path = Path(directory).parent
if not _path.exists():
raise FileNotFoundError(f"Blast directory, {directory} not found")
self._blast_dir = directory
@property
def v_gene_dir(self) -> Path:
"""The V gene directory prefix for the species of interest
Returns
-------
str
this is not a qualified path but a glob path.
human_V does not exists but it's the prefix to human_V.nod and other files used by blast
"""
return self._v_gene_dir
@v_gene_dir.setter
def v_gene_dir(self, directory: str):
_path = Path(directory)
if not ensure_prefix_to(_path):
raise FileNotFoundError(f"V gene directory glob, {directory} not found")
self._v_gene_dir = _path
@property
def d_gene_dir(self) -> Path:
"""The D gene directory prefix for the species of interest
Returns
-------
str
this is not a qualified path but a glob path.
ex: human_D does not exists but it's the prefix to human_D.nod and other files used by blast
"""
return self._d_gene_dir
@d_gene_dir.setter
def d_gene_dir(self, directory: str):
_path = Path(directory)
if not ensure_prefix_to(_path):
warnings.warn(f"D gene directory not found for {self.species}", UserWarning)
self._d_gene_dir = _path
@property
def j_gene_dir(self) -> Path:
"""The J gene directory prefix for the species of interest
Returns
-------
str
this is not a qualified path but a glob path.
ex: human_J does not exists but it's the prefix to human_j.nod and other files used by blast
"""
return self._j_gene_dir
@j_gene_dir.setter
def j_gene_dir(self, directory: str):
_path = Path(directory)
if not ensure_prefix_to(_path):
raise FileNotFoundError(f"J gene directory glob, {directory} not found")
self._j_gene_dir = _path
@property
def aux_path(self) -> Path:
"""The auxillary data path used to reconstruct CDR3 regions.
Returns
-------
Path
the fully qualified path to the species auxilary data
ex:/Users/jwillis/repos/sadie/airr/data/germlines/aux_data/human_gl.aux
"""
return self._aux_path
@aux_path.setter
def aux_path(self, directory: str):
_path = Path(directory)
if not _path.exists():
raise FileNotFoundError(f"J gene directory glob, {directory} not found")
self._aux_path = _path
@property
def igdata(self) -> Path:
return self._igdata
@igdata.setter
def igdata(self, directory: Path):
_path = Path(directory)
if not _path.exists():
raise FileNotFoundError(f"IGDATA, {directory} not found")
self._igdata = _path
@staticmethod
def get_available_datasets() -> list:
"""A static non-instantiated method to get a list of avaialble species with the builtin data
Returns
-------
list
available datasets (common_name, custom|imgt, functional|all)
"""
y = YamlRef()
db_types = []
for database_type in y.yaml:
for common in y.yaml[database_type]:
if (common, database_type) not in db_types:
db_types.append((common, database_type))
return db_types
| 31.607527 | 146 | 0.610478 | import os
import warnings
from pathlib import Path
from sadie.reference.reference import YamlRef
from sadie.airr.igblast.igblast import ensure_prefix_to
class GermlineData:
def __init__(
self,
species: str,
database: str = "imgt",
receptor: str = "Ig",
database_dir: str = None,
):
self.species = species
if database_dir:
self.base_dir = Path(database_dir).absolute()
else:
self.base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../data/germlines"))
self.blast_dir = os.path.join(self.base_dir, f"{database}/{receptor}/blastdb/{species}_")
self.v_gene_dir = self.blast_dir + "V"
self.d_gene_dir = self.blast_dir + "D"
self.j_gene_dir = self.blast_dir + "J"
self.aux_path = os.path.join(self.base_dir, f"{database}/aux_db/{species}_gl.aux")
self.igdata = os.path.join(self.base_dir, f"{database}/{receptor}/")
@property
def base_dir(self) -> Path:
return self._base_dir
@base_dir.setter
def base_dir(self, directory: str):
_path = Path(directory)
if not _path.exists():
raise FileNotFoundError(f"Base directory, {directory} not found")
self._base_dir = directory
@property
def blast_dir(self) -> Path:
return self._blast_dir
@blast_dir.setter
def blast_dir(self, directory: str):
_path = Path(directory).parent
if not _path.exists():
raise FileNotFoundError(f"Blast directory, {directory} not found")
self._blast_dir = directory
@property
def v_gene_dir(self) -> Path:
return self._v_gene_dir
@v_gene_dir.setter
def v_gene_dir(self, directory: str):
_path = Path(directory)
if not ensure_prefix_to(_path):
raise FileNotFoundError(f"V gene directory glob, {directory} not found")
self._v_gene_dir = _path
@property
def d_gene_dir(self) -> Path:
return self._d_gene_dir
@d_gene_dir.setter
def d_gene_dir(self, directory: str):
_path = Path(directory)
if not ensure_prefix_to(_path):
warnings.warn(f"D gene directory not found for {self.species}", UserWarning)
self._d_gene_dir = _path
@property
def j_gene_dir(self) -> Path:
return self._j_gene_dir
@j_gene_dir.setter
def j_gene_dir(self, directory: str):
_path = Path(directory)
if not ensure_prefix_to(_path):
raise FileNotFoundError(f"J gene directory glob, {directory} not found")
self._j_gene_dir = _path
@property
def aux_path(self) -> Path:
return self._aux_path
@aux_path.setter
def aux_path(self, directory: str):
_path = Path(directory)
if not _path.exists():
raise FileNotFoundError(f"J gene directory glob, {directory} not found")
self._aux_path = _path
@property
def igdata(self) -> Path:
return self._igdata
@igdata.setter
def igdata(self, directory: Path):
_path = Path(directory)
if not _path.exists():
raise FileNotFoundError(f"IGDATA, {directory} not found")
self._igdata = _path
@staticmethod
def get_available_datasets() -> list:
y = YamlRef()
db_types = []
for database_type in y.yaml:
for common in y.yaml[database_type]:
if (common, database_type) not in db_types:
db_types.append((common, database_type))
return db_types
| true | true |
f7301314cf1642769e6fb9225052147fc23be5cb | 996 | py | Python | cluster.py | vgp314/Udacity-Arvato-Identify-Customer-Segments | 6be1d4f1eeac391c17c70fdf584bdc4813f80fd8 | [
"ADSL"
] | 1 | 2020-05-21T23:56:57.000Z | 2020-05-21T23:56:57.000Z | cluster.py | vgp314/Udacity-Arvato-Identify-Customer-Segments | 6be1d4f1eeac391c17c70fdf584bdc4813f80fd8 | [
"ADSL"
] | null | null | null | cluster.py | vgp314/Udacity-Arvato-Identify-Customer-Segments | 6be1d4f1eeac391c17c70fdf584bdc4813f80fd8 | [
"ADSL"
] | null | null | null | from sklearn.cluster import KMeans
from sklearn.cluster import MiniBatchKMeans
import matplotlib.pyplot as plt
def plot_clustering(data):
    """Draw the K-Means elbow curve for ``data``.

    Fits MiniBatchKMeans for every cluster count from 2 up to 19 and
    plots the resulting squared error (inertia) against the cluster
    count.  Returns None; the plot is shown interactively.
    """
    max_clusters = 20
    cost = []
    for n_clusters in range(2, max_clusters):
        print("Analysing ", n_clusters, " clusters")
        model = MiniBatchKMeans(n_clusters=n_clusters, batch_size=20000)
        model.fit(data)
        cost.append(model.inertia_)
    plt.plot(range(2, max_clusters), cost, color='g', linewidth='3')
    plt.xlabel("Number of Clusters")
    plt.ylabel("Squared Error (Cost)")
    plt.show()
def do_clustering(data, number_clusters):
    """Fit a K-Means model with ``number_clusters`` clusters to ``data``.

    Returns the fitted sklearn KMeans instance.
    """
    return KMeans(number_clusters).fit(data)
| 21.652174 | 72 | 0.702811 | from sklearn.cluster import KMeans
from sklearn.cluster import MiniBatchKMeans
import matplotlib.pyplot as plt
def plot_clustering(data):
cost =[]
max_clusters = 20
for i in range(2, max_clusters):
print("Analysing ", i, " clusters")
KM = MiniBatchKMeans(n_clusters = i,batch_size=20000)
KM.fit(data)
cost.append(KM.inertia_)
plt.plot(range(2, max_clusters), cost, color ='g', linewidth ='3')
plt.xlabel("Number of Clusters")
plt.ylabel("Squared Error (Cost)")
plt.show()
def do_clustering(data,number_clusters):
kmeans = KMeans(number_clusters)
fitted_model_k_means = kmeans.fit(data)
return fitted_model_k_means
| true | true |
f730132751b666d6cb1f40177578d5b53a823707 | 4,202 | py | Python | Main.py | Serdobe/Markus | 725538666f81a11361b90e57fad2a00cb9888685 | [
"MIT"
] | null | null | null | Main.py | Serdobe/Markus | 725538666f81a11361b90e57fad2a00cb9888685 | [
"MIT"
] | null | null | null | Main.py | Serdobe/Markus | 725538666f81a11361b90e57fad2a00cb9888685 | [
"MIT"
] | null | null | null | # MAIN SCRIPT
"""
This script computes all the biological experiments. To run it is necessary to
load the Function_Files script that contains all the functions.
"""
import os
import multiprocessing
from multiprocessing import Pool
# Set work directory:
os.chdir(r"C:\Users\Sergio\Desktop\Markus_Project")
import Functions_Files
######################
# Yeast PPI Network: #
######################
# Arguments:
network = "systematic_PPI_BioGRID"  # yeast PPI network from BioGRID
# The four network-embedding/topology techniques being compared.
technique_1 = "GCV-all"
technique_2 = "GCV-O+"
technique_3 = "triangle"
technique_4 = "GDV"
# GO annotation categories: Biological Process, Cellular Component, Molecular Function.
enrichemnt_1 = "BP"
enrichemnt_2 = "CC"
enrichemnt_3 = "MF"
# NOTE(review): total_run appears unused below -- the literal 10 is passed
# to every call; consider passing total_run instead.
total_run = 10
# 1. Prepare the Data: #
# Technique "GCV_all":
# BP, CC, and MF:
sub_command1 = (network, technique_1, enrichemnt_1, 10)
sub_command2 = (network, technique_1, enrichemnt_2, 10)
sub_command3 = (network, technique_1, enrichemnt_3, 10)
# Technique "GCV-O+":
# BP, CC, and MF:
sub_command4 = (network, technique_2, enrichemnt_1, 10)
sub_command5 = (network, technique_2, enrichemnt_2, 10)
sub_command6 = (network, technique_2, enrichemnt_3, 10)
# Technique "triangle":
# BP, CC, and MF:
sub_command7 = (network, technique_3, enrichemnt_1, 10)
sub_command8 = (network, technique_3, enrichemnt_2, 10)
sub_command9 = (network, technique_3, enrichemnt_3, 10)
# Technique "GDV":
# BP, CC, and MF:
sub_command10 = (network, technique_4, enrichemnt_1, 10)
sub_command11 = (network, technique_4, enrichemnt_2, 10)
sub_command12 = (network, technique_4, enrichemnt_3, 10)
# Run the code:
Process = [sub_command1,sub_command2, sub_command3, sub_command4, sub_command5,
sub_command6, sub_command7,sub_command8,sub_command9, sub_command10,
sub_command11, sub_command12]
for arguments in Process:
Functions_Files.Load_Data(*arguments)
# 2. Prepare the PairWise Comparison (for the four techniques):
# For each GO annotation (BP, CC, MF), compare all four techniques pair-wise.
Process = [
    (network, enrichment, 10)
    for enrichment in (enrichemnt_1, enrichemnt_2, enrichemnt_3)
]
for arguments in Process:
    Functions_Files.Pair_Wise_GO_Comparison(*arguments)
# 3. Create the plots for the comparisons:
# Technique "GCV_all" VS "GCV-O+":
# BP, CC, and MF:
sub_command1 = (network, technique_1, technique_2, enrichemnt_1, 10)
sub_command2 = (network, technique_1, technique_2, enrichemnt_2, 10)
sub_command3 = (network, technique_1, technique_2, enrichemnt_3, 10)
# Technique "GCV_all" VS "triangle":
# BP, CC, and MF:
sub_command4 = (network, technique_1, technique_3, enrichemnt_1, 10)
sub_command5 = (network, technique_1, technique_3, enrichemnt_2, 10)
sub_command6 = (network, technique_1, technique_3, enrichemnt_3, 10)
# Technique "GCV_all" VS "GDV":
# BP, CC, and MF:
sub_command4 = (network, technique_1, technique_4, enrichemnt_1, 10)
sub_command5 = (network, technique_1, technique_4, enrichemnt_2, 10)
sub_command6 = (network, technique_1, technique_4, enrichemnt_3, 10)
# Technique "GCV-O+" VS "triangle":
# BP, CC, and MF:
sub_command7 = (network, technique_2, technique_3, enrichemnt_1, 10)
sub_command8 = (network, technique_2, technique_3, enrichemnt_2, 10)
sub_command9 = (network, technique_2, technique_3, enrichemnt_3, 10)
# Technique "GCV-O+" VS "GDV":
# BP, CC, and MF:
sub_command10 = (network, technique_2, technique_4, enrichemnt_1, 10)
sub_command11 = (network, technique_2, technique_4, enrichemnt_2, 10)
sub_command12 = (network, technique_2, technique_4, enrichemnt_3, 10)
# Technique "triangle" VS "GDV":
# BP, CC, and MF:
sub_command13 = (network, technique_3, technique_4, enrichemnt_1, 10)
sub_command14 = (network, technique_3, technique_4, enrichemnt_2, 10)
sub_command15 = (network, technique_3, technique_4, enrichemnt_3, 10)
Process = [sub_command1,sub_command2, sub_command3, sub_command4, sub_command5,
sub_command6, sub_command7,sub_command8,sub_command9, sub_command10,
sub_command11, sub_command12, sub_command13, sub_command14, sub_command15]
for arguments in Process:
Functions_Files.Main_Plot_Function(*arguments)
| 30.671533 | 85 | 0.73584 |
import os
import multiprocessing
from multiprocessing import Pool
os.chdir(r"C:\Users\Sergio\Desktop\Markus_Project")
import Functions_Files
5 = (network, technique_2, enrichemnt_2, 10)
sub_command6 = (network, technique_2, enrichemnt_3, 10)
sub_command7 = (network, technique_3, enrichemnt_1, 10)
sub_command8 = (network, technique_3, enrichemnt_2, 10)
sub_command9 = (network, technique_3, enrichemnt_3, 10)
sub_command10 = (network, technique_4, enrichemnt_1, 10)
sub_command11 = (network, technique_4, enrichemnt_2, 10)
sub_command12 = (network, technique_4, enrichemnt_3, 10)
Process = [sub_command1,sub_command2, sub_command3, sub_command4, sub_command5,
sub_command6, sub_command7,sub_command8,sub_command9, sub_command10,
sub_command11, sub_command12]
for arguments in Process:
Functions_Files.Load_Data(*arguments)
sub_command1 = (network, enrichemnt_1, 10)
sub_command2 = (network, enrichemnt_2, 10)
sub_command3 = (network, enrichemnt_3, 10)
Process = [sub_command1,sub_command2, sub_command3]
for arguments in Process:
Functions_Files.Pair_Wise_GO_Comparison(*arguments)
sub_command1 = (network, technique_1, technique_2, enrichemnt_1, 10)
sub_command2 = (network, technique_1, technique_2, enrichemnt_2, 10)
sub_command3 = (network, technique_1, technique_2, enrichemnt_3, 10)
sub_command4 = (network, technique_1, technique_3, enrichemnt_1, 10)
sub_command5 = (network, technique_1, technique_3, enrichemnt_2, 10)
sub_command6 = (network, technique_1, technique_3, enrichemnt_3, 10)
sub_command4 = (network, technique_1, technique_4, enrichemnt_1, 10)
sub_command5 = (network, technique_1, technique_4, enrichemnt_2, 10)
sub_command6 = (network, technique_1, technique_4, enrichemnt_3, 10)
sub_command7 = (network, technique_2, technique_3, enrichemnt_1, 10)
sub_command8 = (network, technique_2, technique_3, enrichemnt_2, 10)
sub_command9 = (network, technique_2, technique_3, enrichemnt_3, 10)
sub_command10 = (network, technique_2, technique_4, enrichemnt_1, 10)
sub_command11 = (network, technique_2, technique_4, enrichemnt_2, 10)
sub_command12 = (network, technique_2, technique_4, enrichemnt_3, 10)
sub_command13 = (network, technique_3, technique_4, enrichemnt_1, 10)
sub_command14 = (network, technique_3, technique_4, enrichemnt_2, 10)
sub_command15 = (network, technique_3, technique_4, enrichemnt_3, 10)
Process = [sub_command1,sub_command2, sub_command3, sub_command4, sub_command5,
sub_command6, sub_command7,sub_command8,sub_command9, sub_command10,
sub_command11, sub_command12, sub_command13, sub_command14, sub_command15]
for arguments in Process:
Functions_Files.Main_Plot_Function(*arguments)
| true | true |
f73013d5b899b1bc75fe9a03e46f38591a6f58b9 | 2,287 | py | Python | userbot/plugins/gbun.py | Aliensuniquebot/CatUserbot | 93561a620fc1198c6fe6c259412088f4bc81d97b | [
"MIT"
] | 1 | 2020-07-18T07:42:58.000Z | 2020-07-18T07:42:58.000Z | userbot/plugins/gbun.py | praveen368/CatUserbot | 4b0cd970551ffaf86b9fdd5da584c1b3882821ff | [
"MIT"
] | null | null | null | userbot/plugins/gbun.py | praveen368/CatUserbot | 4b0cd970551ffaf86b9fdd5da584c1b3882821ff | [
"MIT"
] | 2 | 2020-06-25T11:14:50.000Z | 2021-04-04T13:49:13.000Z | # This is a troll indeed ffs *facepalm*
import asyncio
from telethon import events
from telethon.tl.functions.users import GetFullUserRequest
from telethon.tl.types import ChannelParticipantsAdmins
from userbot.utils import admin_cmd
@borg.on(admin_cmd(pattern="gbun"))
async def gbun(event):
if event.fwd_from:
return
gbunVar = event.text
gbunVar = gbunVar[6:]
mentions = "`Warning!! User 𝙂𝘽𝘼𝙉𝙉𝙀𝘿 By Admin...\n`"
no_reason = "__Reason: Potential spammer. __"
await event.edit("**Summoning out le Gungnir ❗️⚜️☠️**")
asyncio.sleep(3.5)
chat = await event.get_input_chat()
async for x in borg.iter_participants(chat, filter=ChannelParticipantsAdmins):
mentions += f""
reply_message = None
if event.reply_to_msg_id:
reply_message = await event.get_reply_message()
replied_user = await event.client(GetFullUserRequest(reply_message.from_id))
firstname = replied_user.user.first_name
usname = replied_user.user.username
idd = reply_message.from_id
# make meself invulnerable cuz why not xD
if idd == 1118936839:
await reply_message.reply("`Wait a second, This is my master!`\n**How dare you threaten to ban my master nigger!**\n\n__Your account has been hacked! Pay 69$ to my master__ [✰Sᴀͥʀᴀͣᴛͫʜ™✰](tg://user?id=1118936839) __to release your account__😏")
else:
jnl=("`Warning!! `"
"[{}](tg://user?id={})"
"` 𝙂𝘽𝘼𝙉𝙉𝙀𝘿 By Admin...\n\n`"
"**Rendi's Name: ** __{}__\n"
"**ID : ** `{}`\n"
).format(firstname, idd, firstname, idd)
if usname == None:
jnl += "**Victim Nigga's username: ** `Doesn't own a username!`\n"
elif usname != "None":
jnl += "**Victim Nigga's username** : @{}\n".format(usname)
if len(gbunVar) > 0:
gbunm = "`{}`".format(gbunVar)
gbunr = "**Reason: **"+gbunm
jnl += gbunr
else:
jnl += no_reason
await reply_message.reply(jnl)
else:
mention = "`Warning!! User 𝙂𝘽𝘼𝙉𝙉𝙀𝘿 By Admin...\nReason: Potential spammer. `"
await event.reply(mention)
await event.delete()
| 42.351852 | 255 | 0.594666 |
import asyncio
from telethon import events
from telethon.tl.functions.users import GetFullUserRequest
from telethon.tl.types import ChannelParticipantsAdmins
from userbot.utils import admin_cmd
@borg.on(admin_cmd(pattern="gbun"))
async def gbun(event):
if event.fwd_from:
return
gbunVar = event.text
gbunVar = gbunVar[6:]
mentions = "`Warning!! User 𝙂𝘽𝘼𝙉𝙉𝙀𝘿 By Admin...\n`"
no_reason = "__Reason: Potential spammer. __"
await event.edit("**Summoning out le Gungnir ❗️⚜️☠️**")
asyncio.sleep(3.5)
chat = await event.get_input_chat()
async for x in borg.iter_participants(chat, filter=ChannelParticipantsAdmins):
mentions += f""
reply_message = None
if event.reply_to_msg_id:
reply_message = await event.get_reply_message()
replied_user = await event.client(GetFullUserRequest(reply_message.from_id))
firstname = replied_user.user.first_name
usname = replied_user.user.username
idd = reply_message.from_id
if idd == 1118936839:
await reply_message.reply("`Wait a second, This is my master!`\n**How dare you threaten to ban my master nigger!**\n\n__Your account has been hacked! Pay 69$ to my master__ [✰Sᴀͥʀᴀͣᴛͫʜ™✰](tg://user?id=1118936839) __to release your account__😏")
else:
jnl=("`Warning!! `"
"[{}](tg://user?id={})"
"` 𝙂𝘽𝘼𝙉𝙉𝙀𝘿 By Admin...\n\n`"
"**Rendi's Name: ** __{}__\n"
"**ID : ** `{}`\n"
).format(firstname, idd, firstname, idd)
if usname == None:
jnl += "**Victim Nigga's username: ** `Doesn't own a username!`\n"
elif usname != "None":
jnl += "**Victim Nigga's username** : @{}\n".format(usname)
if len(gbunVar) > 0:
gbunm = "`{}`".format(gbunVar)
gbunr = "**Reason: **"+gbunm
jnl += gbunr
else:
jnl += no_reason
await reply_message.reply(jnl)
else:
mention = "`Warning!! User 𝙂𝘽𝘼𝙉𝙉𝙀𝘿 By Admin...\nReason: Potential spammer. `"
await event.reply(mention)
await event.delete()
| true | true |
f7301463f9c4beb4ba5e2fac1fb2efbd03eeb42b | 2,697 | py | Python | projects/PointRend/point_rend/coarse_mask_head.py | tkhe/tkdetection | 54e6c112ef2930e755f457e38449736f5743a9ea | [
"MIT"
] | 1 | 2020-10-09T02:27:13.000Z | 2020-10-09T02:27:13.000Z | projects/PointRend/point_rend/coarse_mask_head.py | tkhe/tkdetection | 54e6c112ef2930e755f457e38449736f5743a9ea | [
"MIT"
] | null | null | null | projects/PointRend/point_rend/coarse_mask_head.py | tkhe/tkdetection | 54e6c112ef2930e755f457e38449736f5743a9ea | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
from tkdet.layers import Conv2d
from tkdet.layers import ShapeSpec
from tkdet.models.roi_head.mask_head import MASK_HEAD_REGISTRY
from tkdet.utils import weight_init
__all__ = ["CoarseMaskHead"]
@MASK_HEAD_REGISTRY.register()
class CoarseMaskHead(nn.Module):
    """Coarse mask head: reduces the input feature map with 1x1 / stride-2
    convs, runs it through fully-connected layers, and predicts a
    per-class square mask of side OUTPUT_SIDE_RESOLUTION.

    Module attribute names (reduce_channel_dim_conv, coarse_mask_fcN,
    prediction, ...) are kept so checkpoint state-dict keys match.
    """
    def __init__(self, cfg, input_shape: ShapeSpec):
        super().__init__()
        # Read head hyper-parameters from the config.
        self.num_classes = cfg.MODEL.NUM_CLASSES
        channels = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM
        self.fc_dim = cfg.MODEL.ROI_MASK_HEAD.FC_DIM
        num_fc = cfg.MODEL.ROI_MASK_HEAD.NUM_FC
        self.output_side_resolution = cfg.MODEL.ROI_MASK_HEAD.OUTPUT_SIDE_RESOLUTION
        # Incoming feature-map geometry.
        self.input_channels = input_shape.channels
        self.input_h = input_shape.height
        self.input_w = input_shape.width
        self.conv_layers = []
        # Optional 1x1 conv to shrink the channel dimension first.
        if self.input_channels > channels:
            self.reduce_channel_dim_conv = Conv2d(
                self.input_channels,
                channels,
                kernel_size=1,
                activation="ReLU"
            )
            self.conv_layers.append(self.reduce_channel_dim_conv)
        # Stride-2 conv halves each spatial dimension.
        self.reduce_spatial_dim_conv = Conv2d(
            channels,
            channels,
            kernel_size=2,
            stride=2,
            padding=0,
            bias=True,
            activation="ReLU"
        )
        self.conv_layers.append(self.reduce_spatial_dim_conv)
        # Flattened size after the spatial reduction (H/2 * W/2).
        fc_input_dim = (channels * self.input_h * self.input_w) // 4
        self.fcs = []
        for idx in range(num_fc):
            fc = nn.Linear(fc_input_dim, self.fc_dim)
            self.add_module("coarse_mask_fc{}".format(idx + 1), fc)
            self.fcs.append(fc)
            fc_input_dim = self.fc_dim
        output_dim = self.num_classes * self.output_side_resolution * self.output_side_resolution
        self.prediction = nn.Linear(self.fc_dim, output_dim)
        # Small-std init for the final predictor; Caffe2-style for the rest.
        nn.init.normal_(self.prediction.weight, std=0.001)
        nn.init.constant_(self.prediction.bias, 0)
        for layer in self.conv_layers:
            weight_init.c2_msra_fill(layer)
        for layer in self.fcs:
            weight_init.c2_xavier_fill(layer)
    def forward(self, x):
        batch = x.shape[0]
        out = x.view(batch, self.input_channels, self.input_h, self.input_w)
        for conv in self.conv_layers:
            out = conv(out)
        out = torch.flatten(out, start_dim=1)
        for fc in self.fcs:
            out = F.relu(fc(out))
        logits = self.prediction(out)
        # (N, num_classes, S, S) coarse mask logits.
        return logits.view(
            batch,
            self.num_classes,
            self.output_side_resolution,
            self.output_side_resolution
        )
| 32.493976 | 97 | 0.622914 | import torch
import torch.nn as nn
import torch.nn.functional as F
from tkdet.layers import Conv2d
from tkdet.layers import ShapeSpec
from tkdet.models.roi_head.mask_head import MASK_HEAD_REGISTRY
from tkdet.utils import weight_init
__all__ = ["CoarseMaskHead"]
@MASK_HEAD_REGISTRY.register()
class CoarseMaskHead(nn.Module):
def __init__(self, cfg, input_shape: ShapeSpec):
super(CoarseMaskHead, self).__init__()
self.num_classes = cfg.MODEL.NUM_CLASSES
conv_dim = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM
self.fc_dim = cfg.MODEL.ROI_MASK_HEAD.FC_DIM
num_fc = cfg.MODEL.ROI_MASK_HEAD.NUM_FC
self.output_side_resolution = cfg.MODEL.ROI_MASK_HEAD.OUTPUT_SIDE_RESOLUTION
self.input_channels = input_shape.channels
self.input_h = input_shape.height
self.input_w = input_shape.width
self.conv_layers = []
if self.input_channels > conv_dim:
self.reduce_channel_dim_conv = Conv2d(
self.input_channels,
conv_dim,
kernel_size=1,
activation="ReLU"
)
self.conv_layers.append(self.reduce_channel_dim_conv)
self.reduce_spatial_dim_conv = Conv2d(
conv_dim,
conv_dim,
kernel_size=2,
stride=2,
padding=0,
bias=True,
activation="ReLU"
)
self.conv_layers.append(self.reduce_spatial_dim_conv)
input_dim = conv_dim * self.input_h * self.input_w
input_dim //= 4
self.fcs = []
for k in range(num_fc):
fc = nn.Linear(input_dim, self.fc_dim)
self.add_module("coarse_mask_fc{}".format(k + 1), fc)
self.fcs.append(fc)
input_dim = self.fc_dim
output_dim = self.num_classes * self.output_side_resolution * self.output_side_resolution
self.prediction = nn.Linear(self.fc_dim, output_dim)
nn.init.normal_(self.prediction.weight, std=0.001)
nn.init.constant_(self.prediction.bias, 0)
for layer in self.conv_layers:
weight_init.c2_msra_fill(layer)
for layer in self.fcs:
weight_init.c2_xavier_fill(layer)
def forward(self, x):
N = x.shape[0]
x = x.view(N, self.input_channels, self.input_h, self.input_w)
for layer in self.conv_layers:
x = layer(x)
x = torch.flatten(x, start_dim=1)
for layer in self.fcs:
x = F.relu(layer(x))
return self.prediction(x).view(
N,
self.num_classes,
self.output_side_resolution,
self.output_side_resolution
)
| true | true |
f730147e7eb4322f1a6d2019bd8d168aab36bec1 | 3,924 | py | Python | adiscorduser/settings/mock_prod.py | ADiscordUser/adiscorduser-site | af6eb58ab528fddba82b8c65bfdd06b6663333cc | [
"MIT"
] | 2 | 2021-03-06T02:21:35.000Z | 2021-03-06T09:34:57.000Z | adiscorduser/settings/mock_prod.py | ADiscordUser/adiscorduser-site | af6eb58ab528fddba82b8c65bfdd06b6663333cc | [
"MIT"
] | null | null | null | adiscorduser/settings/mock_prod.py | ADiscordUser/adiscorduser-site | af6eb58ab528fddba82b8c65bfdd06b6663333cc | [
"MIT"
] | null | null | null | """
Django settings for adiscorduser project.
Generated by 'django-admin startproject' using Django 3.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): empty placeholder in this mock template -- a real key must
# be supplied before Django will start.
SECRET_KEY = ''
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# NOTE(review): with DEBUG=False Django rejects every request until the
# served hostnames are listed here.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'core.apps.CoreConfig',
    'uploader.apps.UploaderConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'adiscorduser.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'adiscorduser.wsgi.application'
# Django REST framework: token authentication plus a project-provided
# pagination class; '?order=' selects result ordering.
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'core.drf.APIPagination',
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.TokenAuthentication'
    ],
    'ORDERING_PARAM': 'order'
}
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
# Connection details are intentionally blank in this mock template.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': '',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': ''
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
# Authentication backend
# The project substitutes its own user model for django.contrib.auth's.
AUTH_USER_MODEL = "core.User"
# Media
# Project-specific upload configuration: one entry per media class, each
# with a public base URL and the MIME types accepted for uploads.
MEDIA_ROOT = ""
MEDIA = {
    "image": {
        "url": "",
        "mime_types": ["image/png", "image/gif", "image/jpeg"]
    },
    "video": {
        "url": "",
        "mime_types": ["video/webm", "video/mp4"]
    }
}
# Cloudflare
# NOTE(review): presumably consumed for Cloudflare API calls (e.g. cache
# purging) -- verify against the code that reads this mapping.
CLOUDFLARE = {
    "ZONE_IDENTIFIER": "",
    "API_KEY": "",
    "PROD_HOST": ALLOWED_HOSTS # you could also make this a list
} | 23.926829 | 91 | 0.66947 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = ''
DEBUG = False
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'core.apps.CoreConfig',
'uploader.apps.UploaderConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'adiscorduser.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'adiscorduser.wsgi.application'
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'core.drf.APIPagination',
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.TokenAuthentication'
],
'ORDERING_PARAM': 'order'
}
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': ''
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
# Authentication backend
AUTH_USER_MODEL = "core.User"
# Media
MEDIA_ROOT = ""
MEDIA = {
"image": {
"url": "",
"mime_types": ["image/png", "image/gif", "image/jpeg"]
},
"video": {
"url": "",
"mime_types": ["video/webm", "video/mp4"]
}
}
# Cloudflare
CLOUDFLARE = {
"ZONE_IDENTIFIER": "",
"API_KEY": "",
"PROD_HOST": ALLOWED_HOSTS # you could also make this a list
} | true | true |
f73014c18015c10c4a57a92b8fba96062dd8c405 | 110 | py | Python | runtest.py | krerkkiat/word-fusion | 54074539b5255830b700c5de185e6dded8f3aec4 | [
"MIT"
] | null | null | null | runtest.py | krerkkiat/word-fusion | 54074539b5255830b700c5de185e6dded8f3aec4 | [
"MIT"
] | null | null | null | runtest.py | krerkkiat/word-fusion | 54074539b5255830b700c5de185e6dded8f3aec4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''Run the project's test cases.

The star import pulls every ``unittest.TestCase`` subclass exported by the
``tests`` package into this module's namespace so that
:func:`unittest.main` (which inspects ``__main__``) can discover them.
'''
import unittest
from tests import *
if __name__ == '__main__':
    # Run (and sys.exit with) the suite only when executed as a script;
    # previously the tests also ran as a side effect of importing this
    # module.
    unittest.main()
| 13.75 | 31 | 0.645455 |
import unittest
from tests import *
unittest.main()
| true | true |
f7301503bf0efbff166667ede074659aa5f11e70 | 391 | py | Python | Twitter/wsgi.py | sonus21/MiniTwitter | b62c0c540c1726fc7e6197b33514d7af15f1b58e | [
"BSD-2-Clause"
] | null | null | null | Twitter/wsgi.py | sonus21/MiniTwitter | b62c0c540c1726fc7e6197b33514d7af15f1b58e | [
"BSD-2-Clause"
] | null | null | null | Twitter/wsgi.py | sonus21/MiniTwitter | b62c0c540c1726fc7e6197b33514d7af15f1b58e | [
"BSD-2-Clause"
] | null | null | null | """
WSGI config for Twitter project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# setdefault: fall back to the project's settings module without
# overriding a DJANGO_SETTINGS_MODULE already set in the environment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Twitter.settings")
# The WSGI callable that application servers import and serve.
application = get_wsgi_application()
| 23 | 78 | 0.785166 |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Twitter.settings")
application = get_wsgi_application()
| true | true |
f730151b01f0716512edaac7c34300146b453531 | 5,477 | py | Python | docs/conf.py | agaveplatform/agavepy | c2b39fdf60648f7025234f7aa175af873ef0ff75 | [
"BSD-3-Clause"
] | 16 | 2015-07-24T16:54:23.000Z | 2020-10-18T23:10:37.000Z | docs/conf.py | agaveplatform/agavepy | c2b39fdf60648f7025234f7aa175af873ef0ff75 | [
"BSD-3-Clause"
] | 77 | 2015-06-11T22:08:10.000Z | 2020-09-09T19:25:27.000Z | docs/conf.py | agaveplatform/agavepy | c2b39fdf60648f7025234f7aa175af873ef0ff75 | [
"BSD-3-Clause"
] | 24 | 2015-11-05T20:32:48.000Z | 2022-01-26T22:05:53.000Z | # -*- coding: utf-8 -*-
#
# AgavePy documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 5 11:08:11 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'AgavePy'
copyright = u'2018- Texas Advanced Computing Center'
author = u'Texas Advanced Computing Center'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# Implement
# https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html
# (html_static_path itself is declared once, in the HTML output section
# below; a duplicate assignment of the same value used to live here.)
html_context = {
    'css_files': [
        '_static/theme_overrides.css', # override wide tables in RTD theme
    ],
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# NOTE(review): these sidebar templates belong to the alabaster theme while
# html_theme above is 'sphinx_rtd_theme' -- confirm they are still wanted.
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'relations.html', # needs 'show_related': True theme option to display
        'searchbox.html'
    ]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'AgavePydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'AgavePy.tex', u'AgavePy Documentation',
     u'Joe Stubbs, Walter Moreira, Matt Vaughn', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'agavepy', u'AgavePy Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'AgavePy', u'AgavePy Documentation',
     author, 'AgavePy', 'One line description of project.',
     'Miscellaneous'),
]
| 30.427778 | 79 | 0.680117 |
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'AgavePy'
copyright = u'2018- Texas Advanced Computing Center'
author = u'Texas Advanced Computing Center'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# Implement
# https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html
html_static_path = ['_static']
html_context = {
'css_files': [
'_static/theme_overrides.css', # override wide tables in RTD theme
],
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html'
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'AgavePydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'AgavePy.tex', u'AgavePy Documentation',
u'Joe Stubbs, Walter Moreira, Matt Vaughn', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'agavepy', u'AgavePy Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'AgavePy', u'AgavePy Documentation',
author, 'AgavePy', 'One line description of project.',
'Miscellaneous'),
]
| true | true |
f73017628abc5a648c3b0d3f76094d50e2de040a | 8,111 | py | Python | contrib/trainer/dream_tf/layers/policy_head.py | Chicoryn/dream-go | 6a4b71d7e1fcc28110ba859c0a2b59c10041c083 | [
"Apache-2.0"
] | 46 | 2017-12-08T01:40:08.000Z | 2022-02-07T12:56:14.000Z | contrib/trainer/dream_tf/layers/policy_head.py | Chicoryn/dream-go | 6a4b71d7e1fcc28110ba859c0a2b59c10041c083 | [
"Apache-2.0"
] | 56 | 2017-12-28T04:00:31.000Z | 2022-03-20T12:39:39.000Z | contrib/trainer/dream_tf/layers/policy_head.py | Chicoryn/dream-go | 6a4b71d7e1fcc28110ba859c0a2b59c10041c083 | [
"Apache-2.0"
] | 8 | 2018-02-01T13:12:32.000Z | 2020-05-11T04:12:25.000Z | # Copyright (c) 2019 Karl Sundequist Blomdahl <karl.sundequist.blomdahl@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import numpy as np
import tensorflow as tf
from .batch_norm import batch_norm_conv2d
from .dense import dense
from .recompute_grad import recompute_grad
def policy_head(x, mode, params):
    """Policy head producing move logits (19*19 board points + pass = 362).

    Pipeline, as described by DeepMind: a 3x3 convolution with batch
    normalisation and a ReLU non-linearity, a flatten, then a fully
    connected layer emitting 362 logits. The whole forward pass is wrapped
    in `recompute_grad`, so intermediate activations are re-derived rather
    than stored for the backward pass.
    """
    in_channels = params['num_channels']
    head_channels = params['num_samples']

    def _inference(features, is_recomputing=False):
        """Forward inference pass over `features`."""
        net = batch_norm_conv2d(features, 'conv_1', (3, 3, in_channels, head_channels), mode, params, is_recomputing=is_recomputing)
        net = tf.reshape(tf.nn.relu(net), (-1, 361 * head_channels))
        logits = dense(net, 'linear_1', (361 * head_channels, 362), policy_offset_op, mode, params, is_recomputing=is_recomputing)
        return tf.cast(logits, tf.float32)

    return recompute_grad(_inference)(x)
def policy_offset_op(shape, dtype=None, partition_info=None):
    """Initial value for the policy offset (bias of the final policy layer).

    The 362 constants below (19x19 board intersections plus the pass move)
    roughly correspond to the log probability of each move being played.
    The signature follows the TF variable-initializer convention, but the
    arguments are ignored and the table is always returned as a float64
    ndarray.
    """
    # NOTE(review): the requested `shape`/`dtype` are not honoured here --
    # presumably the caller relies on an implicit cast downstream; confirm
    # before changing.
    return np.array([
        -7.93991e+00, -6.91853e+00, -6.86255e+00, -6.78094e+00, -6.79361e+00, -6.75976e+00,
        -6.88288e+00, -6.90817e+00, -6.93508e+00, -6.92374e+00, -6.91856e+00, -6.91075e+00,
        -6.87607e+00, -6.75246e+00, -6.79823e+00, -6.80791e+00, -6.86863e+00, -6.89708e+00,
        -7.93729e+00, -6.95779e+00, -6.11830e+00, -5.85974e+00, -5.83566e+00, -5.81966e+00,
        -5.84875e+00, -5.90686e+00, -5.97848e+00, -5.99648e+00, -5.99342e+00, -5.99524e+00,
        -5.96306e+00, -5.88135e+00, -5.83725e+00, -5.81963e+00, -5.84671e+00, -5.85574e+00,
        -6.07402e+00, -6.89741e+00, -6.91472e+00, -5.87616e+00, -5.51456e+00, -5.48398e+00,
        -5.55522e+00, -5.49329e+00, -5.70271e+00, -5.65749e+00, -5.70621e+00, -5.68975e+00,
        -5.69774e+00, -5.66463e+00, -5.68246e+00, -5.43859e+00, -5.59398e+00, -5.44977e+00,
        -5.45890e+00, -5.81432e+00, -6.85663e+00, -6.83055e+00, -5.84429e+00, -5.40160e+00,
        -5.34049e+00, -5.66119e+00, -5.62512e+00, -5.71932e+00, -5.72455e+00, -5.70309e+00,
        -5.69903e+00, -5.70189e+00, -5.71451e+00, -5.68138e+00, -5.59716e+00, -5.64521e+00,
        -5.29867e+00, -5.42794e+00, -5.80074e+00, -6.80807e+00, -6.81930e+00, -5.82896e+00,
        -5.63177e+00, -5.67078e+00, -5.93261e+00, -5.78339e+00, -5.80250e+00, -5.78522e+00,
        -5.79703e+00, -5.79409e+00, -5.79848e+00, -5.78746e+00, -5.77879e+00, -5.76154e+00,
        -5.94899e+00, -5.67992e+00, -5.59753e+00, -5.78787e+00, -6.79474e+00, -6.79318e+00,
        -5.85460e+00, -5.47365e+00, -5.60804e+00, -5.79080e+00, -5.80699e+00, -5.80015e+00,
        -5.81436e+00, -5.81617e+00, -5.80918e+00, -5.81150e+00, -5.80510e+00, -5.77611e+00,
        -5.78804e+00, -5.76476e+00, -5.58303e+00, -5.41241e+00, -5.83056e+00, -6.78050e+00,
        -6.88840e+00, -5.91061e+00, -5.69064e+00, -5.71108e+00, -5.79579e+00, -5.80311e+00,
        -5.81472e+00, -5.81526e+00, -5.81671e+00, -5.81616e+00, -5.81570e+00, -5.80513e+00,
        -5.79622e+00, -5.77254e+00, -5.77513e+00, -5.67571e+00, -5.67228e+00, -5.89279e+00,
        -6.86025e+00, -6.91154e+00, -5.97718e+00, -5.66273e+00, -5.72542e+00, -5.78770e+00,
        -5.81699e+00, -5.81516e+00, -5.81869e+00, -5.81941e+00, -5.81940e+00, -5.81482e+00,
        -5.80754e+00, -5.79365e+00, -5.78832e+00, -5.75882e+00, -5.70202e+00, -5.63253e+00,
        -5.94600e+00, -6.88401e+00, -6.91774e+00, -5.99960e+00, -5.70958e+00, -5.70386e+00,
        -5.80010e+00, -5.81106e+00, -5.81648e+00, -5.81789e+00, -5.81997e+00, -5.81948e+00,
        -5.81279e+00, -5.80583e+00, -5.80135e+00, -5.78998e+00, -5.77203e+00, -5.68193e+00,
        -5.67815e+00, -5.96948e+00, -6.88898e+00, -6.91699e+00, -5.99684e+00, -5.69323e+00,
        -5.68440e+00, -5.79516e+00, -5.81060e+00, -5.81611e+00, -5.81406e+00, -5.81620e+00,
        -5.80901e+00, -5.81298e+00, -5.80653e+00, -5.79696e+00, -5.78196e+00, -5.76473e+00,
        -5.65428e+00, -5.66398e+00, -5.96876e+00, -6.89641e+00, -6.92151e+00, -5.99694e+00,
        -5.71110e+00, -5.71325e+00, -5.79821e+00, -5.80778e+00, -5.81212e+00, -5.81205e+00,
        -5.81020e+00, -5.81116e+00, -5.80801e+00, -5.79830e+00, -5.79276e+00, -5.78653e+00,
        -5.77101e+00, -5.68899e+00, -5.69274e+00, -5.97098e+00, -6.90131e+00, -6.89817e+00,
        -5.95772e+00, -5.64660e+00, -5.72654e+00, -5.77678e+00, -5.80212e+00, -5.80607e+00,
        -5.80127e+00, -5.80551e+00, -5.80743e+00, -5.80042e+00, -5.79346e+00, -5.79025e+00,
        -5.78733e+00, -5.75338e+00, -5.69506e+00, -5.63437e+00, -5.95747e+00, -6.88818e+00,
        -6.86408e+00, -5.86964e+00, -5.67686e+00, -5.70769e+00, -5.79369e+00, -5.78719e+00,
        -5.79913e+00, -5.80025e+00, -5.80054e+00, -5.80132e+00, -5.79529e+00, -5.78667e+00,
        -5.78821e+00, -5.76922e+00, -5.76675e+00, -5.69570e+00, -5.68074e+00, -5.90285e+00,
        -6.86338e+00, -6.76061e+00, -5.80263e+00, -5.41706e+00, -5.58843e+00, -5.78328e+00,
        -5.79366e+00, -5.78934e+00, -5.79841e+00, -5.79591e+00, -5.79041e+00, -5.79060e+00,
        -5.78705e+00, -5.78000e+00, -5.77674e+00, -5.75681e+00, -5.57623e+00, -5.50113e+00,
        -5.85626e+00, -6.78012e+00, -6.79139e+00, -5.80594e+00, -5.58041e+00, -5.65286e+00,
        -5.94338e+00, -5.77647e+00, -5.78968e+00, -5.77167e+00, -5.78232e+00, -5.76841e+00,
        -5.77241e+00, -5.75895e+00, -5.78530e+00, -5.76951e+00, -5.88238e+00, -5.64461e+00,
        -5.61617e+00, -5.82903e+00, -6.80791e+00, -6.81286e+00, -5.84175e+00, -5.48596e+00,
        -5.28293e+00, -5.71807e+00, -5.60505e+00, -5.71724e+00, -5.70963e+00, -5.68757e+00,
        -5.65039e+00, -5.67046e+00, -5.68983e+00, -5.69079e+00, -5.58636e+00, -5.60082e+00,
        -5.39104e+00, -5.38788e+00, -5.85818e+00, -6.81584e+00, -6.83461e+00, -5.85197e+00,
        -5.47331e+00, -5.40193e+00, -5.63715e+00, -5.47135e+00, -5.68295e+00, -5.64977e+00,
        -5.67997e+00, -5.64680e+00, -5.67367e+00, -5.61327e+00, -5.67216e+00, -5.50078e+00,
        -5.53072e+00, -5.40751e+00, -5.52960e+00, -5.87713e+00, -6.89602e+00, -6.89446e+00,
        -6.07997e+00, -5.83860e+00, -5.78284e+00, -5.77460e+00, -5.81606e+00, -5.88522e+00,
        -5.95163e+00, -5.97232e+00, -5.95954e+00, -5.96527e+00, -5.94048e+00, -5.88465e+00,
        -5.82810e+00, -5.82003e+00, -5.84255e+00, -5.88531e+00, -6.11968e+00, -6.92480e+00,
        -7.88397e+00, -6.89418e+00, -6.83908e+00, -6.78821e+00, -6.75784e+00, -6.75053e+00,
        -6.85545e+00, -6.88249e+00, -6.88945e+00, -6.88525e+00, -6.88876e+00, -6.86828e+00,
        -6.83631e+00, -6.75981e+00, -6.76317e+00, -6.74771e+00, -6.86408e+00, -6.90874e+00,
        -7.91371e+00, -6.27113e+00
    ])
import numpy as np
import tensorflow as tf
from .batch_norm import batch_norm_conv2d
from .dense import dense
from .recompute_grad import recompute_grad
def policy_head(x, mode, params):
num_channels = params['num_channels']
num_samples = params['num_samples']
def _forward(x, is_recomputing=False):
y = batch_norm_conv2d(x, 'conv_1', (3, 3, num_channels, num_samples), mode, params, is_recomputing=is_recomputing)
y = tf.nn.relu(y)
y = tf.reshape(y, (-1, 361 * num_samples))
y = dense(y, 'linear_1', (361 * num_samples, 362), policy_offset_op, mode, params, is_recomputing=is_recomputing)
return tf.cast(y, tf.float32)
return recompute_grad(_forward)(x)
def policy_offset_op(shape, dtype=None, partition_info=None):
return np.array([
-7.93991e+00, -6.91853e+00, -6.86255e+00, -6.78094e+00, -6.79361e+00, -6.75976e+00,
-6.88288e+00, -6.90817e+00, -6.93508e+00, -6.92374e+00, -6.91856e+00, -6.91075e+00,
-6.87607e+00, -6.75246e+00, -6.79823e+00, -6.80791e+00, -6.86863e+00, -6.89708e+00,
-7.93729e+00, -6.95779e+00, -6.11830e+00, -5.85974e+00, -5.83566e+00, -5.81966e+00,
-5.84875e+00, -5.90686e+00, -5.97848e+00, -5.99648e+00, -5.99342e+00, -5.99524e+00,
-5.96306e+00, -5.88135e+00, -5.83725e+00, -5.81963e+00, -5.84671e+00, -5.85574e+00,
-6.07402e+00, -6.89741e+00, -6.91472e+00, -5.87616e+00, -5.51456e+00, -5.48398e+00,
-5.55522e+00, -5.49329e+00, -5.70271e+00, -5.65749e+00, -5.70621e+00, -5.68975e+00,
-5.69774e+00, -5.66463e+00, -5.68246e+00, -5.43859e+00, -5.59398e+00, -5.44977e+00,
-5.45890e+00, -5.81432e+00, -6.85663e+00, -6.83055e+00, -5.84429e+00, -5.40160e+00,
-5.34049e+00, -5.66119e+00, -5.62512e+00, -5.71932e+00, -5.72455e+00, -5.70309e+00,
-5.69903e+00, -5.70189e+00, -5.71451e+00, -5.68138e+00, -5.59716e+00, -5.64521e+00,
-5.29867e+00, -5.42794e+00, -5.80074e+00, -6.80807e+00, -6.81930e+00, -5.82896e+00,
-5.63177e+00, -5.67078e+00, -5.93261e+00, -5.78339e+00, -5.80250e+00, -5.78522e+00,
-5.79703e+00, -5.79409e+00, -5.79848e+00, -5.78746e+00, -5.77879e+00, -5.76154e+00,
-5.94899e+00, -5.67992e+00, -5.59753e+00, -5.78787e+00, -6.79474e+00, -6.79318e+00,
-5.85460e+00, -5.47365e+00, -5.60804e+00, -5.79080e+00, -5.80699e+00, -5.80015e+00,
-5.81436e+00, -5.81617e+00, -5.80918e+00, -5.81150e+00, -5.80510e+00, -5.77611e+00,
-5.78804e+00, -5.76476e+00, -5.58303e+00, -5.41241e+00, -5.83056e+00, -6.78050e+00,
-6.88840e+00, -5.91061e+00, -5.69064e+00, -5.71108e+00, -5.79579e+00, -5.80311e+00,
-5.81472e+00, -5.81526e+00, -5.81671e+00, -5.81616e+00, -5.81570e+00, -5.80513e+00,
-5.79622e+00, -5.77254e+00, -5.77513e+00, -5.67571e+00, -5.67228e+00, -5.89279e+00,
-6.86025e+00, -6.91154e+00, -5.97718e+00, -5.66273e+00, -5.72542e+00, -5.78770e+00,
-5.81699e+00, -5.81516e+00, -5.81869e+00, -5.81941e+00, -5.81940e+00, -5.81482e+00,
-5.80754e+00, -5.79365e+00, -5.78832e+00, -5.75882e+00, -5.70202e+00, -5.63253e+00,
-5.94600e+00, -6.88401e+00, -6.91774e+00, -5.99960e+00, -5.70958e+00, -5.70386e+00,
-5.80010e+00, -5.81106e+00, -5.81648e+00, -5.81789e+00, -5.81997e+00, -5.81948e+00,
-5.81279e+00, -5.80583e+00, -5.80135e+00, -5.78998e+00, -5.77203e+00, -5.68193e+00,
-5.67815e+00, -5.96948e+00, -6.88898e+00, -6.91699e+00, -5.99684e+00, -5.69323e+00,
-5.68440e+00, -5.79516e+00, -5.81060e+00, -5.81611e+00, -5.81406e+00, -5.81620e+00,
-5.80901e+00, -5.81298e+00, -5.80653e+00, -5.79696e+00, -5.78196e+00, -5.76473e+00,
-5.65428e+00, -5.66398e+00, -5.96876e+00, -6.89641e+00, -6.92151e+00, -5.99694e+00,
-5.71110e+00, -5.71325e+00, -5.79821e+00, -5.80778e+00, -5.81212e+00, -5.81205e+00,
-5.81020e+00, -5.81116e+00, -5.80801e+00, -5.79830e+00, -5.79276e+00, -5.78653e+00,
-5.77101e+00, -5.68899e+00, -5.69274e+00, -5.97098e+00, -6.90131e+00, -6.89817e+00,
-5.95772e+00, -5.64660e+00, -5.72654e+00, -5.77678e+00, -5.80212e+00, -5.80607e+00,
-5.80127e+00, -5.80551e+00, -5.80743e+00, -5.80042e+00, -5.79346e+00, -5.79025e+00,
-5.78733e+00, -5.75338e+00, -5.69506e+00, -5.63437e+00, -5.95747e+00, -6.88818e+00,
-6.86408e+00, -5.86964e+00, -5.67686e+00, -5.70769e+00, -5.79369e+00, -5.78719e+00,
-5.79913e+00, -5.80025e+00, -5.80054e+00, -5.80132e+00, -5.79529e+00, -5.78667e+00,
-5.78821e+00, -5.76922e+00, -5.76675e+00, -5.69570e+00, -5.68074e+00, -5.90285e+00,
-6.86338e+00, -6.76061e+00, -5.80263e+00, -5.41706e+00, -5.58843e+00, -5.78328e+00,
-5.79366e+00, -5.78934e+00, -5.79841e+00, -5.79591e+00, -5.79041e+00, -5.79060e+00,
-5.78705e+00, -5.78000e+00, -5.77674e+00, -5.75681e+00, -5.57623e+00, -5.50113e+00,
-5.85626e+00, -6.78012e+00, -6.79139e+00, -5.80594e+00, -5.58041e+00, -5.65286e+00,
-5.94338e+00, -5.77647e+00, -5.78968e+00, -5.77167e+00, -5.78232e+00, -5.76841e+00,
-5.77241e+00, -5.75895e+00, -5.78530e+00, -5.76951e+00, -5.88238e+00, -5.64461e+00,
-5.61617e+00, -5.82903e+00, -6.80791e+00, -6.81286e+00, -5.84175e+00, -5.48596e+00,
-5.28293e+00, -5.71807e+00, -5.60505e+00, -5.71724e+00, -5.70963e+00, -5.68757e+00,
-5.65039e+00, -5.67046e+00, -5.68983e+00, -5.69079e+00, -5.58636e+00, -5.60082e+00,
-5.39104e+00, -5.38788e+00, -5.85818e+00, -6.81584e+00, -6.83461e+00, -5.85197e+00,
-5.47331e+00, -5.40193e+00, -5.63715e+00, -5.47135e+00, -5.68295e+00, -5.64977e+00,
-5.67997e+00, -5.64680e+00, -5.67367e+00, -5.61327e+00, -5.67216e+00, -5.50078e+00,
-5.53072e+00, -5.40751e+00, -5.52960e+00, -5.87713e+00, -6.89602e+00, -6.89446e+00,
-6.07997e+00, -5.83860e+00, -5.78284e+00, -5.77460e+00, -5.81606e+00, -5.88522e+00,
-5.95163e+00, -5.97232e+00, -5.95954e+00, -5.96527e+00, -5.94048e+00, -5.88465e+00,
-5.82810e+00, -5.82003e+00, -5.84255e+00, -5.88531e+00, -6.11968e+00, -6.92480e+00,
-7.88397e+00, -6.89418e+00, -6.83908e+00, -6.78821e+00, -6.75784e+00, -6.75053e+00,
-6.85545e+00, -6.88249e+00, -6.88945e+00, -6.88525e+00, -6.88876e+00, -6.86828e+00,
-6.83631e+00, -6.75981e+00, -6.76317e+00, -6.74771e+00, -6.86408e+00, -6.90874e+00,
-7.91371e+00, -6.27113e+00
])
| true | true |
f73017774feaf0d8d33dce061c23d77e28fb2da8 | 1,072 | py | Python | aidistillery/models/fasttext_wrapper.py | TheMTank/arxiv-summariser | db4f1e42bcc9185e197a00a18e280a4a3011453c | [
"MIT"
] | 17 | 2018-11-26T23:06:20.000Z | 2022-01-18T21:43:17.000Z | aidistillery/models/fasttext_wrapper.py | TheMTank/arxiv-summariser | db4f1e42bcc9185e197a00a18e280a4a3011453c | [
"MIT"
] | 3 | 2018-11-27T12:17:20.000Z | 2019-02-05T11:40:44.000Z | aidistillery/models/fasttext_wrapper.py | TheMTank/arxiv-summariser | db4f1e42bcc9185e197a00a18e280a4a3011453c | [
"MIT"
] | 3 | 2019-03-06T10:14:08.000Z | 2020-01-21T17:26:20.000Z | import logging
from gensim.models import fasttext
from aidistillery import file_handling
class FastTextWrapper:
def __init__(self, sentences, use_bf = True, dimension=100, window=5, min_count=5, workers=4, sg=0, iterations=5,
type="fasttext", dataset = ""):
logging.info("FastText Wrapper Initialized")
self.sentences = sentences
self.type = type
self.dataset = dataset
self.use_bf = use_bf
self.dimension = dimension
self.window = window
self.min_count = min_count
self.workers = workers
self.sg = sg
self.iterations = iterations
def fit(self):
model = fasttext.FastText(self.sentences,
size=self.dimension,
window=self.window,
min_count=self.min_count,
workers=self.workers,
sg=self.sg,
iter=self.iterations)
if not self.use_bf:
return model
else:
bf_format = file_handling.BF().load_from_gensim(model)
return bf_format
| 29.777778 | 117 | 0.606343 | import logging
from gensim.models import fasttext
from aidistillery import file_handling
class FastTextWrapper:
def __init__(self, sentences, use_bf = True, dimension=100, window=5, min_count=5, workers=4, sg=0, iterations=5,
type="fasttext", dataset = ""):
logging.info("FastText Wrapper Initialized")
self.sentences = sentences
self.type = type
self.dataset = dataset
self.use_bf = use_bf
self.dimension = dimension
self.window = window
self.min_count = min_count
self.workers = workers
self.sg = sg
self.iterations = iterations
def fit(self):
model = fasttext.FastText(self.sentences,
size=self.dimension,
window=self.window,
min_count=self.min_count,
workers=self.workers,
sg=self.sg,
iter=self.iterations)
if not self.use_bf:
return model
else:
bf_format = file_handling.BF().load_from_gensim(model)
return bf_format
| true | true |
f730187437fb8528dba378747c162f34dd4029e0 | 270 | py | Python | subprocess_audio_scripts/success_bad_form.py | colesteere/PT-Exercise-Feedback | 974b6e441bc096d691bd4abd490cfab165560512 | [
"Apache-2.0"
] | null | null | null | subprocess_audio_scripts/success_bad_form.py | colesteere/PT-Exercise-Feedback | 974b6e441bc096d691bd4abd490cfab165560512 | [
"Apache-2.0"
] | null | null | null | subprocess_audio_scripts/success_bad_form.py | colesteere/PT-Exercise-Feedback | 974b6e441bc096d691bd4abd490cfab165560512 | [
"Apache-2.0"
] | null | null | null | import pyttsx3
# from run_bicep_curl import bicepCount
import sys
engine = pyttsx3.init()
voices = engine.getProperty("voices")
engine.setProperty("rate", 165)
engine.setProperty("voice", "english-us")
engine.say("Number {}.".format(sys.argv[1]))
engine.runAndWait() | 20.769231 | 44 | 0.748148 | import pyttsx3
import sys
engine = pyttsx3.init()
voices = engine.getProperty("voices")
engine.setProperty("rate", 165)
engine.setProperty("voice", "english-us")
engine.say("Number {}.".format(sys.argv[1]))
engine.runAndWait() | true | true |
f73019c9ad078a6db358a139a1c9a8db4ff33165 | 2,259 | py | Python | newdust/graindist/composition/cmdrude.py | eblur/newdust | 7e843ae2604a844826606ea04c459694fdd5c178 | [
"BSD-2-Clause"
] | 4 | 2018-02-04T19:04:01.000Z | 2022-02-09T04:11:18.000Z | newdust/graindist/composition/cmdrude.py | eblur/newdust | 7e843ae2604a844826606ea04c459694fdd5c178 | [
"BSD-2-Clause"
] | 21 | 2017-08-15T21:13:42.000Z | 2021-12-23T20:07:24.000Z | newdust/graindist/composition/cmdrude.py | eblur/newdust | 7e843ae2604a844826606ea04c459694fdd5c178 | [
"BSD-2-Clause"
] | 1 | 2021-01-28T18:29:12.000Z | 2021-01-28T18:29:12.000Z | import numpy as np
from newdust import constants as c
__all__ = ['CmDrude']
RHO_DRUDE = 3.0 # g cm^-3
LAM_MAX = c.hc / 0.01 # maximal wavelength that we will allow for RG-Drude
class CmDrude(object):
"""
| **ATTRIBUTES**
| cmtype : 'Drude'
| rho : grain density [g cm^-3]
| citation : A string containing citation to original work
|
| *functions*
| rp(lam, unit='kev') : Returns real part (unit='kev'|'angs')
| ip(lam, unit='kev') : Returns imaginary part (always 0.0)
| cm(lam, unit='kev') : Complex index of refraction of dtype='complex'
| plot(lam, unit='kev') : Plots Re(m-1)
"""
def __init__(self, rho=RHO_DRUDE): # Returns a CM using the Drude approximation
self.cmtype = 'Drude'
self.rho = rho
self.citation = "Using the Drude approximation.\nBohren, C. F. & Huffman, D. R., 1983, Absorption and Scattering of Light by Small Particles (New York: Wiley)"
def rp(self, lam, unit='kev'):
assert unit in c.ALLOWED_LAM_UNITS
lam_cm = c._lam_cm(lam, unit)
mm1 = self.rho / (2.0*c.m_p) * c.r_e/(2.0*np.pi) * np.power(lam_cm, 2)
return mm1 + 1.0
'''# Returns 1 if the wavelength supplied is too low energy (i.e. inappropriate for applying Drude)
mm1 = np.zeros(np.size(lam_cm))
if (np.size(lam_cm) == 1):
if lam_cm >= LAM_MAX:
pass
else:
mm1 = self.rho / (2.0*c.m_p) * c.r_e/(2.0*np.pi) * np.power(lam_cm, 2)
else:
ii = (lam_cm <= LAM_MAX)
mm1[ii] = self.rho / (2.0*c.m_p) * c.r_e/(2.0*np.pi) * np.power(lam_cm[ii], 2)
return mm1 + 1.0'''
def ip(self, lam, unit='kev'):
if np.size(lam) > 1:
return np.zeros(np.size(lam))
else:
return 0.0
def cm(self, lam, unit='kev'):
return self.rp(lam, unit=unit) + 0j
def plot(self, ax, lam, unit='kev', **kwargs):
assert unit in c.ALLOWED_LAM_UNITS
rp = self.rp(lam, unit=unit)
ax.plot(lam, rp-1.0, **kwargs)
ax.set_ylabel("m-1")
if unit == 'kev':
ax.set_xlabel("Energy (keV)")
if unit == 'angs':
ax.set_xlabel("Wavelength (Angstroms)")
| 35.296875 | 167 | 0.558212 | import numpy as np
from newdust import constants as c
__all__ = ['CmDrude']
RHO_DRUDE = 3.0
LAM_MAX = c.hc / 0.01
class CmDrude(object):
def __init__(self, rho=RHO_DRUDE):
self.cmtype = 'Drude'
self.rho = rho
self.citation = "Using the Drude approximation.\nBohren, C. F. & Huffman, D. R., 1983, Absorption and Scattering of Light by Small Particles (New York: Wiley)"
def rp(self, lam, unit='kev'):
assert unit in c.ALLOWED_LAM_UNITS
lam_cm = c._lam_cm(lam, unit)
mm1 = self.rho / (2.0*c.m_p) * c.r_e/(2.0*np.pi) * np.power(lam_cm, 2)
return mm1 + 1.0
def ip(self, lam, unit='kev'):
if np.size(lam) > 1:
return np.zeros(np.size(lam))
else:
return 0.0
def cm(self, lam, unit='kev'):
return self.rp(lam, unit=unit) + 0j
def plot(self, ax, lam, unit='kev', **kwargs):
assert unit in c.ALLOWED_LAM_UNITS
rp = self.rp(lam, unit=unit)
ax.plot(lam, rp-1.0, **kwargs)
ax.set_ylabel("m-1")
if unit == 'kev':
ax.set_xlabel("Energy (keV)")
if unit == 'angs':
ax.set_xlabel("Wavelength (Angstroms)")
| true | true |
f7301aac92ea6041e0cee9eb39608de909e9d91c | 6,055 | py | Python | tests/doc_test/doc_export_id/conf.py | David-Le-Nir/sphinxcontrib-needs | fe809445505fa1e9bf5963eab1d6283dad405e92 | [
"MIT"
] | null | null | null | tests/doc_test/doc_export_id/conf.py | David-Le-Nir/sphinxcontrib-needs | fe809445505fa1e9bf5963eab1d6283dad405e92 | [
"MIT"
] | 2 | 2022-02-13T19:49:18.000Z | 2022-02-13T19:49:18.000Z | tests/doc_test/doc_export_id/conf.py | David-Le-Nir/sphinxcontrib-needs | fe809445505fa1e9bf5963eab1d6283dad405e92 | [
"MIT"
] | null | null | null | #
# needs test docs documentation build configuration file, created by
# sphinx-quickstart on Tue Mar 28 11:37:14 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath("../../sphinxcontrib"))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinxcontrib.needs", "sphinxcontrib.plantuml"]
needs_types = [
{"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
{"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
{"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
{"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
]
needs_extra_links = [
{
"option": "links",
"incoming": "is linked by",
"outgoing": "links to",
"copy": False,
"style": "#black",
"style_part": "dotted,#black",
},
{
"option": "blocks",
"incoming": "is blocked by",
"outgoing": "blocks",
"copy": True,
"style": "bold,#AA0000",
},
{
"option": "tests",
"incoming": "is tested by",
"outgoing": "tests",
"copy": False,
"style": "dashed,#00AA00",
"style_part": "dotted,#00AA00",
},
]
needs_flow_link_types = ["links", "tests"]
plantuml = "java -jar %s" % os.path.join(os.path.dirname(__file__), "..", "..", "..", "docs", "utils", "plantuml.jar")
plantuml_output_format = "svg"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "needs test docs"
copyright = "2017, team useblocks"
author = "team useblocks"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "needstestdocsdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "needstestdocs.tex", "needs test docs Documentation", "team useblocks", "manual"),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "needstestdocs", "needs test docs Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"needstestdocs",
"needs test docs Documentation",
author,
"needstestdocs",
"One line description of project.",
"Miscellaneous",
),
]
| 32.553763 | 118 | 0.647399 |
import os
import sys
sys.path.insert(0, os.path.abspath("../../sphinxcontrib"))
extensions = ["sphinxcontrib.needs", "sphinxcontrib.plantuml"]
needs_types = [
{"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
{"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
{"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
{"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
]
needs_extra_links = [
{
"option": "links",
"incoming": "is linked by",
"outgoing": "links to",
"copy": False,
"style": "#black",
"style_part": "dotted,#black",
},
{
"option": "blocks",
"incoming": "is blocked by",
"outgoing": "blocks",
"copy": True,
"style": "bold,#AA0000",
},
{
"option": "tests",
"incoming": "is tested by",
"outgoing": "tests",
"copy": False,
"style": "dashed,#00AA00",
"style_part": "dotted,#00AA00",
},
]
needs_flow_link_types = ["links", "tests"]
plantuml = "java -jar %s" % os.path.join(os.path.dirname(__file__), "..", "..", "..", "docs", "utils", "plantuml.jar")
plantuml_output_format = "svg"
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = "needs test docs"
copyright = "2017, team useblocks"
author = "team useblocks"
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "needstestdocsdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "needstestdocs.tex", "needs test docs Documentation", "team useblocks", "manual"),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "needstestdocs", "needs test docs Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"needstestdocs",
"needs test docs Documentation",
author,
"needstestdocs",
"One line description of project.",
"Miscellaneous",
),
]
| true | true |
f7301c822967e62a7a90c57cb1b15bfd69425390 | 1,409 | py | Python | pyganalytics/extract.py | dacker-team/pyganalytics | e64eedc582ddd31cc9534cf414d1734b4f512f9e | [
"BSD-2-Clause"
] | 6 | 2018-05-07T14:33:32.000Z | 2019-12-05T12:58:24.000Z | pyganalytics/extract.py | dacker-team/pyganalytics | e64eedc582ddd31cc9534cf414d1734b4f512f9e | [
"BSD-2-Clause"
] | 1 | 2020-02-17T09:24:54.000Z | 2020-02-17T09:24:54.000Z | pyganalytics/extract.py | dacker-team/pyganalytics | e64eedc582ddd31cc9534cf414d1734b4f512f9e | [
"BSD-2-Clause"
] | null | null | null | def get_metrics(response):
"""
Extract asked metrics from api response
@list_metrics : list of dict
"""
list_metrics = []
for i in response['reports'][0]['columnHeader']['metricHeader']['metricHeaderEntries']:
list_metrics.append(i['name'])
return list_metrics
def get_dimensions(response):
"""
Extract asked dimensions from api response
@list_dimensions : list of dict
"""
return response['reports'][0]['columnHeader']['dimensions']
def extract_api_data(response):
"""
Extract all data from api response
"""
try:
rows = response['reports'][0]['data']['rows']
except:
return []
try:
samples_read_counts = response['reports'][0]['data']['samplesReadCounts']
sampling_space_sizes = response['reports'][0]['data']['samplesReadCounts']
print("SAMPLING")
print(samples_read_counts)
print(sampling_space_sizes)
exit()
except:
pass
metric_response = get_metrics(response)
dimensions_response = get_dimensions(response)
data = []
for row in rows:
d = {}
j = 0
for i in dimensions_response:
d[i] = row['dimensions'][j]
j = j + 1
j = 0
for i in metric_response:
d[i] = row['metrics'][0]['values'][j]
j = j + 1
data.append(d)
return data
| 26.092593 | 91 | 0.584102 | def get_metrics(response):
list_metrics = []
for i in response['reports'][0]['columnHeader']['metricHeader']['metricHeaderEntries']:
list_metrics.append(i['name'])
return list_metrics
def get_dimensions(response):
return response['reports'][0]['columnHeader']['dimensions']
def extract_api_data(response):
try:
rows = response['reports'][0]['data']['rows']
except:
return []
try:
samples_read_counts = response['reports'][0]['data']['samplesReadCounts']
sampling_space_sizes = response['reports'][0]['data']['samplesReadCounts']
print("SAMPLING")
print(samples_read_counts)
print(sampling_space_sizes)
exit()
except:
pass
metric_response = get_metrics(response)
dimensions_response = get_dimensions(response)
data = []
for row in rows:
d = {}
j = 0
for i in dimensions_response:
d[i] = row['dimensions'][j]
j = j + 1
j = 0
for i in metric_response:
d[i] = row['metrics'][0]['values'][j]
j = j + 1
data.append(d)
return data
| true | true |
f7301e348946657ef9cc15b2a18a1e18c5bb9a53 | 1,556 | py | Python | Graph/P02_DepthFirstSearch.py | Abhishekkumar001/Data-Structures-using-Python-master | b17b1f50032f1460000f411e22a419675a0c08dc | [
"MIT"
] | null | null | null | Graph/P02_DepthFirstSearch.py | Abhishekkumar001/Data-Structures-using-Python-master | b17b1f50032f1460000f411e22a419675a0c08dc | [
"MIT"
] | null | null | null | Graph/P02_DepthFirstSearch.py | Abhishekkumar001/Data-Structures-using-Python-master | b17b1f50032f1460000f411e22a419675a0c08dc | [
"MIT"
] | null | null | null | class Graph():
def __init__(self):
self.vertex = {}
# for printing the Graph vertexes
def printGraph(self):
print(self.vertex)
for i in self.vertex.keys():
print(i,' -> ', ' -> '.join([str(j) for j in self.vertex[i]]))
# for adding the edge beween two vertexes
def addEdge(self, fromVertex, toVertex):
# check if vertex is already present,
if fromVertex in self.vertex.keys():
self.vertex[fromVertex].append(toVertex)
else:
# else make a new vertex
self.vertex[fromVertex] = [toVertex]
def DFS(self):
# visited array for storing already visited nodes
visited = [False] * len(self.vertex)
# call the recursive helper function
for i in range(len(self.vertex)):
if visited[i] == False:
self.DFSRec(i, visited)
def DFSRec(self, startVertex, visited):
# mark start vertex as visited
visited[startVertex] = True
print(startVertex, end = ' ')
# Recur for all the vertexes that are adjacent to this node
for i in self.vertex.keys():
if visited[i] == False:
self.DFSRec(i, visited)
if __name__ == '__main__':
g = Graph()
g.addEdge(0, 1)
g.addEdge(0, 2)
g.addEdge(1, 2)
g.addEdge(2, 0)
g.addEdge(2, 3)
g.addEdge(3, 3)
g.printGraph()
print('DFS:')
g.DFS()
# OUTPUT:
# 0 -> 1 -> 2
# 1 -> 2
# 2 -> 0 -> 3
# 3 -> 3
# DFS:
# 0 1 2 3
| 25.933333 | 74 | 0.539846 | class Graph():
def __init__(self):
self.vertex = {}
def printGraph(self):
print(self.vertex)
for i in self.vertex.keys():
print(i,' -> ', ' -> '.join([str(j) for j in self.vertex[i]]))
def addEdge(self, fromVertex, toVertex):
if fromVertex in self.vertex.keys():
self.vertex[fromVertex].append(toVertex)
else:
self.vertex[fromVertex] = [toVertex]
def DFS(self):
visited = [False] * len(self.vertex)
for i in range(len(self.vertex)):
if visited[i] == False:
self.DFSRec(i, visited)
def DFSRec(self, startVertex, visited):
visited[startVertex] = True
print(startVertex, end = ' ')
for i in self.vertex.keys():
if visited[i] == False:
self.DFSRec(i, visited)
if __name__ == '__main__':
g = Graph()
g.addEdge(0, 1)
g.addEdge(0, 2)
g.addEdge(1, 2)
g.addEdge(2, 0)
g.addEdge(2, 3)
g.addEdge(3, 3)
g.printGraph()
print('DFS:')
g.DFS()
| true | true |
f7301e8a0de80422b08e8287165f800e2b77df36 | 29,295 | py | Python | perfkitbenchmarker/benchmark_spec.py | sachinpatkar/PerfKitBenchmarker | ed2898278244d71501de87bb181d50b3561dcf44 | [
"Apache-2.0"
] | null | null | null | perfkitbenchmarker/benchmark_spec.py | sachinpatkar/PerfKitBenchmarker | ed2898278244d71501de87bb181d50b3561dcf44 | [
"Apache-2.0"
] | 1 | 2021-03-26T00:41:05.000Z | 2021-03-26T00:41:05.000Z | perfkitbenchmarker/benchmark_spec.py | sachinpatkar/PerfKitBenchmarker | ed2898278244d71501de87bb181d50b3561dcf44 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Container for all data required for a benchmark to run."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import copy
import datetime
import importlib
import logging
import os
import pickle
import threading
import uuid
from perfkitbenchmarker import benchmark_status
from perfkitbenchmarker import capacity_reservation
from perfkitbenchmarker import cloud_tpu
from perfkitbenchmarker import container_service
from perfkitbenchmarker import context
from perfkitbenchmarker import disk
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import edw_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import managed_relational_db
from perfkitbenchmarker import nfs_service
from perfkitbenchmarker import os_types
from perfkitbenchmarker import provider_info
from perfkitbenchmarker import providers
from perfkitbenchmarker import smb_service
from perfkitbenchmarker import spark_service
from perfkitbenchmarker import stages
from perfkitbenchmarker import static_virtual_machine as static_vm
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
import six
from six.moves import range
import six.moves._thread
import six.moves.copyreg
def PickleLock(lock):
return UnPickleLock, (lock.locked(),)
def UnPickleLock(locked, *args):
lock = threading.Lock()
if locked:
if not lock.acquire(False):
raise pickle.UnpicklingError('Cannot acquire lock')
return lock
six.moves.copyreg.pickle(six.moves._thread.LockType, PickleLock)
SUPPORTED = 'strict'
NOT_EXCLUDED = 'permissive'
SKIP_CHECK = 'none'
# GCP labels only allow hyphens (-), underscores (_), lowercase characters, and
# numbers and International characters.
# metadata allow all characters and numbers.
METADATA_TIME_FORMAT = '%Y%m%dt%H%M%Sz'
FLAGS = flags.FLAGS
flags.DEFINE_enum('cloud', providers.GCP, providers.VALID_CLOUDS,
'Name of the cloud to use.')
flags.DEFINE_string('scratch_dir', None,
'Base name for all scratch disk directories in the VM. '
'Upon creation, these directories will have numbers '
'appended to them (for example /scratch0, /scratch1, etc).')
flags.DEFINE_string('startup_script', None,
'Script to run right after vm boot.')
flags.DEFINE_string('postrun_script', None,
'Script to run right after run stage.')
flags.DEFINE_integer('create_and_boot_post_task_delay', None,
'Delay in seconds to delay in between boot tasks.')
# pyformat: disable
flags.DEFINE_enum('benchmark_compatibility_checking', SUPPORTED,
[SUPPORTED, NOT_EXCLUDED, SKIP_CHECK],
'Method used to check compatibility between the benchmark '
' and the cloud. ' + SUPPORTED + ' runs the benchmark only'
' if the cloud provider has declared it supported. ' +
NOT_EXCLUDED + ' runs the benchmark unless it has been'
' declared not supported by the cloud provider. ' + SKIP_CHECK
+ ' does not do the compatibility'
' check.')
# pyformat: enable
class BenchmarkSpec(object):
  """Contains the various data required to make a benchmark run.

  Aggregates the benchmark's configuration, the cloud resources it owns
  (VMs, networks, firewalls, and optional managed services), and the
  Prepare/Run/Cleanup callbacks taken from the benchmark module.
  """
  # Process-wide count of specs created; used to derive sequence_number.
  total_benchmarks = 0
  def __init__(self, benchmark_module, benchmark_config, benchmark_uid):
    """Initialize a BenchmarkSpec object.
    Args:
      benchmark_module: The benchmark module object.
      benchmark_config: BenchmarkConfigSpec. The configuration for the
        benchmark.
      benchmark_uid: An identifier unique to this run of the benchmark even
        if the same benchmark is run multiple times with different configs.
    """
    self.config = benchmark_config
    self.name = benchmark_module.BENCHMARK_NAME
    self.uid = benchmark_uid
    self.status = benchmark_status.SKIPPED
    self.failed_substatus = None
    self.status_detail = None
    BenchmarkSpec.total_benchmarks += 1
    self.sequence_number = BenchmarkSpec.total_benchmarks
    self.vms = []
    self.networks = {}
    self.firewalls = {}
    # Locks guarding concurrent access to the networks/firewalls dicts;
    # picklable thanks to the PickleLock copyreg registration above.
    self.networks_lock = threading.Lock()
    self.firewalls_lock = threading.Lock()
    self.vm_groups = {}
    self.container_specs = benchmark_config.container_specs or {}
    self.container_registry = None
    self.deleted = False
    self.uuid = '%s-%s' % (FLAGS.run_uri, uuid.uuid4())
    self.always_call_cleanup = False
    self.spark_service = None
    self.dpb_service = None
    self.container_cluster = None
    self.managed_relational_db = None
    self.tpus = []
    self.tpu_groups = {}
    self.edw_service = None
    self.nfs_service = None
    self.smb_service = None
    self.app_groups = {}
    # Round-robin cursor into the combined --zones/--extra_zones/--zone list;
    # advanced in ConstructVirtualMachineGroup.
    self._zone_index = 0
    self.capacity_reservations = []
    # Modules can't be pickled, but functions can, so we store the functions
    # necessary to run the benchmark.
    self.BenchmarkPrepare = benchmark_module.Prepare
    self.BenchmarkRun = benchmark_module.Run
    self.BenchmarkCleanup = benchmark_module.Cleanup
    # Set the current thread's BenchmarkSpec object to this one.
    context.SetThreadBenchmarkSpec(self)
  def __repr__(self):
    return '%s(%r)' % (self.__class__, self.__dict__)
  def __str__(self):
    return(
        'Benchmark name: {0}\nFlags: {1}'
        .format(self.name, self.config.flags))
  @contextlib.contextmanager
  def RedirectGlobalFlags(self):
    """Redirects flag reads and writes to the benchmark-specific flags object.
    Within the enclosed code block, reads and writes to the flags.FLAGS object
    are redirected to a copy that has been merged with config-provided flag
    overrides specific to this benchmark run.
    """
    with self.config.RedirectFlags(FLAGS):
      yield
  def ConstructContainerCluster(self):
    """Create the container cluster."""
    if self.config.container_cluster is None:
      return
    cloud = self.config.container_cluster.cloud
    cluster_type = self.config.container_cluster.type
    providers.LoadProvider(cloud)
    container_cluster_class = container_service.GetContainerClusterClass(
        cloud, cluster_type)
    self.container_cluster = container_cluster_class(
        self.config.container_cluster)
  def ConstructContainerRegistry(self):
    """Create the container registry."""
    if self.config.container_registry is None:
      return
    cloud = self.config.container_registry.cloud
    providers.LoadProvider(cloud)
    container_registry_class = container_service.GetContainerRegistryClass(
        cloud)
    self.container_registry = container_registry_class(
        self.config.container_registry)
  def ConstructDpbService(self):
    """Create the dpb_service object and create groups for its vms."""
    if self.config.dpb_service is None:
      return
    providers.LoadProvider(self.config.dpb_service.worker_group.cloud)
    dpb_service_class = dpb_service.GetDpbServiceClass(
        self.config.dpb_service.service_type)
    self.dpb_service = dpb_service_class(self.config.dpb_service)
  def ConstructManagedRelationalDb(self):
    """Create the managed relational db and create groups for its vms."""
    if self.config.managed_relational_db is None:
      return
    cloud = self.config.managed_relational_db.cloud
    providers.LoadProvider(cloud)
    managed_relational_db_class = (
        managed_relational_db.GetManagedRelationalDbClass(cloud))
    self.managed_relational_db = managed_relational_db_class(
        self.config.managed_relational_db)
  def ConstructTpuGroup(self, group_spec):
    """Constructs a single cloud TPU object from its group spec.
    Args:
      group_spec: The config spec for one TPU group, or None.
    Returns:
      A cloud TPU object, or None if group_spec is None.
    """
    if group_spec is None:
      return
    cloud = group_spec.cloud
    providers.LoadProvider(cloud)
    tpu_class = cloud_tpu.GetTpuClass(cloud)
    return tpu_class(group_spec)
  def ConstructTpu(self):
    """Constructs the BenchmarkSpec's cloud TPU objects."""
    tpu_group_specs = self.config.tpu_groups
    # Sorted for a deterministic construction order across runs.
    for group_name, group_spec in sorted(six.iteritems(tpu_group_specs)):
      tpu = self.ConstructTpuGroup(group_spec)
      self.tpu_groups[group_name] = tpu
      self.tpus.append(tpu)
  def ConstructEdwService(self):
    """Create the edw_service object."""
    if self.config.edw_service is None:
      return
    # Load necessary modules from the provider to account for dependencies
    providers.LoadProvider(
        edw_service.TYPE_2_PROVIDER.get(self.config.edw_service.type))
    # Load the module for the edw service based on type
    edw_service_module = importlib.import_module(edw_service.TYPE_2_MODULE.get(
        self.config.edw_service.type))
    # The class name is the capitalized service type, e.g. 'redshift' ->
    # 'Redshift'.
    edw_service_class = getattr(edw_service_module,
                                self.config.edw_service.type[0].upper() +
                                self.config.edw_service.type[1:])
    # Check if a new instance needs to be created or restored from snapshot
    self.edw_service = edw_service_class(self.config.edw_service)
  def ConstructNfsService(self):
    """Construct the NFS service object.
    Creates an NFS Service only if an NFS disk is found in the disk_specs.
    """
    if self.nfs_service:
      logging.info('NFS service already created: %s', self.nfs_service)
      return
    for group_spec in self.config.vm_groups.values():
      if not group_spec.disk_spec or not group_spec.vm_count:
        continue
      disk_spec = group_spec.disk_spec
      if disk_spec.disk_type != disk.NFS:
        continue
      # A configured IP address means an existing (static) NFS server;
      # otherwise a provider-managed NFS service is provisioned.
      if disk_spec.nfs_ip_address:
        self.nfs_service = nfs_service.StaticNfsService(disk_spec)
      else:
        cloud = group_spec.cloud
        providers.LoadProvider(cloud)
        nfs_class = nfs_service.GetNfsServiceClass(cloud)
        self.nfs_service = nfs_class(disk_spec, group_spec.vm_spec.zone)
      logging.debug('NFS service %s', self.nfs_service)
      # Only one NFS service is created, for the first NFS disk spec found.
      break
  def ConstructSmbService(self):
    """Construct the SMB service object.
    Creates an SMB Service only if an SMB disk is found in the disk_specs.
    """
    if self.smb_service:
      logging.info('SMB service already created: %s', self.smb_service)
      return
    for group_spec in self.config.vm_groups.values():
      if not group_spec.disk_spec or not group_spec.vm_count:
        continue
      disk_spec = group_spec.disk_spec
      if disk_spec.disk_type != disk.SMB:
        continue
      cloud = group_spec.cloud
      providers.LoadProvider(cloud)
      smb_class = smb_service.GetSmbServiceClass(cloud)
      self.smb_service = smb_class(disk_spec, group_spec.vm_spec.zone)
      logging.debug('SMB service %s', self.smb_service)
      # Only one SMB service is created, for the first SMB disk spec found.
      break
  def ConstructVirtualMachineGroup(self, group_name, group_spec):
    """Construct the virtual machine(s) needed for a group.
    Args:
      group_name: The name of the VM group.
      group_spec: The config spec for the group.
    Returns:
      A list of constructed (not yet provisioned) VM objects.
    """
    vms = []
    vm_count = group_spec.vm_count
    disk_count = group_spec.disk_count
    # First create the Static VM objects.
    if group_spec.static_vms:
      # Honor --static_vm_tags filtering, capped at the group's vm_count.
      specs = [
          spec for spec in group_spec.static_vms
          if (FLAGS.static_vm_tags is None or spec.tag in FLAGS.static_vm_tags)
      ][:vm_count]
      for vm_spec in specs:
        static_vm_class = static_vm.GetStaticVmClass(vm_spec.os_type)
        vms.append(static_vm_class(vm_spec))
    os_type = group_spec.os_type
    cloud = group_spec.cloud
    # This throws an exception if the benchmark is not
    # supported.
    self._CheckBenchmarkSupport(cloud)
    # Then create the remaining VM objects using VM and disk specs.
    if group_spec.disk_spec:
      disk_spec = group_spec.disk_spec
      # disk_spec.disk_type may contain legacy values that were
      # copied from FLAGS.scratch_disk_type into
      # FLAGS.data_disk_type at the beginning of the run. We
      # translate them here, rather than earlier, because here is
      # where we know what cloud we're using and therefore we're
      # able to pick the right translation table.
      disk_spec.disk_type = disk.WarnAndTranslateDiskTypes(
          disk_spec.disk_type, cloud)
    else:
      disk_spec = None
    for _ in range(vm_count - len(vms)):
      # Assign a zone to each VM sequentially from the --zones flag.
      if FLAGS.zones or FLAGS.extra_zones or FLAGS.zone:
        zone_list = FLAGS.zones + FLAGS.extra_zones + FLAGS.zone
        group_spec.vm_spec.zone = zone_list[self._zone_index]
        # Round-robin: wrap back to the first zone after the last.
        self._zone_index = (self._zone_index + 1
                            if self._zone_index < len(zone_list) - 1 else 0)
      vm = self._CreateVirtualMachine(group_spec.vm_spec, os_type, cloud)
      if disk_spec and not vm.is_static:
        if disk_spec.disk_type == disk.LOCAL and disk_count is None:
          disk_count = vm.max_local_disks
        vm.disk_specs = [copy.copy(disk_spec) for _ in range(disk_count)]
        # In the event that we need to create multiple disks from the same
        # DiskSpec, we need to ensure that they have different mount points.
        if (disk_count > 1 and disk_spec.mount_point):
          for i, spec in enumerate(vm.disk_specs):
            spec.mount_point += str(i)
      vms.append(vm)
    return vms
  def ConstructCapacityReservations(self):
    """Construct capacity reservations for each VM group."""
    if not FLAGS.use_capacity_reservations:
      return
    for vm_group in six.itervalues(self.vm_groups):
      # All VMs in a group share a cloud, so the first VM's cloud is used.
      cloud = vm_group[0].CLOUD
      providers.LoadProvider(cloud)
      capacity_reservation_class = capacity_reservation.GetResourceClass(
          cloud)
      self.capacity_reservations.append(
          capacity_reservation_class(vm_group))
  def _CheckBenchmarkSupport(self, cloud):
    """Throw an exception if the benchmark isn't supported."""
    if FLAGS.benchmark_compatibility_checking == SKIP_CHECK:
      return
    provider_info_class = provider_info.GetProviderInfoClass(cloud)
    benchmark_ok = provider_info_class.IsBenchmarkSupported(self.name)
    # Permissive mode: treat "unknown" (None) as supported.
    if FLAGS.benchmark_compatibility_checking == NOT_EXCLUDED:
      if benchmark_ok is None:
        benchmark_ok = True
    if not benchmark_ok:
      raise ValueError('Provider {0} does not support {1}. Use '
                       '--benchmark_compatibility_checking=none '
                       'to override this check.'.format(
                           provider_info_class.CLOUD, self.name))
  def _ConstructJujuController(self, group_spec):
    """Construct a VirtualMachine object for a Juju controller.
    Args:
      group_spec: The config spec of the group the controller will manage.
    Returns:
      The controller VM object, or None if no VM could be constructed.
    """
    juju_spec = copy.copy(group_spec)
    # The controller is always a single machine.
    juju_spec.vm_count = 1
    jujuvms = self.ConstructVirtualMachineGroup('juju', juju_spec)
    if len(jujuvms):
      jujuvm = jujuvms.pop()
      jujuvm.is_controller = True
      return jujuvm
    return None
  def ConstructVirtualMachines(self):
    """Constructs the BenchmarkSpec's VirtualMachine objects."""
    vm_group_specs = self.config.vm_groups
    # Maps cloud name -> Juju controller VM, one controller per cloud.
    clouds = {}
    for group_name, group_spec in sorted(six.iteritems(vm_group_specs)):
      vms = self.ConstructVirtualMachineGroup(group_name, group_spec)
      if group_spec.os_type == os_types.JUJU:
        # The Juju VM needs to be created first, so that subsequent units can
        # be properly added under its control.
        if group_spec.cloud in clouds:
          jujuvm = clouds[group_spec.cloud]
        else:
          jujuvm = self._ConstructJujuController(group_spec)
          clouds[group_spec.cloud] = jujuvm
        for vm in vms:
          vm.controller = clouds[group_spec.cloud]
          vm.vm_group = group_name
        jujuvm.units.extend(vms)
        if jujuvm and jujuvm not in self.vms:
          self.vms.extend([jujuvm])
          self.vm_groups['%s_juju_controller' % group_spec.cloud] = [jujuvm]
      self.vm_groups[group_name] = vms
      self.vms.extend(vms)
    # If we have a spark service, it needs to access the master_group and
    # the worker group.
    if (self.config.spark_service and
        self.config.spark_service.service_type == spark_service.PKB_MANAGED):
      for group_name in 'master_group', 'worker_group':
        self.spark_service.vms[group_name] = self.vm_groups[group_name]
  def ConstructSparkService(self):
    """Create the spark_service object and create groups for its vms."""
    if self.config.spark_service is None:
      return
    spark_spec = self.config.spark_service
    # Worker group is required, master group is optional
    cloud = spark_spec.worker_group.cloud
    if spark_spec.master_group:
      cloud = spark_spec.master_group.cloud
    providers.LoadProvider(cloud)
    service_type = spark_spec.service_type
    spark_service_class = spark_service.GetSparkServiceClass(
        cloud, service_type)
    self.spark_service = spark_service_class(spark_spec)
    # If this is Pkb managed, the benchmark spec needs to adopt vms.
    if service_type == spark_service.PKB_MANAGED:
      for name, spec in [('master_group', spark_spec.master_group),
                         ('worker_group', spark_spec.worker_group)]:
        if name in self.config.vm_groups:
          raise Exception('Cannot have a vm group {0} with a {1} spark '
                          'service'.format(name, spark_service.PKB_MANAGED))
        self.config.vm_groups[name] = spec
  def Prepare(self):
    """Runs PrepareBackgroundWorkload on all VMs in parallel."""
    targets = [(vm.PrepareBackgroundWorkload, (), {}) for vm in self.vms]
    vm_util.RunParallelThreads(targets, len(targets))
  def Provision(self):
    """Prepares the VMs and networks necessary for the benchmark to run."""
    # Create capacity reservations if the cloud supports it. Note that the
    # capacity reservation class may update the VMs themselves. This is true
    # on AWS, because the VM needs to be aware of the capacity resrevation id
    # before its Create() method is called. Furthermore, if the user does not
    # specify an AWS zone, but a region instead, the AwsCapacityReservation
    # class will make a reservation in a zone that has sufficient capacity.
    # In this case the VM's zone attribute, and the VMs network instance
    # need to be updated as well.
    if self.capacity_reservations:
      vm_util.RunThreaded(lambda res: res.Create(), self.capacity_reservations)
    # Sort networks into a guaranteed order of creation based on dict key.
    # There is a finite limit on the number of threads that are created to
    # provision networks. Until support is added to provision resources in an
    # order based on dependencies, this key ordering can be used to avoid
    # deadlock by placing dependent networks later and their dependencies
    # earlier. As an example, AWS stores both per-region and per-zone objects
    # in this dict, and each per-zone object depends on a corresponding
    # per-region object, so the per-region objects are given keys that come
    # first when sorted.
    networks = [self.networks[key]
                for key in sorted(six.iterkeys(self.networks))]
    vm_util.RunThreaded(lambda net: net.Create(), networks)
    if self.container_registry:
      self.container_registry.Create()
      for container_spec in six.itervalues(self.container_specs):
        if container_spec.static_image:
          continue
        container_spec.image = self.container_registry.GetOrBuild(
            container_spec.image)
    if self.container_cluster:
      self.container_cluster.Create()
    # do after network setup but before VM created
    if self.nfs_service:
      self.nfs_service.Create()
    if self.smb_service:
      self.smb_service.Create()
    if self.vms:
      # We separate out creating, booting, and preparing the VMs into two phases
      # so that we don't slow down the creation of all the VMs by running
      # commands on the VMs that booted.
      vm_util.RunThreaded(
          self.CreateAndBootVm,
          self.vms,
          post_task_delay=FLAGS.create_and_boot_post_task_delay)
      vm_util.RunThreaded(self.PrepareVmAfterBoot, self.vms)
      # Windows VMs are excluded because GenerateSSHConfig only applies to
      # SSH-reachable machines.
      sshable_vms = [
          vm for vm in self.vms if vm.OS_TYPE not in os_types.WINDOWS_OS_TYPES
      ]
      sshable_vm_groups = {}
      for group_name, group_vms in six.iteritems(self.vm_groups):
        sshable_vm_groups[group_name] = [
            vm for vm in group_vms
            if vm.OS_TYPE not in os_types.WINDOWS_OS_TYPES
        ]
      vm_util.GenerateSSHConfig(sshable_vms, sshable_vm_groups)
    if self.spark_service:
      self.spark_service.Create()
    if self.dpb_service:
      self.dpb_service.Create()
    if self.managed_relational_db:
      self.managed_relational_db.client_vm = self.vms[0]
      self.managed_relational_db.Create()
    if self.tpus:
      vm_util.RunThreaded(lambda tpu: tpu.Create(), self.tpus)
    if self.edw_service:
      if not self.edw_service.user_managed:
        # The benchmark creates the Redshift cluster's subnet group in the
        # already provisioned virtual private cloud (vpc).
        for network in networks:
          if network.__class__.__name__ == 'AwsNetwork':
            self.config.edw_service.subnet_id = network.subnet.id
      self.edw_service.Create()
  def Delete(self):
    """Tears down all resources; best-effort, continues past failures."""
    if self.deleted:
      return
    if self.container_registry:
      self.container_registry.Delete()
    if self.spark_service:
      self.spark_service.Delete()
    if self.dpb_service:
      self.dpb_service.Delete()
    if self.managed_relational_db:
      self.managed_relational_db.Delete()
    if self.tpus:
      vm_util.RunThreaded(lambda tpu: tpu.Delete(), self.tpus)
    if self.edw_service:
      self.edw_service.Delete()
    if self.nfs_service:
      self.nfs_service.Delete()
    if self.smb_service:
      self.smb_service.Delete()
    # Note: It is ok to delete capacity reservations before deleting the VMs,
    # and will actually save money (mere seconds of usage).
    if self.capacity_reservations:
      try:
        vm_util.RunThreaded(lambda reservation: reservation.Delete(),
                            self.capacity_reservations)
      except Exception:  # pylint: disable=broad-except
        logging.exception('Got an exception deleting CapacityReservations. '
                          'Attempting to continue tearing down.')
    if self.vms:
      try:
        vm_util.RunThreaded(self.DeleteVm, self.vms)
      except Exception:
        logging.exception('Got an exception deleting VMs. '
                          'Attempting to continue tearing down.')
    for firewall in six.itervalues(self.firewalls):
      try:
        firewall.DisallowAllPorts()
      except Exception:
        logging.exception('Got an exception disabling firewalls. '
                          'Attempting to continue tearing down.')
    if self.container_cluster:
      self.container_cluster.DeleteServices()
      self.container_cluster.DeleteContainers()
      self.container_cluster.Delete()
    for net in six.itervalues(self.networks):
      try:
        net.Delete()
      except Exception:
        logging.exception('Got an exception deleting networks. '
                          'Attempting to continue tearing down.')
    self.deleted = True
  def GetSamples(self):
    """Returns samples created from benchmark resources."""
    samples = []
    if self.container_cluster:
      samples.extend(self.container_cluster.GetSamples())
    if self.container_registry:
      samples.extend(self.container_registry.GetSamples())
    return samples
  def StartBackgroundWorkload(self):
    """Starts the background workload on all VMs in parallel."""
    targets = [(vm.StartBackgroundWorkload, (), {}) for vm in self.vms]
    vm_util.RunParallelThreads(targets, len(targets))
  def StopBackgroundWorkload(self):
    """Stops the background workload on all VMs in parallel."""
    targets = [(vm.StopBackgroundWorkload, (), {}) for vm in self.vms]
    vm_util.RunParallelThreads(targets, len(targets))
  def _GetResourceDict(self, time_format, timeout_minutes=None):
    """Returns a dict of tags to be used to tag resources.
    Args:
      time_format: strftime format string for the timestamp values.
      timeout_minutes: Optional int. Minutes until the resources time out;
        defaults to FLAGS.timeout_minutes.
    Returns:
      Dict of tag name to value.
    """
    now_utc = datetime.datetime.utcnow()
    if not timeout_minutes:
      timeout_minutes = FLAGS.timeout_minutes
    timeout_utc = (
        now_utc +
        datetime.timedelta(minutes=timeout_minutes))
    tags = {
        'timeout_utc': timeout_utc.strftime(time_format),
        'create_time_utc': now_utc.strftime(time_format),
        'benchmark': self.name,
        'perfkit_uuid': self.uuid,
        'owner': FLAGS.owner
    }
    return tags
  def GetResourceTags(self, timeout_minutes=None):
    """Returns a dict of tags to be used to tag resources."""
    return self._GetResourceDict(METADATA_TIME_FORMAT, timeout_minutes)
  def _CreateVirtualMachine(self, vm_spec, os_type, cloud):
    """Create a vm in zone.
    Args:
      vm_spec: A virtual_machine.BaseVmSpec object.
      os_type: The type of operating system for the VM. See the flag of the
        same name for more information.
      cloud: The cloud for the VM. See the flag of the same name for more
        information.
    Returns:
      A virtual_machine.BaseVirtualMachine object.
    """
    # Prefer an unclaimed static VM if one is available.
    vm = static_vm.StaticVirtualMachine.GetStaticVirtualMachine()
    if vm:
      return vm
    vm_class = virtual_machine.GetVmClass(cloud, os_type)
    if vm_class is None:
      # NOTE(review): this message is missing the opening quote before the
      # first %s ('type %s"'); left as-is here since it is a runtime string.
      raise errors.Error(
          'VMs of type %s" are not currently supported on cloud "%s".' %
          (os_type, cloud))
    return vm_class(vm_spec)
  def CreateAndBootVm(self, vm):
    """Creates a single VM and waits for boot to complete.
    Args:
      vm: The BaseVirtualMachine object representing the VM.
    """
    vm.Create()
    logging.info('VM: %s', vm.ip_address)
    logging.info('Waiting for boot completion.')
    vm.AllowRemoteAccessPorts()
    vm.WaitForBootCompletion()
  def PrepareVmAfterBoot(self, vm):
    """Prepares a VM after it has booted.
    This function will prepare a scratch disk if required.
    Args:
      vm: The BaseVirtualMachine object representing the VM.
    Raises:
      Exception: If --vm_metadata is malformed.
    """
    vm_metadata = {
        'benchmark':
            self.name,
        'perfkit_uuid':
            self.uuid,
        'benchmark_uid':
            self.uid,
        'create_time_utc':
            datetime.datetime.utcfromtimestamp(vm.create_start_time),
        'owner':
            FLAGS.owner
    }
    # Merge in user-supplied key:value pairs from --vm_metadata.
    for item in FLAGS.vm_metadata:
      if ':' not in item:
        raise Exception('"%s" not in expected key:value format' % item)
      key, value = item.split(':', 1)
      vm_metadata[key] = value
    vm.AddMetadata(**vm_metadata)
    vm.OnStartup()
    if any((spec.disk_type == disk.LOCAL for spec in vm.disk_specs)):
      vm.SetupLocalDisks()
    for disk_spec in vm.disk_specs:
      if disk_spec.disk_type == disk.RAM:
        vm.CreateRamDisk(disk_spec)
      else:
        vm.CreateScratchDisk(disk_spec)
      # TODO(user): Simplify disk logic.
      if disk_spec.num_striped_disks > 1:
        # Striped scratch disks have already been created together from the
        # first spec, so skip the remaining specs.
        break
    # This must come after Scratch Disk creation to support the
    # Containerized VM case
    vm.PrepareVMEnvironment()
  def DeleteVm(self, vm):
    """Deletes a single vm and scratch disk if required.
    Args:
      vm: The BaseVirtualMachine object representing the VM.
    """
    if vm.is_static and vm.install_packages:
      vm.PackageCleanup()
    vm.Delete()
    vm.DeleteScratchDisks()
  @staticmethod
  def _GetPickleFilename(uid):
    """Returns the filename for the pickled BenchmarkSpec."""
    return os.path.join(vm_util.GetTempDir(), uid)
  def Pickle(self):
    """Pickles the spec so that it can be unpickled on a subsequent run."""
    with open(self._GetPickleFilename(self.uid), 'wb') as pickle_file:
      # Protocol 2 keeps the file readable by both Python 2 and 3.
      pickle.dump(self, pickle_file, 2)
  @classmethod
  def GetBenchmarkSpec(cls, benchmark_module, config, uid):
    """Unpickles or creates a BenchmarkSpec and returns it.
    Args:
      benchmark_module: The benchmark module object.
      config: BenchmarkConfigSpec. The configuration for the benchmark.
      uid: An identifier unique to this run of the benchmark even if the same
        benchmark is run multiple times with different configs.
    Returns:
      A BenchmarkSpec object.
    """
    # A fresh spec is built when provisioning; later stages reload the
    # pickled spec written by a prior stage.
    if stages.PROVISION in FLAGS.run_stage:
      return cls(benchmark_module, config, uid)
    try:
      with open(cls._GetPickleFilename(uid), 'rb') as pickle_file:
        spec = pickle.load(pickle_file)
    except Exception as e:  # pylint: disable=broad-except
      logging.error('Unable to unpickle spec file for benchmark %s.',
                    benchmark_module.BENCHMARK_NAME)
      raise e
    # Always let the spec be deleted after being unpickled so that
    # it's possible to run cleanup even if cleanup has already run.
    spec.deleted = False
    spec.status = benchmark_status.SKIPPED
    context.SetThreadBenchmarkSpec(spec)
    return spec
| 37.557692 | 80 | 0.697901 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import copy
import datetime
import importlib
import logging
import os
import pickle
import threading
import uuid
from perfkitbenchmarker import benchmark_status
from perfkitbenchmarker import capacity_reservation
from perfkitbenchmarker import cloud_tpu
from perfkitbenchmarker import container_service
from perfkitbenchmarker import context
from perfkitbenchmarker import disk
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import edw_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import managed_relational_db
from perfkitbenchmarker import nfs_service
from perfkitbenchmarker import os_types
from perfkitbenchmarker import provider_info
from perfkitbenchmarker import providers
from perfkitbenchmarker import smb_service
from perfkitbenchmarker import spark_service
from perfkitbenchmarker import stages
from perfkitbenchmarker import static_virtual_machine as static_vm
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
import six
from six.moves import range
import six.moves._thread
import six.moves.copyreg
def PickleLock(lock):
return UnPickleLock, (lock.locked(),)
def UnPickleLock(locked, *args):
lock = threading.Lock()
if locked:
if not lock.acquire(False):
raise pickle.UnpicklingError('Cannot acquire lock')
return lock
six.moves.copyreg.pickle(six.moves._thread.LockType, PickleLock)
SUPPORTED = 'strict'
NOT_EXCLUDED = 'permissive'
SKIP_CHECK = 'none'
METADATA_TIME_FORMAT = '%Y%m%dt%H%M%Sz'
FLAGS = flags.FLAGS
flags.DEFINE_enum('cloud', providers.GCP, providers.VALID_CLOUDS,
'Name of the cloud to use.')
flags.DEFINE_string('scratch_dir', None,
'Base name for all scratch disk directories in the VM. '
'Upon creation, these directories will have numbers '
'appended to them (for example /scratch0, /scratch1, etc).')
flags.DEFINE_string('startup_script', None,
'Script to run right after vm boot.')
flags.DEFINE_string('postrun_script', None,
'Script to run right after run stage.')
flags.DEFINE_integer('create_and_boot_post_task_delay', None,
'Delay in seconds to delay in between boot tasks.')
flags.DEFINE_enum('benchmark_compatibility_checking', SUPPORTED,
[SUPPORTED, NOT_EXCLUDED, SKIP_CHECK],
'Method used to check compatibility between the benchmark '
' and the cloud. ' + SUPPORTED + ' runs the benchmark only'
' if the cloud provider has declared it supported. ' +
NOT_EXCLUDED + ' runs the benchmark unless it has been'
' declared not supported by the cloud provider. ' + SKIP_CHECK
+ ' does not do the compatibility'
' check.')
class BenchmarkSpec(object):
total_benchmarks = 0
def __init__(self, benchmark_module, benchmark_config, benchmark_uid):
self.config = benchmark_config
self.name = benchmark_module.BENCHMARK_NAME
self.uid = benchmark_uid
self.status = benchmark_status.SKIPPED
self.failed_substatus = None
self.status_detail = None
BenchmarkSpec.total_benchmarks += 1
self.sequence_number = BenchmarkSpec.total_benchmarks
self.vms = []
self.networks = {}
self.firewalls = {}
self.networks_lock = threading.Lock()
self.firewalls_lock = threading.Lock()
self.vm_groups = {}
self.container_specs = benchmark_config.container_specs or {}
self.container_registry = None
self.deleted = False
self.uuid = '%s-%s' % (FLAGS.run_uri, uuid.uuid4())
self.always_call_cleanup = False
self.spark_service = None
self.dpb_service = None
self.container_cluster = None
self.managed_relational_db = None
self.tpus = []
self.tpu_groups = {}
self.edw_service = None
self.nfs_service = None
self.smb_service = None
self.app_groups = {}
self._zone_index = 0
self.capacity_reservations = []
# necessary to run the benchmark.
self.BenchmarkPrepare = benchmark_module.Prepare
self.BenchmarkRun = benchmark_module.Run
self.BenchmarkCleanup = benchmark_module.Cleanup
# Set the current thread's BenchmarkSpec object to this one.
context.SetThreadBenchmarkSpec(self)
def __repr__(self):
return '%s(%r)' % (self.__class__, self.__dict__)
def __str__(self):
return(
'Benchmark name: {0}\nFlags: {1}'
.format(self.name, self.config.flags))
@contextlib.contextmanager
def RedirectGlobalFlags(self):
with self.config.RedirectFlags(FLAGS):
yield
def ConstructContainerCluster(self):
if self.config.container_cluster is None:
return
cloud = self.config.container_cluster.cloud
cluster_type = self.config.container_cluster.type
providers.LoadProvider(cloud)
container_cluster_class = container_service.GetContainerClusterClass(
cloud, cluster_type)
self.container_cluster = container_cluster_class(
self.config.container_cluster)
def ConstructContainerRegistry(self):
if self.config.container_registry is None:
return
cloud = self.config.container_registry.cloud
providers.LoadProvider(cloud)
container_registry_class = container_service.GetContainerRegistryClass(
cloud)
self.container_registry = container_registry_class(
self.config.container_registry)
def ConstructDpbService(self):
if self.config.dpb_service is None:
return
providers.LoadProvider(self.config.dpb_service.worker_group.cloud)
dpb_service_class = dpb_service.GetDpbServiceClass(
self.config.dpb_service.service_type)
self.dpb_service = dpb_service_class(self.config.dpb_service)
def ConstructManagedRelationalDb(self):
if self.config.managed_relational_db is None:
return
cloud = self.config.managed_relational_db.cloud
providers.LoadProvider(cloud)
managed_relational_db_class = (
managed_relational_db.GetManagedRelationalDbClass(cloud))
self.managed_relational_db = managed_relational_db_class(
self.config.managed_relational_db)
def ConstructTpuGroup(self, group_spec):
if group_spec is None:
return
cloud = group_spec.cloud
providers.LoadProvider(cloud)
tpu_class = cloud_tpu.GetTpuClass(cloud)
return tpu_class(group_spec)
def ConstructTpu(self):
tpu_group_specs = self.config.tpu_groups
for group_name, group_spec in sorted(six.iteritems(tpu_group_specs)):
tpu = self.ConstructTpuGroup(group_spec)
self.tpu_groups[group_name] = tpu
self.tpus.append(tpu)
def ConstructEdwService(self):
if self.config.edw_service is None:
return
providers.LoadProvider(
edw_service.TYPE_2_PROVIDER.get(self.config.edw_service.type))
edw_service_module = importlib.import_module(edw_service.TYPE_2_MODULE.get(
self.config.edw_service.type))
edw_service_class = getattr(edw_service_module,
self.config.edw_service.type[0].upper() +
self.config.edw_service.type[1:])
self.edw_service = edw_service_class(self.config.edw_service)
def ConstructNfsService(self):
if self.nfs_service:
logging.info('NFS service already created: %s', self.nfs_service)
return
for group_spec in self.config.vm_groups.values():
if not group_spec.disk_spec or not group_spec.vm_count:
continue
disk_spec = group_spec.disk_spec
if disk_spec.disk_type != disk.NFS:
continue
if disk_spec.nfs_ip_address:
self.nfs_service = nfs_service.StaticNfsService(disk_spec)
else:
cloud = group_spec.cloud
providers.LoadProvider(cloud)
nfs_class = nfs_service.GetNfsServiceClass(cloud)
self.nfs_service = nfs_class(disk_spec, group_spec.vm_spec.zone)
logging.debug('NFS service %s', self.nfs_service)
break
def ConstructSmbService(self):
if self.smb_service:
logging.info('SMB service already created: %s', self.smb_service)
return
for group_spec in self.config.vm_groups.values():
if not group_spec.disk_spec or not group_spec.vm_count:
continue
disk_spec = group_spec.disk_spec
if disk_spec.disk_type != disk.SMB:
continue
cloud = group_spec.cloud
providers.LoadProvider(cloud)
smb_class = smb_service.GetSmbServiceClass(cloud)
self.smb_service = smb_class(disk_spec, group_spec.vm_spec.zone)
logging.debug('SMB service %s', self.smb_service)
break
def ConstructVirtualMachineGroup(self, group_name, group_spec):
vms = []
vm_count = group_spec.vm_count
disk_count = group_spec.disk_count
if group_spec.static_vms:
specs = [
spec for spec in group_spec.static_vms
if (FLAGS.static_vm_tags is None or spec.tag in FLAGS.static_vm_tags)
][:vm_count]
for vm_spec in specs:
static_vm_class = static_vm.GetStaticVmClass(vm_spec.os_type)
vms.append(static_vm_class(vm_spec))
os_type = group_spec.os_type
cloud = group_spec.cloud
self._CheckBenchmarkSupport(cloud)
if group_spec.disk_spec:
disk_spec = group_spec.disk_spec
disk_spec.disk_type = disk.WarnAndTranslateDiskTypes(
disk_spec.disk_type, cloud)
else:
disk_spec = None
for _ in range(vm_count - len(vms)):
if FLAGS.zones or FLAGS.extra_zones or FLAGS.zone:
zone_list = FLAGS.zones + FLAGS.extra_zones + FLAGS.zone
group_spec.vm_spec.zone = zone_list[self._zone_index]
self._zone_index = (self._zone_index + 1
if self._zone_index < len(zone_list) - 1 else 0)
vm = self._CreateVirtualMachine(group_spec.vm_spec, os_type, cloud)
if disk_spec and not vm.is_static:
if disk_spec.disk_type == disk.LOCAL and disk_count is None:
disk_count = vm.max_local_disks
vm.disk_specs = [copy.copy(disk_spec) for _ in range(disk_count)]
if (disk_count > 1 and disk_spec.mount_point):
for i, spec in enumerate(vm.disk_specs):
spec.mount_point += str(i)
vms.append(vm)
return vms
def ConstructCapacityReservations(self):
if not FLAGS.use_capacity_reservations:
return
for vm_group in six.itervalues(self.vm_groups):
cloud = vm_group[0].CLOUD
providers.LoadProvider(cloud)
capacity_reservation_class = capacity_reservation.GetResourceClass(
cloud)
self.capacity_reservations.append(
capacity_reservation_class(vm_group))
def _CheckBenchmarkSupport(self, cloud):
if FLAGS.benchmark_compatibility_checking == SKIP_CHECK:
return
provider_info_class = provider_info.GetProviderInfoClass(cloud)
benchmark_ok = provider_info_class.IsBenchmarkSupported(self.name)
if FLAGS.benchmark_compatibility_checking == NOT_EXCLUDED:
if benchmark_ok is None:
benchmark_ok = True
if not benchmark_ok:
raise ValueError('Provider {0} does not support {1}. Use '
'--benchmark_compatibility_checking=none '
'to override this check.'.format(
provider_info_class.CLOUD, self.name))
def _ConstructJujuController(self, group_spec):
    """Construct a single-VM Juju controller for a VM group.

    Copies the group spec, forces vm_count to 1 and builds the VM via
    ConstructVirtualMachineGroup.

    Returns:
      The controller VM with is_controller set, or None if construction
      produced no VMs.
    """
    juju_spec = copy.copy(group_spec)
    juju_spec.vm_count = 1
    jujuvms = self.ConstructVirtualMachineGroup('juju', juju_spec)
    if len(jujuvms):
        jujuvm = jujuvms.pop()
        jujuvm.is_controller = True
        return jujuvm
    return None
def ConstructVirtualMachines(self):
    """Construct all VM groups declared in the benchmark config.

    Groups are built in sorted name order. For Juju OS-type groups a
    per-cloud controller VM is created (or reused) and the group's VMs
    are registered as its units. When a PKB-managed Spark service is
    configured, its master/worker groups are wired up afterwards.
    """
    vm_group_specs = self.config.vm_groups
    clouds = {}  # cloud name -> Juju controller VM (one per cloud)
    for group_name, group_spec in sorted(six.iteritems(vm_group_specs)):
        vms = self.ConstructVirtualMachineGroup(group_name, group_spec)
        if group_spec.os_type == os_types.JUJU:
            # Reuse an existing controller for this cloud if one exists.
            if group_spec.cloud in clouds:
                jujuvm = clouds[group_spec.cloud]
            else:
                jujuvm = self._ConstructJujuController(group_spec)
                clouds[group_spec.cloud] = jujuvm
            for vm in vms:
                vm.controller = clouds[group_spec.cloud]
                vm.vm_group = group_name
            # NOTE(review): jujuvm may be None if controller construction
            # produced no VMs; .units here would then raise — confirm
            # upstream guarantees a controller exists.
            jujuvm.units.extend(vms)
            if jujuvm and jujuvm not in self.vms:
                self.vms.extend([jujuvm])
                self.vm_groups['%s_juju_controller' % group_spec.cloud] = [jujuvm]
        self.vm_groups[group_name] = vms
        self.vms.extend(vms)
    # PKB-managed Spark runs on the VMs provisioned above.
    if (self.config.spark_service and
            self.config.spark_service.service_type == spark_service.PKB_MANAGED):
        for group_name in 'master_group', 'worker_group':
            self.spark_service.vms[group_name] = self.vm_groups[group_name]
def ConstructSparkService(self):
    """Create the Spark service instance described by the config.

    Loads the provider of the master group (or worker group when no
    master group is specified) and instantiates the matching
    spark_service class. For PKB-managed services the master/worker
    group specs are injected into config.vm_groups so their VMs get
    provisioned like any other group.

    Raises:
      Exception: if a PKB-managed service would collide with an existing
        vm group named master_group or worker_group.
    """
    if self.config.spark_service is None:
        return
    spark_spec = self.config.spark_service
    # The master group's cloud wins if one is defined.
    cloud = spark_spec.worker_group.cloud
    if spark_spec.master_group:
        cloud = spark_spec.master_group.cloud
    providers.LoadProvider(cloud)
    service_type = spark_spec.service_type
    spark_service_class = spark_service.GetSparkServiceClass(
        cloud, service_type)
    self.spark_service = spark_service_class(spark_spec)
    if service_type == spark_service.PKB_MANAGED:
        for name, spec in [('master_group', spark_spec.master_group),
                           ('worker_group', spark_spec.worker_group)]:
            if name in self.config.vm_groups:
                raise Exception('Cannot have a vm group {0} with a {1} spark '
                                'service'.format(name, spark_service.PKB_MANAGED))
            self.config.vm_groups[name] = spec
def Prepare(self):
    """Prepare the background workload on every VM, in parallel threads."""
    targets = [(vm.PrepareBackgroundWorkload, (), {}) for vm in self.vms]
    vm_util.RunParallelThreads(targets, len(targets))
def Provision(self):
    """Provision all benchmark resources.

    Order matters: capacity reservations, networks, container registry
    and cluster, file services (NFS/SMB), VMs (create+boot, then
    prepare), then the managed services (Spark, DPB, relational DB,
    TPUs, EDW).
    """
    # Capacity reservations can affect VM placement, so VM specs may
    # need to be updated as well.
    if self.capacity_reservations:
        vm_util.RunThreaded(lambda res: res.Create(), self.capacity_reservations)
    # Sort networks into a guaranteed order of creation based on dict key.
    # There is a finite limit on the number of threads that are created to
    # provision networks. Until support is added to provision resources in an
    # order based on dependencies, this key ordering can be used to avoid
    # deadlock by placing dependent networks later and their dependencies
    # earlier. As an example, AWS stores both per-region and per-zone objects
    # in this dict, and each per-zone object depends on a corresponding
    # per-region object, so the per-region objects are given keys that come
    # first when sorted.
    networks = [self.networks[key]
                for key in sorted(six.iterkeys(self.networks))]
    vm_util.RunThreaded(lambda net: net.Create(), networks)
    if self.container_registry:
        self.container_registry.Create()
        # Build (or fetch) images for every non-static container spec.
        for container_spec in six.itervalues(self.container_specs):
            if container_spec.static_image:
                continue
            container_spec.image = self.container_registry.GetOrBuild(
                container_spec.image)
    if self.container_cluster:
        self.container_cluster.Create()
    # File services: do after network setup but before VMs are created so
    # the shares can be mounted during VM preparation.
    if self.nfs_service:
        self.nfs_service.Create()
    if self.smb_service:
        self.smb_service.Create()
    if self.vms:
        # We separate out creating, booting, and preparing the VMs into two
        # phases so that we don't slow down the creation of all the VMs by
        # running preparation work in the same threaded pass.
        vm_util.RunThreaded(
            self.CreateAndBootVm,
            self.vms,
            post_task_delay=FLAGS.create_and_boot_post_task_delay)
        vm_util.RunThreaded(self.PrepareVmAfterBoot, self.vms)
        # Windows guests are excluded from the generated SSH config.
        sshable_vms = [
            vm for vm in self.vms if vm.OS_TYPE not in os_types.WINDOWS_OS_TYPES
        ]
        sshable_vm_groups = {}
        for group_name, group_vms in six.iteritems(self.vm_groups):
            sshable_vm_groups[group_name] = [
                vm for vm in group_vms
                if vm.OS_TYPE not in os_types.WINDOWS_OS_TYPES
            ]
        vm_util.GenerateSSHConfig(sshable_vms, sshable_vm_groups)
    if self.spark_service:
        self.spark_service.Create()
    if self.dpb_service:
        self.dpb_service.Create()
    if self.managed_relational_db:
        # The first VM doubles as the database client.
        self.managed_relational_db.client_vm = self.vms[0]
        self.managed_relational_db.Create()
    if self.tpus:
        vm_util.RunThreaded(lambda tpu: tpu.Create(), self.tpus)
    if self.edw_service:
        if not self.edw_service.user_managed:
            # The EDW service needs to run in an already provisioned
            # virtual private cloud (vpc).
            for network in networks:
                if network.__class__.__name__ == 'AwsNetwork':
                    self.config.edw_service.subnet_id = network.subnet.id
        self.edw_service.Create()
def Delete(self):
    """Tear down all benchmark resources; best-effort and idempotent.

    Teardown of reservation/VM/firewall/network resources catches broad
    exceptions so a failure in one resource type does not prevent
    deleting the rest.
    """
    if self.deleted:
        return
    if self.container_registry:
        self.container_registry.Delete()
    if self.spark_service:
        self.spark_service.Delete()
    if self.dpb_service:
        self.dpb_service.Delete()
    if self.managed_relational_db:
        self.managed_relational_db.Delete()
    if self.tpus:
        vm_util.RunThreaded(lambda tpu: tpu.Delete(), self.tpus)
    if self.edw_service:
        self.edw_service.Delete()
    if self.nfs_service:
        self.nfs_service.Delete()
    if self.smb_service:
        self.smb_service.Delete()
    # Note: It is ok to delete capacity reservations before deleting the VMs,
    # and will actually save money (mere seconds of usage).
    if self.capacity_reservations:
        try:
            vm_util.RunThreaded(lambda reservation: reservation.Delete(),
                                self.capacity_reservations)
        except Exception:  # pylint: disable=broad-except
            logging.exception('Got an exception deleting CapacityReservations. '
                              'Attempting to continue tearing down.')
    if self.vms:
        try:
            vm_util.RunThreaded(self.DeleteVm, self.vms)
        except Exception:
            logging.exception('Got an exception deleting VMs. '
                              'Attempting to continue tearing down.')
    for firewall in six.itervalues(self.firewalls):
        try:
            firewall.DisallowAllPorts()
        except Exception:
            logging.exception('Got an exception disabling firewalls. '
                              'Attempting to continue tearing down.')
    if self.container_cluster:
        # Services and containers must go before the cluster itself.
        self.container_cluster.DeleteServices()
        self.container_cluster.DeleteContainers()
        self.container_cluster.Delete()
    for net in six.itervalues(self.networks):
        try:
            net.Delete()
        except Exception:
            logging.exception('Got an exception deleting networks. '
                              'Attempting to continue tearing down.')
    # Mark deleted so repeated Delete() calls become no-ops.
    self.deleted = True
def GetSamples(self):
    """Collect samples from the container resources, if any exist."""
    collected = []
    for resource in (self.container_cluster, self.container_registry):
        if resource:
            collected.extend(resource.GetSamples())
    return collected
def StartBackgroundWorkload(self):
    """Start the background workload on all VMs via parallel threads."""
    tasks = [(machine.StartBackgroundWorkload, (), {}) for machine in self.vms]
    vm_util.RunParallelThreads(tasks, len(tasks))
def StopBackgroundWorkload(self):
    """Stop the background workload on all VMs via parallel threads."""
    tasks = [(machine.StopBackgroundWorkload, (), {}) for machine in self.vms]
    vm_util.RunParallelThreads(tasks, len(tasks))
def _GetResourceDict(self, time_format, timeout_minutes=None):
    """Build the standard resource-tag dict for this benchmark run.

    Args:
      time_format: strftime format applied to the timestamp values.
      timeout_minutes: optional resource lifetime; falls back to
        --timeout_minutes when falsy.

    Returns:
      Dict with timeout_utc, create_time_utc, benchmark, perfkit_uuid
      and owner entries.
    """
    now_utc = datetime.datetime.utcnow()
    if not timeout_minutes:
        timeout_minutes = FLAGS.timeout_minutes
    timeout_utc = (
        now_utc +
        datetime.timedelta(minutes=timeout_minutes))
    tags = {
        'timeout_utc': timeout_utc.strftime(time_format),
        'create_time_utc': now_utc.strftime(time_format),
        'benchmark': self.name,
        'perfkit_uuid': self.uuid,
        'owner': FLAGS.owner
    }
    return tags
def GetResourceTags(self, timeout_minutes=None):
    """Return resource tags formatted with METADATA_TIME_FORMAT."""
    return self._GetResourceDict(METADATA_TIME_FORMAT, timeout_minutes)
def _CreateVirtualMachine(self, vm_spec, os_type, cloud):
    """Create a VM from the given spec, preferring a pooled static VM.

    Args:
      vm_spec: virtual machine spec to instantiate.
      os_type: OS type string used to select the VM class.
      cloud: provider name used to select the VM class.

    Returns:
      A static VM drawn from the pool when one is available, otherwise a
      newly constructed cloud VM.

    Raises:
      errors.Error: if no VM class exists for (cloud, os_type).
    """
    vm = static_vm.StaticVirtualMachine.GetStaticVirtualMachine()
    if vm:
        return vm
    vm_class = virtual_machine.GetVmClass(cloud, os_type)
    if vm_class is None:
        # Fixed unbalanced quoting in the message ('type %s"' -> 'type "%s"').
        raise errors.Error(
            'VMs of type "%s" are not currently supported on cloud "%s".' %
            (os_type, cloud))
    return vm_class(vm_spec)
def CreateAndBootVm(self, vm):
    """Create a VM, open its remote-access ports and wait for boot."""
    vm.Create()
    logging.info('VM: %s', vm.ip_address)
    logging.info('Waiting for boot completion.')
    vm.AllowRemoteAccessPorts()
    vm.WaitForBootCompletion()
def PrepareVmAfterBoot(self, vm):
    """Tag, initialize and attach disks to a VM after it has booted.

    Applies the standard metadata plus --vm_metadata key:value pairs,
    runs OS startup hooks, sets up local/RAM/scratch disks, and finally
    prepares the VM environment.

    Raises:
      Exception: if a --vm_metadata item is not in key:value form.
    """
    vm_metadata = {
        'benchmark':
            self.name,
        'perfkit_uuid':
            self.uuid,
        'benchmark_uid':
            self.uid,
        'create_time_utc':
            datetime.datetime.utcfromtimestamp(vm.create_start_time),
        'owner':
            FLAGS.owner
    }
    # Merge user-supplied metadata; each item must look like key:value.
    for item in FLAGS.vm_metadata:
        if ':' not in item:
            raise Exception('"%s" not in expected key:value format' % item)
        key, value = item.split(':', 1)
        vm_metadata[key] = value
    vm.AddMetadata(**vm_metadata)
    vm.OnStartup()
    # Local disks are set up once, before per-spec disk creation.
    if any((spec.disk_type == disk.LOCAL for spec in vm.disk_specs)):
        vm.SetupLocalDisks()
    for disk_spec in vm.disk_specs:
        if disk_spec.disk_type == disk.RAM:
            vm.CreateRamDisk(disk_spec)
        else:
            vm.CreateScratchDisk(disk_spec)
        # TODO(user): Simplify disk logic.
        if disk_spec.num_striped_disks > 1:
            # scratch disks has already been created and striped together.
            break
    # This must come after Scratch Disk creation to support the
    # Containerized VM case
    vm.PrepareVMEnvironment()
def DeleteVm(self, vm):
    """Tear down a single VM.

    Static VMs with installed packages are package-cleaned first, then
    the VM and its scratch disks are deleted.
    """
    needs_package_cleanup = vm.is_static and vm.install_packages
    if needs_package_cleanup:
        vm.PackageCleanup()
    vm.Delete()
    vm.DeleteScratchDisks()
@staticmethod
def _GetPickleFilename(uid):
    """Return the pickle-file path for a benchmark-spec uid."""
    return os.path.join(vm_util.GetTempDir(), uid)
def Pickle(self):
    """Serialize this spec (pickle protocol 2) into the run's temp dir."""
    with open(self._GetPickleFilename(self.uid), 'wb') as pickle_file:
        pickle.dump(self, pickle_file, 2)
@classmethod
def GetBenchmarkSpec(cls, benchmark_module, config, uid):
    """Construct a fresh spec or reload one pickled by an earlier stage.

    During the PROVISION stage a new spec is built; later stages
    unpickle the spec saved under the same uid.

    Returns:
      The spec, registered as the current thread's benchmark spec.

    Raises:
      Exception: re-raises whatever unpickling failed with, after
        logging the benchmark name.
    """
    if stages.PROVISION in FLAGS.run_stage:
        return cls(benchmark_module, config, uid)
    try:
        with open(cls._GetPickleFilename(uid), 'rb') as pickle_file:
            spec = pickle.load(pickle_file)
    except Exception as e:  # pylint: disable=broad-except
        logging.error('Unable to unpickle spec file for benchmark %s.',
                      benchmark_module.BENCHMARK_NAME)
        raise e
    # Always let the spec be deleted after being unpickled so that
    # it's possible to run cleanup even if cleanup has already run.
    spec.deleted = False
    spec.status = benchmark_status.SKIPPED
    context.SetThreadBenchmarkSpec(spec)
    return spec
| true | true |
f7301e9a6d3cfd1a5e7377df11cefe34a6c0eeb6 | 251 | py | Python | orio/validators/__init__.py | NIEHS/orio | bf996ebcf41d14b945cd5848460b023376b637ad | [
"MIT"
] | 6 | 2017-04-19T08:49:20.000Z | 2020-12-18T16:13:28.000Z | orio/validators/__init__.py | NIEHS/orio | bf996ebcf41d14b945cd5848460b023376b637ad | [
"MIT"
] | null | null | null | orio/validators/__init__.py | NIEHS/orio | bf996ebcf41d14b945cd5848460b023376b637ad | [
"MIT"
] | 1 | 2020-12-18T16:14:45.000Z | 2020-12-18T16:14:45.000Z | from .base import get_chromosome_size_path # noqa
from .analysis import AnalysisValidator # noqa
from .bigwig import BigWigValidator # noqa
from .feature_list import FeatureListValidator # noqa
from .sort_vector import SortVectorValidator # noqa
| 41.833333 | 54 | 0.820717 | from .base import get_chromosome_size_path
from .analysis import AnalysisValidator
from .bigwig import BigWigValidator
from .feature_list import FeatureListValidator
from .sort_vector import SortVectorValidator
| true | true |
f7301f02b847c99bc61b46703e6c682dfcb86ed7 | 2,745 | py | Python | bidobe/astunit.py | pbrus/binary-doppler-beaming | fb7b8e58d36da41759d643a58270a76f61bd5c90 | [
"MIT"
] | 1 | 2018-06-19T18:35:55.000Z | 2018-06-19T18:35:55.000Z | bidobe/astunit.py | pbrus/binary-doppler-beaming | fb7b8e58d36da41759d643a58270a76f61bd5c90 | [
"MIT"
] | null | null | null | bidobe/astunit.py | pbrus/binary-doppler-beaming | fb7b8e58d36da41759d643a58270a76f61bd5c90 | [
"MIT"
] | null | null | null | """
Store physical constants and calculate astronomical units
from and to the International System of Units.
"""
class UnitsConverter:
    """Two-way conversions between astronomical quantities and SI units."""

    # Physical constants and conversion factors, all expressed in SI units.
    G = 6.67408e-11
    LIGHT_SPEED = 2.99792458e8
    PLANCK_CONSTANT = 6.62606979e-34
    BOLTZMANN_CONSTANT = 1.38064852e-23
    STEFAN_BOLTZMANN_CONSTANT = 5.670367e-8
    SUN_MASS = 1.9884e30
    SUN_RADIUS = 6.957e8
    AU = 1.49597e11
    PARSEC = 3.086e16
    DAY = 86400
    MINUTE = 60

    # ----- mass -----

    def convert_sun_mass_to_kg(self, mass):
        """Solar masses -> kilograms."""
        return self.SUN_MASS * mass

    def convert_kg_to_sun_mass(self, mass):
        """Kilograms -> solar masses."""
        return mass / self.SUN_MASS

    # ----- time -----

    def convert_days_to_sec(self, days):
        """Days -> seconds."""
        return self.DAY * days

    def convert_sec_to_days(self, seconds):
        """Seconds -> days."""
        return seconds / self.DAY

    def convert_min_to_sec(self, minutes):
        """Minutes -> seconds."""
        return self.MINUTE * minutes

    def convert_sec_to_min(self, seconds):
        """Seconds -> minutes."""
        return seconds / self.MINUTE

    def convert_hours_to_sec(self, minutes):
        """Hours -> seconds (parameter name kept as-is for API parity)."""
        return self.MINUTE ** 2 * minutes

    def convert_sec_to_hours(self, seconds):
        """Seconds -> hours."""
        return seconds / self.MINUTE ** 2

    # ----- length -----

    def convert_au_to_m(self, au):
        """Astronomical Units -> meters."""
        return self.AU * au

    def convert_m_to_au(self, meters):
        """Meters -> Astronomical Units."""
        return meters / self.AU

    def convert_m_to_sun_radius(self, meters):
        """Meters -> solar radii."""
        return meters / self.SUN_RADIUS

    def convert_sun_radius_to_m(self, radii):
        """Solar radii -> meters."""
        return self.SUN_RADIUS * radii

    def convert_m_to_parsec(self, meters):
        """Meters -> parsecs."""
        return meters / self.PARSEC

    def convert_parsec_to_m(self, parsecs):
        """Parsecs -> meters."""
        return self.PARSEC * parsecs

    # ----- speed -----

    def convert_kmps_to_mps(self, speed):
        """Kilometers per second -> meters per second."""
        return 1000.0 * speed

    def convert_mps_to_kmps(self, speed):
        """Meters per second -> kilometers per second."""
        return speed / 1000.0
| 30.5 | 74 | 0.660109 |
class UnitsConverter:
G = 6.67408e-11
LIGHT_SPEED = 2.99792458e8
PLANCK_CONSTANT = 6.62606979e-34
BOLTZMANN_CONSTANT = 1.38064852e-23
STEFAN_BOLTZMANN_CONSTANT = 5.670367e-8
SUN_MASS = 1.9884e30
SUN_RADIUS = 6.957e8
AU = 1.49597e11
PARSEC = 3.086e16
DAY = 86400
MINUTE = 60
def convert_sun_mass_to_kg(self, mass):
return mass*self.SUN_MASS
def convert_kg_to_sun_mass(self, mass):
return mass/self.SUN_MASS
def convert_days_to_sec(self, days):
return days*self.DAY
def convert_sec_to_days(self, seconds):
return seconds/self.DAY
def convert_min_to_sec(self, minutes):
return self.MINUTE*minutes
def convert_sec_to_min(self, seconds):
return seconds/self.MINUTE
def convert_hours_to_sec(self, minutes):
return (self.MINUTE**2)*minutes
def convert_sec_to_hours(self, seconds):
return seconds/(self.MINUTE**2)
def convert_au_to_m(self, au):
return au*self.AU
def convert_m_to_au(self, meters):
return meters/self.AU
def convert_kmps_to_mps(self, speed):
return 1000.0*speed
def convert_mps_to_kmps(self, speed):
return speed/1000.0
def convert_m_to_sun_radius(self, meters):
return meters/self.SUN_RADIUS
def convert_sun_radius_to_m(self, radii):
return self.SUN_RADIUS*radii
def convert_m_to_parsec(self, meters):
return meters/self.PARSEC
def convert_parsec_to_m(self, parsecs):
return parsecs*self.PARSEC
| true | true |
f7301f8356df84dbab174e8675eb84f92ee3ede7 | 13,286 | py | Python | DaisyXMusic/modules/song.py | skypar/NITIN_VC__MUSIC | d09f303018ab51bd1b7266ba339e3325520fe429 | [
"Unlicense"
] | null | null | null | DaisyXMusic/modules/song.py | skypar/NITIN_VC__MUSIC | d09f303018ab51bd1b7266ba339e3325520fe429 | [
"Unlicense"
] | null | null | null | DaisyXMusic/modules/song.py | skypar/NITIN_VC__MUSIC | d09f303018ab51bd1b7266ba339e3325520fe429 | [
"Unlicense"
] | null | null | null | # Daisyxmusic (Telegram bot project )
# Copyright (C) 2021 Inukaasith
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import asyncio
import math
import os
import time
from random import randint
from urllib.parse import urlparse
import aiofiles
import aiohttp
import requests
import wget
import youtube_dl
from pyrogram import Client, filters
from pyrogram.errors import FloodWait, MessageNotModified
from pyrogram.types import Message
from youtube_search import YoutubeSearch
from youtubesearchpython import SearchVideos
from DaisyXMusic.config import DURATION_LIMIT
from DaisyXMusic.modules.play import arq
@Client.on_message(filters.command("song") & ~filters.channel)
def song(client, message):
    """Handle /song: search YouTube, download the best m4a audio and reply.

    Sends interim status messages, uploads the audio with title/duration
    and the video thumbnail, then removes the temporary files.
    Fixes over the original: the thumbnail file handle is now closed via a
    context manager, unused locals and dead expressions were removed, and
    the "keywork" typo in the user-facing error message was corrected.
    """
    query = ""
    for i in message.command[1:]:
        query += " " + str(i)
    print(query)
    m = message.reply("🔎 Finding the song...")
    ydl_opts = {"format": "bestaudio[ext=m4a]"}
    try:
        results = YoutubeSearch(query, max_results=1).to_dict()
        link = f"https://youtube.com{results[0]['url_suffix']}"
        title = results[0]["title"][:40]
        thumbnail = results[0]["thumbnails"][0]
        thumb_name = f"thumb{title}.jpg"
        thumb = requests.get(thumbnail, allow_redirects=True)
        # Close the thumbnail file deterministically (was leaked before).
        with open(thumb_name, "wb") as thumb_file:
            thumb_file.write(thumb.content)
        duration = results[0]["duration"]
    except Exception as e:
        m.edit("❌ Found Nothing.\n\nTry another keyword or maybe spell it properly.")
        print(str(e))
        return
    m.edit("Downloading the song ")
    try:
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            info_dict = ydl.extract_info(link, download=False)
            audio_file = ydl.prepare_filename(info_dict)
            ydl.process_info(info_dict)
        rep = "**🎵 Uploaded by DaisyXMusic**"
        # Parse "H:MM:SS" / "M:SS" into total seconds for Telegram metadata.
        secmul, dur, dur_arr = 1, 0, duration.split(":")
        for i in range(len(dur_arr) - 1, -1, -1):
            dur += int(dur_arr[i]) * secmul
            secmul *= 60
        message.reply_audio(
            audio_file,
            caption=rep,
            thumb=thumb_name,
            parse_mode="md",
            title=title,
            duration=dur,
        )
        m.delete()
    except Exception as e:
        m.edit("❌ Error")
        print(e)
    try:
        os.remove(audio_file)
        os.remove(thumb_name)
    except Exception as e:
        print(e)
def get_text(message: Message) -> [None, str]:
    """Return the argument part of a command message, or None if absent."""
    raw = message.text
    if raw is None or " " not in raw:
        return None
    pieces = raw.split(None, 1)
    return pieces[1] if len(pieces) > 1 else None
def humanbytes(size):
    """Convert a byte count into a human-readable binary-prefixed string.

    Returns "" for falsy input (0/None), mirroring the original contract.
    Fix: the original lookup table stopped at TiB and raised KeyError for
    sizes >= 2**50; the table now extends to EiB and the loop is capped.
    """
    if not size:
        return ""
    power = 2 ** 10
    raised_to_pow = 0
    dict_power_n = {0: "", 1: "Ki", 2: "Mi", 3: "Gi", 4: "Ti", 5: "Pi", 6: "Ei"}
    while size > power and raised_to_pow < 6:
        size /= power
        raised_to_pow += 1
    return str(round(size, 2)) + " " + dict_power_n[raised_to_pow] + "B"
async def progress(current, total, message, start, type_of_ps, file_name=None):
    """Edit *message* in place with a transfer progress bar.

    Intended as a pyrogram progress callback; only updates roughly every
    10 seconds (or on completion) to limit edit spam.

    Args:
      current: bytes transferred so far.
      total: total bytes expected.
      message: status message to edit.
      start: epoch seconds when the transfer began.
      type_of_ps: header text describing the operation.
      file_name: optional file name to show in the status.
    """
    now = time.time()
    diff = now - start
    if round(diff % 10.00) == 0 or current == total:
        percentage = current * 100 / total
        speed = current / diff
        elapsed_time = round(diff) * 1000
        if elapsed_time == 0:
            return
        time_to_completion = round((total - current) / speed) * 1000
        estimated_total_time = elapsed_time + time_to_completion
        # 10-segment bar: 🔴 for completed tenths, 🔘 for remaining.
        progress_str = "{0}{1} {2}%\n".format(
            "".join(["🔴" for i in range(math.floor(percentage / 10))]),
            "".join(["🔘" for i in range(10 - math.floor(percentage / 10))]),
            round(percentage, 2),
        )
        tmp = progress_str + "{0} of {1}\nETA: {2}".format(
            humanbytes(current), humanbytes(total), time_formatter(estimated_total_time)
        )
        if file_name:
            try:
                await message.edit(
                    "{}\n**File Name:** `{}`\n{}".format(type_of_ps, file_name, tmp)
                )
            except FloodWait as e:
                # Telegram rate limit: sleep the requested number of seconds.
                await asyncio.sleep(e.x)
            except MessageNotModified:
                pass
        else:
            try:
                await message.edit("{}\n{}".format(type_of_ps, tmp))
            except FloodWait as e:
                await asyncio.sleep(e.x)
            except MessageNotModified:
                pass
def get_user(message: Message, text: str) -> [int, str, None]:
    """Extract a target user and optional reason from a command.

    A replied-to message wins: its author's id is the user and all of
    *text* is the reason. Otherwise the first token of *text* is the
    user (int id when numeric, else a username string) and the remainder
    is the reason.
    """
    parts = None if text is None else text.split(" ", 1)
    if message.reply_to_message:
        reason = text if text else None
        return message.reply_to_message.from_user.id, reason
    if parts is None:
        return None, None
    target, reason = None, None
    if parts[0]:
        first = parts[0]
        target = int(first) if first.isdigit() else first
        if len(parts) == 2:
            reason = parts[1]
    return target, reason
def get_readable_time(seconds: int) -> str:
    """Format a duration in seconds, e.g. 90061 -> '1days, 1h:1m:1s'.

    Returns "" for zero seconds. Fix: the return annotation said ``int``
    but the function has always returned a string.
    """
    count = 0
    ping_time = ""
    time_list = []
    time_suffix_list = ["s", "m", "h", "days"]
    while count < 4:
        count += 1
        # Seconds/minutes divide by 60; hours divide by 24 into days.
        if count < 3:
            remainder, result = divmod(seconds, 60)
        else:
            remainder, result = divmod(seconds, 24)
        if seconds == 0 and remainder == 0:
            break
        time_list.append(int(result))
        seconds = int(remainder)
    for x in range(len(time_list)):
        time_list[x] = str(time_list[x]) + time_suffix_list[x]
    # Days are rendered with ", " instead of the ":" separator.
    if len(time_list) == 4:
        ping_time += time_list.pop() + ", "
    time_list.reverse()
    ping_time += ":".join(time_list)
    return ping_time
def time_formatter(milliseconds: int) -> str:
    """Render a millisecond duration as '1 day(s), 2 hour(s), ...'."""
    seconds, milliseconds = divmod(int(milliseconds), 1000)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    units = (
        (days, "day(s)"),
        (hours, "hour(s)"),
        (minutes, "minute(s)"),
        (seconds, "second(s)"),
        (milliseconds, "millisecond(s)"),
    )
    rendered = "".join(
        "{0} {1}, ".format(value, label) for value, label in units if value
    )
    # Strip the trailing ", " (harmless on the empty string).
    return rendered[:-2]
# Default youtube_dl options: best available audio, keep the thumbnail,
# and transcode to a 192kbps mp3 via ffmpeg.
ydl_opts = {
    "format": "bestaudio/best",
    "writethumbnail": True,
    "postprocessors": [
        {
            "key": "FFmpegExtractAudio",
            "preferredcodec": "mp3",
            "preferredquality": "192",
        }
    ],
}
def get_file_extension_from_url(url):
    """Return the extension (text after the last dot) of a URL's filename."""
    filename = os.path.basename(urlparse(url).path)
    return filename.rsplit(".", 1)[-1]
# Function to download a remote audio file into the working directory.
async def download_song(url):
    """Fetch *url* and save it under a random local .mp3 filename.

    Returns:
      The local filename on HTTP 200.
      NOTE(review): implicitly returns None on any other status — callers
      pass the result straight to reply_audio/os.remove; confirm they
      tolerate None.
    """
    song_name = f"{randint(6969, 6999)}.mp3"
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            if resp.status == 200:
                f = await aiofiles.open(song_name, mode="wb")
                await f.write(await resp.read())
                await f.close()
                return song_name
# Module-level guard: True while a download handler is running.
# NOTE(review): plain bool mutated from several async handlers without
# synchronization; a check-then-set race is possible — confirm the bot
# runs these handlers on a single event loop.
is_downloading = False
def time_to_seconds(time):
    """Convert a colon-separated duration (e.g. '1:02:03') to seconds."""
    fields = str(time).split(":")
    total = 0
    for weight, field in enumerate(reversed(fields)):
        total += int(field) * 60 ** weight
    return total
@Client.on_message(filters.command("saavn") & ~filters.edited)
async def jssong(_, message):
    """Handle /saavn: search JioSaavn via ARQ, download and reply with audio.

    Serialized by the module-level is_downloading flag.
    BUG FIX: the original returned from the `not songs.ok` branch with
    is_downloading still True, permanently blocking later downloads; the
    flag is now reset in a finally block on every exit path.
    """
    global is_downloading
    if len(message.command) < 2:
        await message.reply_text("/saavn requires an argument.")
        return
    if is_downloading:
        await message.reply_text(
            "Another download is in progress, try again after sometime."
        )
        return
    is_downloading = True
    text = message.text.split(None, 1)[1]
    query = text.replace(" ", "%20")
    m = await message.reply_text("Searching...")
    try:
        songs = await arq.saavn(query)
        if not songs.ok:
            await message.reply_text(songs.result)
            return
        sname = songs.result[0].song
        slink = songs.result[0].media_url
        ssingers = songs.result[0].singers
        await m.edit("Downloading")
        song = await download_song(slink)
        await m.edit("Uploading")
        await message.reply_audio(audio=song, title=sname, performer=ssingers)
        os.remove(song)
        await m.delete()
    except Exception as e:
        await m.edit(str(e))
    finally:
        is_downloading = False
# Deezer Music
@Client.on_message(filters.command("deezer") & ~filters.edited)
async def deezsong(_, message):
    """Handle /deezer: search Deezer via ARQ, download and reply with audio.

    Serialized by the module-level is_downloading flag.
    BUG FIX: the original returned from the `not songs.ok` branch with
    is_downloading still True, permanently blocking later downloads; the
    flag is now reset in a finally block on every exit path.
    """
    global is_downloading
    if len(message.command) < 2:
        await message.reply_text("/deezer requires an argument.")
        return
    if is_downloading:
        await message.reply_text(
            "Another download is in progress, try again after sometime."
        )
        return
    is_downloading = True
    text = message.text.split(None, 1)[1]
    query = text.replace(" ", "%20")
    m = await message.reply_text("Searching...")
    try:
        songs = await arq.deezer(query, 1)
        if not songs.ok:
            await message.reply_text(songs.result)
            return
        title = songs.result[0].title
        url = songs.result[0].url
        artist = songs.result[0].artist
        await m.edit("Downloading")
        song = await download_song(url)
        await m.edit("Uploading")
        await message.reply_audio(audio=song, title=title, performer=artist)
        os.remove(song)
        await m.delete()
    except Exception as e:
        await m.edit(str(e))
    finally:
        is_downloading = False
@Client.on_message(filters.command(["vsong", "video"]))
async def ytmusic(client, message: Message):
    """Handle /vsong and /video: download a YouTube video and reply with it.

    Searches YouTube for the message text, enforces DURATION_LIMIT,
    downloads via youtube_dl, uploads with a progress callback, then
    cleans up the temp files. Serialized by the is_downloading flag.
    """
    global is_downloading
    if is_downloading:
        await message.reply_text(
            "Another download is in progress, try again after sometime."
        )
        return
    urlissed = get_text(message)
    pablo = await client.send_message(
        message.chat.id, f"`Getting {urlissed} From Youtube Servers. Please Wait.`"
    )
    if not urlissed:
        await pablo.edit("Invalid Command Syntax, Please Check Help Menu To Know More!")
        return
    search = SearchVideos(f"{urlissed}", offset=1, mode="dict", max_results=1)
    mi = search.result()
    mio = mi["search_result"]
    mo = mio[0]["link"]
    thum = mio[0]["title"]
    fridayz = mio[0]["id"]
    thums = mio[0]["channel"]
    kekme = f"https://img.youtube.com/vi/{fridayz}/hqdefault.jpg"
    await asyncio.sleep(0.6)
    url = mo
    # Synchronous thumbnail download (briefly blocks the event loop).
    sedlyf = wget.download(kekme)
    opts = {
        "format": "best",
        "addmetadata": True,
        "key": "FFmpegMetadata",
        "prefer_ffmpeg": True,
        "geo_bypass": True,
        "nocheckcertificate": True,
        "postprocessors": [{"key": "FFmpegVideoConvertor", "preferedformat": "mp4"}],
        "outtmpl": "%(id)s.mp4",
        "logtostderr": False,
        "quiet": True,
    }
    try:
        is_downloading = True
        with youtube_dl.YoutubeDL(opts) as ytdl:
            infoo = ytdl.extract_info(url, False)
            duration = round(infoo["duration"] / 60)
            if duration > DURATION_LIMIT:
                # NOTE(review): this early return leaves the downloaded
                # thumbnail (sedlyf) on disk — confirm intentional.
                await pablo.edit(
                    f"❌ Videos longer than {DURATION_LIMIT} minute(s) aren't allowed, the provided video is {duration} minute(s)"
                )
                is_downloading = False
                return
            ytdl_data = ytdl.extract_info(url, download=True)
    except Exception:
        # NOTE(review): download failures are swallowed silently — the
        # status message is never updated (old report kept below).
        # await pablo.edit(event, f"**Failed To Download** \n**Error :** `{str(e)}`")
        is_downloading = False
        return
    c_time = time.time()
    file_stark = f"{ytdl_data['id']}.mp4"
    capy = f"**Video Name ➠** `{thum}` \n**Requested For :** `{urlissed}` \n**Channel :** `{thums}` \n**Link :** `{mo}`"
    await client.send_video(
        message.chat.id,
        video=open(file_stark, "rb"),
        duration=int(ytdl_data["duration"]),
        file_name=str(ytdl_data["title"]),
        thumb=sedlyf,
        caption=capy,
        supports_streaming=True,
        progress=progress,
        progress_args=(
            pablo,
            c_time,
            f"`Uploading {urlissed} Song From YouTube Music!`",
            file_stark,
        ),
    )
    await pablo.delete()
    is_downloading = False
    # Remove the thumbnail and the downloaded video file.
    for files in (sedlyf, file_stark):
        if files and os.path.exists(files):
            os.remove(files)
| 31.187793 | 129 | 0.594084 |
from __future__ import unicode_literals
import asyncio
import math
import os
import time
from random import randint
from urllib.parse import urlparse
import aiofiles
import aiohttp
import requests
import wget
import youtube_dl
from pyrogram import Client, filters
from pyrogram.errors import FloodWait, MessageNotModified
from pyrogram.types import Message
from youtube_search import YoutubeSearch
from youtubesearchpython import SearchVideos
from DaisyXMusic.config import DURATION_LIMIT
from DaisyXMusic.modules.play import arq
@Client.on_message(filters.command("song") & ~filters.channel)
def song(client, message):
user_id = message.from_user.id
user_name = message.from_user.first_name
rpk = "[" + user_name + "](tg://user?id=" + str(user_id) + ")"
query = ""
for i in message.command[1:]:
query += " " + str(i)
print(query)
m = message.reply("🔎 Finding the song...")
ydl_opts = {"format": "bestaudio[ext=m4a]"}
try:
results = YoutubeSearch(query, max_results=1).to_dict()
link = f"https://youtube.com{results[0]['url_suffix']}"
title = results[0]["title"][:40]
thumbnail = results[0]["thumbnails"][0]
thumb_name = f"thumb{title}.jpg"
thumb = requests.get(thumbnail, allow_redirects=True)
open(thumb_name, "wb").write(thumb.content)
duration = results[0]["duration"]
results[0]["url_suffix"]
results[0]["views"]
except Exception as e:
m.edit("❌ Found Nothing.\n\nTry another keywork or maybe spell it properly.")
print(str(e))
return
m.edit("Downloading the song ")
try:
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
info_dict = ydl.extract_info(link, download=False)
audio_file = ydl.prepare_filename(info_dict)
ydl.process_info(info_dict)
rep = "**🎵 Uploaded by DaisyXMusic**"
secmul, dur, dur_arr = 1, 0, duration.split(":")
for i in range(len(dur_arr) - 1, -1, -1):
dur += int(dur_arr[i]) * secmul
secmul *= 60
message.reply_audio(
audio_file,
caption=rep,
thumb=thumb_name,
parse_mode="md",
title=title,
duration=dur,
)
m.delete()
except Exception as e:
m.edit("❌ Error")
print(e)
try:
os.remove(audio_file)
os.remove(thumb_name)
except Exception as e:
print(e)
def get_text(message: Message) -> [None, str]:
text_to_return = message.text
if message.text is None:
return None
if " " in text_to_return:
try:
return message.text.split(None, 1)[1]
except IndexError:
return None
else:
return None
def humanbytes(size):
if not size:
return ""
power = 2 ** 10
raised_to_pow = 0
dict_power_n = {0: "", 1: "Ki", 2: "Mi", 3: "Gi", 4: "Ti"}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + " " + dict_power_n[raised_to_pow] + "B"
async def progress(current, total, message, start, type_of_ps, file_name=None):
now = time.time()
diff = now - start
if round(diff % 10.00) == 0 or current == total:
percentage = current * 100 / total
speed = current / diff
elapsed_time = round(diff) * 1000
if elapsed_time == 0:
return
time_to_completion = round((total - current) / speed) * 1000
estimated_total_time = elapsed_time + time_to_completion
progress_str = "{0}{1} {2}%\n".format(
"".join(["🔴" for i in range(math.floor(percentage / 10))]),
"".join(["🔘" for i in range(10 - math.floor(percentage / 10))]),
round(percentage, 2),
)
tmp = progress_str + "{0} of {1}\nETA: {2}".format(
humanbytes(current), humanbytes(total), time_formatter(estimated_total_time)
)
if file_name:
try:
await message.edit(
"{}\n**File Name:** `{}`\n{}".format(type_of_ps, file_name, tmp)
)
except FloodWait as e:
await asyncio.sleep(e.x)
except MessageNotModified:
pass
else:
try:
await message.edit("{}\n{}".format(type_of_ps, tmp))
except FloodWait as e:
await asyncio.sleep(e.x)
except MessageNotModified:
pass
def get_user(message: Message, text: str) -> [int, str, None]:
if text is None:
asplit = None
else:
asplit = text.split(" ", 1)
user_s = None
reason_ = None
if message.reply_to_message:
user_s = message.reply_to_message.from_user.id
reason_ = text if text else None
elif asplit is None:
return None, None
elif len(asplit[0]) > 0:
user_s = int(asplit[0]) if asplit[0].isdigit() else asplit[0]
if len(asplit) == 2:
reason_ = asplit[1]
return user_s, reason_
def get_readable_time(seconds: int) -> int:
count = 0
ping_time = ""
time_list = []
time_suffix_list = ["s", "m", "h", "days"]
while count < 4:
count += 1
if count < 3:
remainder, result = divmod(seconds, 60)
else:
remainder, result = divmod(seconds, 24)
if seconds == 0 and remainder == 0:
break
time_list.append(int(result))
seconds = int(remainder)
for x in range(len(time_list)):
time_list[x] = str(time_list[x]) + time_suffix_list[x]
if len(time_list) == 4:
ping_time += time_list.pop() + ", "
time_list.reverse()
ping_time += ":".join(time_list)
return ping_time
def time_formatter(milliseconds: int) -> str:
seconds, milliseconds = divmod(int(milliseconds), 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = (
((str(days) + " day(s), ") if days else "")
+ ((str(hours) + " hour(s), ") if hours else "")
+ ((str(minutes) + " minute(s), ") if minutes else "")
+ ((str(seconds) + " second(s), ") if seconds else "")
+ ((str(milliseconds) + " millisecond(s), ") if milliseconds else "")
)
return tmp[:-2]
ydl_opts = {
"format": "bestaudio/best",
"writethumbnail": True,
"postprocessors": [
{
"key": "FFmpegExtractAudio",
"preferredcodec": "mp3",
"preferredquality": "192",
}
],
}
def get_file_extension_from_url(url):
url_path = urlparse(url).path
basename = os.path.basename(url_path)
return basename.split(".")[-1]
async def download_song(url):
song_name = f"{randint(6969, 6999)}.mp3"
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
if resp.status == 200:
f = await aiofiles.open(song_name, mode="wb")
await f.write(await resp.read())
await f.close()
return song_name
is_downloading = False
def time_to_seconds(time):
stringt = str(time)
return sum(int(x) * 60 ** i for i, x in enumerate(reversed(stringt.split(":"))))
@Client.on_message(filters.command("saavn") & ~filters.edited)
async def jssong(_, message):
global is_downloading
if len(message.command) < 2:
await message.reply_text("/saavn requires an argument.")
return
if is_downloading:
await message.reply_text(
"Another download is in progress, try again after sometime."
)
return
is_downloading = True
text = message.text.split(None, 1)[1]
query = text.replace(" ", "%20")
m = await message.reply_text("Searching...")
try:
songs = await arq.saavn(query)
if not songs.ok:
await message.reply_text(songs.result)
return
sname = songs.result[0].song
slink = songs.result[0].media_url
ssingers = songs.result[0].singers
await m.edit("Downloading")
song = await download_song(slink)
await m.edit("Uploading")
await message.reply_audio(audio=song, title=sname, performer=ssingers)
os.remove(song)
await m.delete()
except Exception as e:
is_downloading = False
await m.edit(str(e))
return
is_downloading = False
@Client.on_message(filters.command("deezer") & ~filters.edited)
async def deezsong(_, message):
global is_downloading
if len(message.command) < 2:
await message.reply_text("/deezer requires an argument.")
return
if is_downloading:
await message.reply_text(
"Another download is in progress, try again after sometime."
)
return
is_downloading = True
text = message.text.split(None, 1)[1]
query = text.replace(" ", "%20")
m = await message.reply_text("Searching...")
try:
songs = await arq.deezer(query, 1)
if not songs.ok:
await message.reply_text(songs.result)
return
title = songs.result[0].title
url = songs.result[0].url
artist = songs.result[0].artist
await m.edit("Downloading")
song = await download_song(url)
await m.edit("Uploading")
await message.reply_audio(audio=song, title=title, performer=artist)
os.remove(song)
await m.delete()
except Exception as e:
is_downloading = False
await m.edit(str(e))
return
is_downloading = False
@Client.on_message(filters.command(["vsong", "video"]))
async def ytmusic(client, message: Message):
global is_downloading
if is_downloading:
await message.reply_text(
"Another download is in progress, try again after sometime."
)
return
urlissed = get_text(message)
pablo = await client.send_message(
message.chat.id, f"`Getting {urlissed} From Youtube Servers. Please Wait.`"
)
if not urlissed:
await pablo.edit("Invalid Command Syntax, Please Check Help Menu To Know More!")
return
search = SearchVideos(f"{urlissed}", offset=1, mode="dict", max_results=1)
mi = search.result()
mio = mi["search_result"]
mo = mio[0]["link"]
thum = mio[0]["title"]
fridayz = mio[0]["id"]
thums = mio[0]["channel"]
kekme = f"https://img.youtube.com/vi/{fridayz}/hqdefault.jpg"
await asyncio.sleep(0.6)
url = mo
sedlyf = wget.download(kekme)
opts = {
"format": "best",
"addmetadata": True,
"key": "FFmpegMetadata",
"prefer_ffmpeg": True,
"geo_bypass": True,
"nocheckcertificate": True,
"postprocessors": [{"key": "FFmpegVideoConvertor", "preferedformat": "mp4"}],
"outtmpl": "%(id)s.mp4",
"logtostderr": False,
"quiet": True,
}
try:
is_downloading = True
with youtube_dl.YoutubeDL(opts) as ytdl:
infoo = ytdl.extract_info(url, False)
duration = round(infoo["duration"] / 60)
if duration > DURATION_LIMIT:
await pablo.edit(
f"❌ Videos longer than {DURATION_LIMIT} minute(s) aren't allowed, the provided video is {duration} minute(s)"
)
is_downloading = False
return
ytdl_data = ytdl.extract_info(url, download=True)
except Exception:
# await pablo.edit(event, f"**Failed To Download** \n**Error :** `{str(e)}`")
is_downloading = False
return
c_time = time.time()
file_stark = f"{ytdl_data['id']}.mp4"
capy = f"**Video Name ➠** `{thum}` \n**Requested For :** `{urlissed}` \n**Channel :** `{thums}` \n**Link :** `{mo}`"
await client.send_video(
message.chat.id,
video=open(file_stark, "rb"),
duration=int(ytdl_data["duration"]),
file_name=str(ytdl_data["title"]),
thumb=sedlyf,
caption=capy,
supports_streaming=True,
progress=progress,
progress_args=(
pablo,
c_time,
f"`Uploading {urlissed} Song From YouTube Music!`",
file_stark,
),
)
await pablo.delete()
is_downloading = False
for files in (sedlyf, file_stark):
if files and os.path.exists(files):
os.remove(files)
| true | true |
f7301f8cc870b348e9ccc730cb09c52bb390f47c | 2,406 | py | Python | src/ast_toolbox/mcts/tree_plot.py | hdelecki/AdaptiveStressTestingToolbox | 184d7d7f1b4acb65eecb749e3c3a78cbcfc3c4ed | [
"MIT"
] | 29 | 2019-01-09T23:56:35.000Z | 2022-03-18T03:41:10.000Z | src/ast_toolbox/mcts/tree_plot.py | hdelecki/AdaptiveStressTestingToolbox | 184d7d7f1b4acb65eecb749e3c3a78cbcfc3c4ed | [
"MIT"
] | 39 | 2019-01-10T00:32:26.000Z | 2022-03-12T00:29:05.000Z | src/ast_toolbox/mcts/tree_plot.py | hdelecki/AdaptiveStressTestingToolbox | 184d7d7f1b4acb65eecb749e3c3a78cbcfc3c4ed | [
"MIT"
] | 11 | 2019-01-10T08:11:47.000Z | 2021-12-28T15:56:02.000Z | import uuid
import pydot
def get_root(tree):
"""Get the root node of the tree.
Parameters
----------
tree : dict
The tree.
Returns
----------
s : :py:class:`ast_toolbox.mcts.AdaptiveStressTesting.ASTState`
The root state.
"""
for s in tree.keys():
if s.parent is None:
return s
def s2node(s, tree):
    """Build a pydot node labelled with the visit count of state *s*.

    Parameters
    ----------
    s : the AST state to convert.
    tree : dict mapping states to node records (each with an ``n`` count).

    Returns
    ----------
    A :py:class:`pydot.Node` with a random unique id, or None when *s*
    is not present in *tree*.
    """
    if s not in tree:
        return None
    return pydot.Node(str(uuid.uuid4()), label='n=' + str(tree[s].n))
def add_children(s, s_node, tree, graph, d):
    """Add successors of s into the graph.

    Parameters
    ----------
    s : :py:class:`ast_toolbox.mcts.AdaptiveStressTesting.ASTState`
        The AST state.
    s_node : :py:class:`pydot.Node`
        The pydot node corresponding to s.
    tree : dict
        The tree.
    graph : :py:class:`pydot.Dot`
        The pydot graph.
    d : int
        The depth.
    """
    if d > 0:
        for a in tree[s].a.keys():
            n = tree[s].a[a].n
            q = tree[s].a[a].q
            # Deterministic transitions only: each action must map to
            # exactly one successor state.
            assert len(tree[s].a[a].s.keys()) == 1
            for ns in tree[s].a[a].s.keys():
                ns_node = s2node(ns, tree)
                # Successors missing from the tree dict are skipped (leaves).
                if ns_node is not None:
                    graph.add_node(ns_node)
                    graph.add_edge(pydot.Edge(s_node, ns_node, label="n=" + str(n) + " a=" + str(a.get()) + " q=" + str(q)))
                    # graph.add_edge(pydot.Edge(s_node, ns_node))
                    # Recurse one level shallower for each child.
                    add_children(ns, ns_node, tree, graph, d - 1)
def plot_tree(tree, d, path, format="svg"):
    """Render the search tree to ``<path>.<format>``.

    Parameters
    ----------
    tree : dict
        The tree.
    d : int
        Maximum depth to draw.
    path : str
        Output path without extension.
    format : str
        Graphviz output format (e.g. "svg" or "png").
    """
    graph = pydot.Dot(graph_type='digraph')
    root = get_root(tree)
    root_node = s2node(root, tree)
    graph.add_node(root_node)
    add_children(root, root_node, tree, graph, d)
    filename = path + "." + format
    # BUG FIX: pydot's Dot.write() defaults to format='raw', so the previous
    # svg/png branches wrote raw DOT text into a .svg/.png file.  Pass the
    # requested format explicitly (this also supports other graphviz formats).
    graph.write(filename, format=format)
| 24.30303 | 124 | 0.525769 | import uuid
import pydot
def get_root(tree):
for s in tree.keys():
if s.parent is None:
return s
def s2node(s, tree):
if s in tree.keys():
return pydot.Node(str(uuid.uuid4()), label='n=' + str(tree[s].n))
else:
return None
def add_children(s, s_node, tree, graph, d):
if d > 0:
for a in tree[s].a.keys():
n = tree[s].a[a].n
q = tree[s].a[a].q
assert len(tree[s].a[a].s.keys()) == 1
for ns in tree[s].a[a].s.keys():
ns_node = s2node(ns, tree)
if ns_node is not None:
graph.add_node(ns_node)
graph.add_edge(pydot.Edge(s_node, ns_node, label="n=" + str(n) + " a=" + str(a.get()) + " q=" + str(q)))
add_children(ns, ns_node, tree, graph, d - 1)
def plot_tree(tree, d, path, format="svg"):
graph = pydot.Dot(graph_type='digraph')
root = get_root(tree)
root_node = s2node(root, tree)
graph.add_node(root_node)
add_children(root, root_node, tree, graph, d)
filename = path + "." + format
if format == "svg":
graph.write(filename)
elif format == "png":
graph.write(filename)
| true | true |
f7302001cebe3ed8119fa24d93d0e279303b50c9 | 1,169 | py | Python | soctrack/utils.py | kcsry/soctrack | b8cfa8aefab98f8daeea0cafa10932f67bcda9dc | [
"MIT"
] | null | null | null | soctrack/utils.py | kcsry/soctrack | b8cfa8aefab98f8daeea0cafa10932f67bcda9dc | [
"MIT"
] | 1 | 2022-02-21T19:16:21.000Z | 2022-02-21T19:16:21.000Z | soctrack/utils.py | kcsry/soctrack | b8cfa8aefab98f8daeea0cafa10932f67bcda9dc | [
"MIT"
] | null | null | null | import re
import time
from django.conf import settings
from django.utils.timezone import make_aware, make_naive, utc
re_pattern = re.compile('[^\u0000-\uD7FF\uE000-\uFFFF]+', re.UNICODE)
def sanitize_unicode(u):
    """Replace runs of characters outside the BMP (and surrogates) with a space."""
    # We may not be able to store all special characters thanks
    # to MySQL's boneheadedness, so accept the minor loss of fidelity
    # in the cached data fields.
    return re_pattern.sub(' ', u)
def could_be_utc(dt):
    """Normalize *dt* against the project's USE_TZ setting.

    With USE_TZ on, *dt* is promoted to a UTC-aware datetime
    (NOTE(review): ``make_aware`` raises on an already-aware input —
    confirm callers only pass naive values here).  With USE_TZ off,
    aware datetimes are converted to naive UTC; naive ones pass through.
    """
    if settings.USE_TZ:
        return make_aware(dt, utc)
    else:
        if dt.tzinfo:
            return make_naive(dt, utc)
        else:
            return dt
class RetryError(Exception):
    """Raised when a retried callable keeps failing; collects the exceptions."""

    def __init__(self, fn, tries, exceptions):
        # Fall back to str(fn) for callables (or values) without __name__.
        fn_name = getattr(fn, '__name__', None) or str(fn)
        super().__init__('%s failed after %d tries' % (fn_name, tries))
        self.exceptions = exceptions
def retry_with_backoff(fn, tries=10, wait=0.5, exception_classes=(Exception,)):
    """Call *fn* up to *tries* times with exponential backoff.

    The delay before attempt ``t+1`` is ``wait * 1.5**t``.  Returns the
    first successful result; raises :class:`RetryError` (carrying all
    caught exceptions) when every attempt fails.
    """
    exceptions = []
    for attempt in range(tries):
        try:
            return fn()
        except exception_classes as e:
            exceptions.append(e)
            # BUG FIX: previously the backoff sleep also ran after the final
            # failed attempt, delaying the raise for no benefit.
            if attempt < tries - 1:
                time.sleep(wait * (1.5**attempt))
    raise RetryError(fn, tries, exceptions)
| 27.186047 | 79 | 0.640719 | import re
import time
from django.conf import settings
from django.utils.timezone import make_aware, make_naive, utc
re_pattern = re.compile('[^\u0000-\uD7FF\uE000-\uFFFF]+', re.UNICODE)
def sanitize_unicode(u):
# in the cached data fields.
return re_pattern.sub(' ', u)
def could_be_utc(dt):
if settings.USE_TZ:
return make_aware(dt, utc)
else:
if dt.tzinfo:
return make_naive(dt, utc)
else:
return dt
class RetryError(Exception):
def __init__(self, fn, tries, exceptions):
name = getattr(fn, '__name__', None) or str(fn)
super().__init__('%s failed after %d tries' % (name, tries))
self.exceptions = exceptions
def retry_with_backoff(fn, tries=10, wait=0.5, exception_classes=(Exception,)):
exceptions = []
for t in range(tries):
try:
return fn()
except exception_classes as e:
exceptions.append(e)
time.sleep(wait * (1.5**t))
raise RetryError(fn, tries, exceptions)
| true | true |
f73021df6f3092bb9478c485785dab873f73a920 | 7,244 | py | Python | pynodegl-utils/pynodegl_utils/misc.py | gopro/gopro-lib-node.gl | 60d163cf65385b772f1d83d125f0bfe09db5352b | [
"Apache-2.0"
] | 45 | 2017-02-07T13:13:52.000Z | 2022-03-18T07:12:39.000Z | pynodegl-utils/pynodegl_utils/misc.py | mrobertseidowsky-gpsw/gopro-lib-node.gl | fbe427e4ea108468a63cde5920cf6f6ce03478bc | [
"Apache-2.0"
] | 148 | 2017-02-02T18:35:32.000Z | 2022-03-28T13:53:22.000Z | pynodegl-utils/pynodegl_utils/misc.py | mrobertseidowsky-gpsw/gopro-lib-node.gl | fbe427e4ea108468a63cde5920cf6f6ce03478bc | [
"Apache-2.0"
] | 28 | 2017-02-01T10:06:47.000Z | 2022-03-18T07:12:26.000Z | #
# Copyright 2016 GoPro Inc.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import os.path as op
import tempfile
import platform
import math
import inspect
import json
import subprocess
import random
import pynodegl as ngl
from collections import namedtuple
def scene(**controls):
    """Decorator turning a scene-building function into a UI-ready entry point.

    *controls* maps parameter names of the decorated function to widget
    specs (``scene.Range``, ``scene.Color``, ...) that the viewer UI uses
    to build its controls.  The wrapper accepts either a plain dict of
    settings or a SceneCfg and returns the config dict with the built
    scene stored under the ``'scene'`` key.
    """
    def real_decorator(scene_func):
        def func_wrapper(idict=None, **extra_args):
            # Accept a plain dict, a SceneCfg instance, or nothing at all.
            if idict is None:
                idict = {}
            elif isinstance(idict, SceneCfg):
                idict = idict.as_dict()
            scene_cfg = SceneCfg(**idict)
            scene = scene_func(scene_cfg, **extra_args)
            odict = scene_cfg.as_dict()
            odict['scene'] = scene
            return odict

        # Construct widgets specs
        widgets_specs = []
        func_specs = inspect.getfullargspec(scene_func)
        if func_specs.defaults:
            nb_optionnals = len(func_specs.defaults)
            for i, key in enumerate(func_specs.args[-nb_optionnals:]):

                # Set controller defaults according to the function prototype
                control = controls.get(key)
                if control is not None:
                    default = func_specs.defaults[i]
                    ctl_id = control.__class__.__name__
                    ctl_data = control._asdict()
                    widgets_specs.append((key, default, ctl_id, ctl_data))

        # Transfers the widget specs to the UI.
        # We could use the return value but it's better if the user can still
        # call its decorated scene function transparently inside his own code
        # without getting garbage along the return value.
        func_wrapper.widgets_specs = widgets_specs

        # Flag the scene as a scene function so it's registered in the UI.
        func_wrapper.iam_a_ngl_scene_func = True

        # Inherit doc from original function
        func_wrapper.__doc__ = scene_func.__doc__

        return func_wrapper

    return real_decorator
# Widget spec types understood by the viewer UI; each namedtuple instance
# passed to @scene(...) describes how one parameter should be rendered.
scene.Range = namedtuple('Range', 'range unit_base', defaults=([0, 1], 1))
scene.Vector = namedtuple('Vector', 'n minv maxv', defaults=(None, None))
scene.Color = namedtuple('Color', '')
scene.Bool = namedtuple('Bool', '')
scene.File = namedtuple('File', 'filter', defaults=('',))
scene.List = namedtuple('List', 'choices')
scene.Text = namedtuple('Text', '')
class Media:
    """A media file whose dimensions, duration and framerate are probed
    once with ffprobe at construction time."""

    def __init__(self, filename):
        self._filename = filename
        self._set_media_dimensions()

    def _set_media_dimensions(self):
        # Probe the first video stream; requires ffprobe in $PATH.
        data = subprocess.check_output(['ffprobe', '-v', '0',
                                        '-select_streams', 'v:0',
                                        '-of', 'json',
                                        '-show_streams', '-show_format',
                                        self._filename])
        data = json.loads(data)
        st = data['streams'][0]
        self._dimensions = (st['width'], st['height'])
        # Formats without a duration entry default to 1 second.
        self._duration = float(data['format'].get('duration', 1))
        self._framerate = tuple(int(x) for x in st['avg_frame_rate'].split('/'))

    @property
    def filename(self):
        # Path passed at construction time.
        return self._filename

    @property
    def width(self):
        return self._dimensions[0]

    @property
    def height(self):
        return self._dimensions[1]

    @property
    def dimensions(self):
        # (width, height) tuple.
        return self._dimensions

    @property
    def duration(self):
        # Duration in seconds (float).
        return self._duration

    @property
    def framerate(self):
        # Framerate as a (numerator, denominator) tuple.
        return self._framerate

    @property
    def framerate_float(self):
        # Framerate as a single float value.
        return self._framerate[0] / float(self._framerate[1])
def get_nodegl_tempdir():
    """Return the nodegl scratch directory under the system temp dir,
    creating it on first use."""
    tmp_path = op.join(tempfile.gettempdir(), 'nodegl')
    os.makedirs(tmp_path, exist_ok=True)
    return tmp_path
class SceneCfg:
    """Scene configuration: rendering settings, media list and shader access.

    When no media is supplied, a default test clip is generated with
    ffmpeg's testsrc2 source (requires ffmpeg in $PATH).
    """

    # Default media generated on demand in the nodegl temp dir.
    _DEFAULT_MEDIA_FILE = op.join(get_nodegl_tempdir(), 'ngl-media.mp4')

    # Every key below becomes an instance attribute, overridable via kwargs.
    _DEFAULT_FIELDS = {
        'aspect_ratio': (16, 9),
        'duration': 30.0,
        'framerate': (60, 1),
        'backend': 'opengl',
        'samples': 0,
        'system': platform.system(),
        'files': [],
        'medias': None,
        'clear_color': (0.0, 0.0, 0.0, 1.0),
    }

    def __init__(self, **kwargs):
        for field, def_val in self._DEFAULT_FIELDS.items():
            val = kwargs.get(field, def_val)
            setattr(self, field, val)

        if self.medias is None:
            media_file = self._DEFAULT_MEDIA_FILE
            if not op.exists(self._DEFAULT_MEDIA_FILE):
                # Generate a test pattern clip matching duration/framerate.
                ret = subprocess.call(['ffmpeg', '-nostdin', '-nostats', '-f', 'lavfi', '-i',
                                       'testsrc2=d=%d:r=%d/%d' % (int(math.ceil(self.duration)),
                                                                  self.framerate[0], self.framerate[1]),
                                       media_file])
                if ret:
                    raise Exception('Unable to create a media file using ffmpeg (ret=%d)' % ret)
            self.medias = [Media(media_file)]

        # Predictible random number generator
        self.rng = random.Random(0)

    @property
    def aspect_ratio_float(self):
        # Aspect ratio as a single float (w/h).
        return self.aspect_ratio[0] / float(self.aspect_ratio[1])

    def as_dict(self):
        """Return the configuration fields as a plain dict."""
        odict = {}
        for field in self._DEFAULT_FIELDS.keys():
            odict[field] = getattr(self, field)
        return odict

    def _get_shader(self, name, stype, shader_path):
        # Read '<name>.<stype>' from shader_path (defaults to the bundled
        # examples/shaders directory).
        filename = f'{name}.{stype}'
        if shader_path is None:
            shader_path = op.join(op.dirname(__file__), 'examples', 'shaders')
        with open(op.join(shader_path, filename)) as f:
            return f.read()

    def get_frag(self, name, shader_path=None):
        """Return the fragment shader source for *name*."""
        return self._get_shader(name, 'frag', shader_path)

    def get_vert(self, name, shader_path=None):
        """Return the vertex shader source for *name*."""
        return self._get_shader(name, 'vert', shader_path)

    def get_comp(self, name, shader_path=None):
        """Return the compute shader source for *name*."""
        return self._get_shader(name, 'comp', shader_path)
def get_viewport(width, height, aspect_ratio):
    """Compute the largest centered viewport of the given aspect ratio that
    fits inside a width x height window.

    Returns an (x, y, w, h) tuple.
    """
    ar_w, ar_h = aspect_ratio
    # Try filling the full width first; fall back to full height when the
    # resulting viewport would be too tall.
    view_w = width
    view_h = width * ar_h / ar_w
    if view_h > height:
        view_h = height
        view_w = height * ar_w / ar_h
    offset_x = (width - view_w) // 2
    offset_y = (height - view_h) // 2
    return (offset_x, offset_y, view_w, view_h)
def get_backend(backend):
    """Translate a backend name ('opengl' or 'opengles') to the matching
    pynodegl constant.

    Raises KeyError for any other name (same as the original dict lookup).
    """
    if backend == 'opengl':
        return ngl.BACKEND_OPENGL
    if backend == 'opengles':
        return ngl.BACKEND_OPENGLES
    raise KeyError(backend)
| 33.077626 | 104 | 0.609194 |
import os
import os.path as op
import tempfile
import platform
import math
import inspect
import json
import subprocess
import random
import pynodegl as ngl
from collections import namedtuple
def scene(**controls):
def real_decorator(scene_func):
def func_wrapper(idict=None, **extra_args):
if idict is None:
idict = {}
elif isinstance(idict, SceneCfg):
idict = idict.as_dict()
scene_cfg = SceneCfg(**idict)
scene = scene_func(scene_cfg, **extra_args)
odict = scene_cfg.as_dict()
odict['scene'] = scene
return odict
widgets_specs = []
func_specs = inspect.getfullargspec(scene_func)
if func_specs.defaults:
nb_optionnals = len(func_specs.defaults)
for i, key in enumerate(func_specs.args[-nb_optionnals:]):
control = controls.get(key)
if control is not None:
default = func_specs.defaults[i]
ctl_id = control.__class__.__name__
ctl_data = control._asdict()
widgets_specs.append((key, default, ctl_id, ctl_data))
# call its decorated scene function transparently inside his own code
# without getting garbage along the return value.
func_wrapper.widgets_specs = widgets_specs
# Flag the scene as a scene function so it's registered in the UI.
func_wrapper.iam_a_ngl_scene_func = True
func_wrapper.__doc__ = scene_func.__doc__
return func_wrapper
return real_decorator
scene.Range = namedtuple('Range', 'range unit_base', defaults=([0, 1], 1))
scene.Vector = namedtuple('Vector', 'n minv maxv', defaults=(None, None))
scene.Color = namedtuple('Color', '')
scene.Bool = namedtuple('Bool', '')
scene.File = namedtuple('File', 'filter', defaults=('',))
scene.List = namedtuple('List', 'choices')
scene.Text = namedtuple('Text', '')
class Media:
def __init__(self, filename):
self._filename = filename
self._set_media_dimensions()
def _set_media_dimensions(self):
data = subprocess.check_output(['ffprobe', '-v', '0',
'-select_streams', 'v:0',
'-of', 'json',
'-show_streams', '-show_format',
self._filename])
data = json.loads(data)
st = data['streams'][0]
self._dimensions = (st['width'], st['height'])
self._duration = float(data['format'].get('duration', 1))
self._framerate = tuple(int(x) for x in st['avg_frame_rate'].split('/'))
@property
def filename(self):
return self._filename
@property
def width(self):
return self._dimensions[0]
@property
def height(self):
return self._dimensions[1]
@property
def dimensions(self):
return self._dimensions
@property
def duration(self):
return self._duration
@property
def framerate(self):
return self._framerate
@property
def framerate_float(self):
return self._framerate[0] / float(self._framerate[1])
def get_nodegl_tempdir():
tmpdir = op.join(tempfile.gettempdir(), 'nodegl')
os.makedirs(tmpdir, exist_ok=True)
return tmpdir
class SceneCfg:
_DEFAULT_MEDIA_FILE = op.join(get_nodegl_tempdir(), 'ngl-media.mp4')
_DEFAULT_FIELDS = {
'aspect_ratio': (16, 9),
'duration': 30.0,
'framerate': (60, 1),
'backend': 'opengl',
'samples': 0,
'system': platform.system(),
'files': [],
'medias': None,
'clear_color': (0.0, 0.0, 0.0, 1.0),
}
def __init__(self, **kwargs):
for field, def_val in self._DEFAULT_FIELDS.items():
val = kwargs.get(field, def_val)
setattr(self, field, val)
if self.medias is None:
media_file = self._DEFAULT_MEDIA_FILE
if not op.exists(self._DEFAULT_MEDIA_FILE):
ret = subprocess.call(['ffmpeg', '-nostdin', '-nostats', '-f', 'lavfi', '-i',
'testsrc2=d=%d:r=%d/%d' % (int(math.ceil(self.duration)),
self.framerate[0], self.framerate[1]),
media_file])
if ret:
raise Exception('Unable to create a media file using ffmpeg (ret=%d)' % ret)
self.medias = [Media(media_file)]
self.rng = random.Random(0)
@property
def aspect_ratio_float(self):
return self.aspect_ratio[0] / float(self.aspect_ratio[1])
def as_dict(self):
odict = {}
for field in self._DEFAULT_FIELDS.keys():
odict[field] = getattr(self, field)
return odict
def _get_shader(self, name, stype, shader_path):
filename = f'{name}.{stype}'
if shader_path is None:
shader_path = op.join(op.dirname(__file__), 'examples', 'shaders')
with open(op.join(shader_path, filename)) as f:
return f.read()
def get_frag(self, name, shader_path=None):
return self._get_shader(name, 'frag', shader_path)
def get_vert(self, name, shader_path=None):
return self._get_shader(name, 'vert', shader_path)
def get_comp(self, name, shader_path=None):
return self._get_shader(name, 'comp', shader_path)
def get_viewport(width, height, aspect_ratio):
view_width = width
view_height = width * aspect_ratio[1] / aspect_ratio[0]
if view_height > height:
view_height = height
view_width = height * aspect_ratio[0] / aspect_ratio[1]
view_x = (width - view_width) // 2
view_y = (height - view_height) // 2
return (view_x, view_y, view_width, view_height)
def get_backend(backend):
backend_map = {
'opengl': ngl.BACKEND_OPENGL,
'opengles': ngl.BACKEND_OPENGLES,
}
return backend_map[backend]
| true | true |
f73022031f62389d37c109bce546ac2a6294bc50 | 6,377 | py | Python | main_pretrain.py | xwyzsn/solo-learn | 16d021d8053439a3de205337ab2a11d191500b09 | [
"MIT"
] | 693 | 2021-05-31T15:48:32.000Z | 2022-03-31T17:12:46.000Z | main_pretrain.py | xwyzsn/solo-learn | 16d021d8053439a3de205337ab2a11d191500b09 | [
"MIT"
] | 151 | 2021-06-15T00:22:57.000Z | 2022-03-27T15:17:02.000Z | main_pretrain.py | xwyzsn/solo-learn | 16d021d8053439a3de205337ab2a11d191500b09 | [
"MIT"
] | 79 | 2021-06-02T10:31:15.000Z | 2022-03-25T01:25:09.000Z | # Copyright 2021 solo-learn development team.
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies
# or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import os
from pprint import pprint
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import LearningRateMonitor
from pytorch_lightning.loggers import WandbLogger
from solo.args.setup import parse_args_pretrain
from solo.methods import METHODS
from solo.utils.auto_resumer import AutoResumer
try:
from solo.methods.dali import PretrainABC
except ImportError as e:
print(e)
_dali_avaliable = False
else:
_dali_avaliable = True
try:
from solo.utils.auto_umap import AutoUMAP
except ImportError:
_umap_available = False
else:
_umap_available = True
import types
from solo.utils.checkpointer import Checkpointer
from solo.utils.classification_dataloader import prepare_data as prepare_data_classification
from solo.utils.pretrain_dataloader import (
prepare_dataloader,
prepare_datasets,
prepare_n_crop_transform,
prepare_transform,
)
def main():
    """Entry point: build the pretraining method, dataloaders, logger and
    callbacks from the CLI arguments, then run the Lightning trainer."""
    seed_everything(5)

    args = parse_args_pretrain()

    assert args.method in METHODS, f"Choose from {METHODS.keys()}"

    if args.num_large_crops != 2:
        assert args.method == "wmse"

    MethodClass = METHODS[args.method]
    if args.dali:
        assert (
            _dali_avaliable
        ), "Dali is not currently avaiable, please install it first with [dali]."
        # Dynamically mix in the DALI data-loading behaviour.
        MethodClass = types.new_class(f"Dali{MethodClass.__name__}", (PretrainABC, MethodClass))

    model = MethodClass(**args.__dict__)

    # pretrain dataloader
    if not args.dali:
        # asymmetric augmentations
        if args.unique_augs > 1:
            transform = [
                prepare_transform(args.dataset, **kwargs) for kwargs in args.transform_kwargs
            ]
        else:
            transform = [prepare_transform(args.dataset, **args.transform_kwargs)]

        transform = prepare_n_crop_transform(transform, num_crops_per_aug=args.num_crops_per_aug)

        if args.debug_augmentations:
            print("Transforms:")
            pprint(transform)

        train_dataset = prepare_datasets(
            args.dataset,
            transform,
            data_dir=args.data_dir,
            train_dir=args.train_dir,
            no_labels=args.no_labels,
        )
        train_loader = prepare_dataloader(
            train_dataset, batch_size=args.batch_size, num_workers=args.num_workers
        )

    # normal dataloader for when it is available
    if args.dataset == "custom" and (args.no_labels or args.val_dir is None):
        val_loader = None
    elif args.dataset in ["imagenet100", "imagenet"] and args.val_dir is None:
        val_loader = None
    else:
        _, val_loader = prepare_data_classification(
            args.dataset,
            data_dir=args.data_dir,
            train_dir=args.train_dir,
            val_dir=args.val_dir,
            batch_size=args.batch_size,
            num_workers=args.num_workers,
        )

    callbacks = []

    # wandb logging
    # BUG FIX: wandb_logger was only bound inside the `if args.wandb:` branch
    # but is read unconditionally when constructing the Trainer below,
    # raising NameError whenever wandb logging is disabled.
    wandb_logger = None
    if args.wandb:
        wandb_logger = WandbLogger(
            name=args.name,
            project=args.project,
            entity=args.entity,
            offline=args.offline,
        )
        wandb_logger.watch(model, log="gradients", log_freq=100)
        wandb_logger.log_hyperparams(args)

        # lr logging
        lr_monitor = LearningRateMonitor(logging_interval="epoch")
        callbacks.append(lr_monitor)

    if args.save_checkpoint:
        # save checkpoint on last epoch only
        ckpt = Checkpointer(
            args,
            logdir=os.path.join(args.checkpoint_dir, args.method),
            frequency=args.checkpoint_frequency,
        )
        callbacks.append(ckpt)

    if args.auto_umap:
        assert (
            _umap_available
        ), "UMAP is not currently avaiable, please install it first with [umap]."
        auto_umap = AutoUMAP(
            args,
            logdir=os.path.join(args.auto_umap_dir, args.method),
            frequency=args.auto_umap_frequency,
        )
        callbacks.append(auto_umap)

    # 1.7 will deprecate resume_from_checkpoint, but for the moment
    # the argument is the same, but we need to pass it as ckpt_path to trainer.fit
    ckpt_path = None
    if args.auto_resume and args.resume_from_checkpoint is None:
        auto_resumer = AutoResumer(
            checkpoint_dir=os.path.join(args.checkpoint_dir, args.method),
            max_hours=args.auto_resumer_max_hours,
        )
        resume_from_checkpoint = auto_resumer.find_checkpoint(args)
        if resume_from_checkpoint is not None:
            print(
                "Resuming from previous checkpoint that matches specifications:",
                f"'{resume_from_checkpoint}'",
            )
            ckpt_path = resume_from_checkpoint
    elif args.resume_from_checkpoint is not None:
        ckpt_path = args.resume_from_checkpoint
        del args.resume_from_checkpoint

    trainer = Trainer.from_argparse_args(
        args,
        logger=wandb_logger if args.wandb else None,
        callbacks=callbacks,
        enable_checkpointing=False,
    )

    if args.dali:
        trainer.fit(model, val_dataloaders=val_loader, ckpt_path=ckpt_path)
    else:
        trainer.fit(model, train_loader, val_loader, ckpt_path=ckpt_path)
if __name__ == "__main__":
main()
| 33.740741 | 97 | 0.682923 |
import os
from pprint import pprint
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import LearningRateMonitor
from pytorch_lightning.loggers import WandbLogger
from solo.args.setup import parse_args_pretrain
from solo.methods import METHODS
from solo.utils.auto_resumer import AutoResumer
try:
from solo.methods.dali import PretrainABC
except ImportError as e:
print(e)
_dali_avaliable = False
else:
_dali_avaliable = True
try:
from solo.utils.auto_umap import AutoUMAP
except ImportError:
_umap_available = False
else:
_umap_available = True
import types
from solo.utils.checkpointer import Checkpointer
from solo.utils.classification_dataloader import prepare_data as prepare_data_classification
from solo.utils.pretrain_dataloader import (
prepare_dataloader,
prepare_datasets,
prepare_n_crop_transform,
prepare_transform,
)
def main():
seed_everything(5)
args = parse_args_pretrain()
assert args.method in METHODS, f"Choose from {METHODS.keys()}"
if args.num_large_crops != 2:
assert args.method == "wmse"
MethodClass = METHODS[args.method]
if args.dali:
assert (
_dali_avaliable
), "Dali is not currently avaiable, please install it first with [dali]."
MethodClass = types.new_class(f"Dali{MethodClass.__name__}", (PretrainABC, MethodClass))
model = MethodClass(**args.__dict__)
if not args.dali:
if args.unique_augs > 1:
transform = [
prepare_transform(args.dataset, **kwargs) for kwargs in args.transform_kwargs
]
else:
transform = [prepare_transform(args.dataset, **args.transform_kwargs)]
transform = prepare_n_crop_transform(transform, num_crops_per_aug=args.num_crops_per_aug)
if args.debug_augmentations:
print("Transforms:")
pprint(transform)
train_dataset = prepare_datasets(
args.dataset,
transform,
data_dir=args.data_dir,
train_dir=args.train_dir,
no_labels=args.no_labels,
)
train_loader = prepare_dataloader(
train_dataset, batch_size=args.batch_size, num_workers=args.num_workers
)
if args.dataset == "custom" and (args.no_labels or args.val_dir is None):
val_loader = None
elif args.dataset in ["imagenet100", "imagenet"] and args.val_dir is None:
val_loader = None
else:
_, val_loader = prepare_data_classification(
args.dataset,
data_dir=args.data_dir,
train_dir=args.train_dir,
val_dir=args.val_dir,
batch_size=args.batch_size,
num_workers=args.num_workers,
)
callbacks = []
if args.wandb:
wandb_logger = WandbLogger(
name=args.name,
project=args.project,
entity=args.entity,
offline=args.offline,
)
wandb_logger.watch(model, log="gradients", log_freq=100)
wandb_logger.log_hyperparams(args)
lr_monitor = LearningRateMonitor(logging_interval="epoch")
callbacks.append(lr_monitor)
if args.save_checkpoint:
ckpt = Checkpointer(
args,
logdir=os.path.join(args.checkpoint_dir, args.method),
frequency=args.checkpoint_frequency,
)
callbacks.append(ckpt)
if args.auto_umap:
assert (
_umap_available
), "UMAP is not currently avaiable, please install it first with [umap]."
auto_umap = AutoUMAP(
args,
logdir=os.path.join(args.auto_umap_dir, args.method),
frequency=args.auto_umap_frequency,
)
callbacks.append(auto_umap)
ckpt_path = None
if args.auto_resume and args.resume_from_checkpoint is None:
auto_resumer = AutoResumer(
checkpoint_dir=os.path.join(args.checkpoint_dir, args.method),
max_hours=args.auto_resumer_max_hours,
)
resume_from_checkpoint = auto_resumer.find_checkpoint(args)
if resume_from_checkpoint is not None:
print(
"Resuming from previous checkpoint that matches specifications:",
f"'{resume_from_checkpoint}'",
)
ckpt_path = resume_from_checkpoint
elif args.resume_from_checkpoint is not None:
ckpt_path = args.resume_from_checkpoint
del args.resume_from_checkpoint
trainer = Trainer.from_argparse_args(
args,
logger=wandb_logger if args.wandb else None,
callbacks=callbacks,
enable_checkpointing=False,
)
if args.dali:
trainer.fit(model, val_dataloaders=val_loader, ckpt_path=ckpt_path)
else:
trainer.fit(model, train_loader, val_loader, ckpt_path=ckpt_path)
if __name__ == "__main__":
main()
| true | true |
f730226ebacc7664173975d612d2e800b7ac3472 | 20,723 | py | Python | Blackjack/blackjack.py | nairoukh-code/Python_Projects | 9a0e2adb6e352b301ed9e542be9c9f1cd16b95b0 | [
"MIT"
] | null | null | null | Blackjack/blackjack.py | nairoukh-code/Python_Projects | 9a0e2adb6e352b301ed9e542be9c9f1cd16b95b0 | [
"MIT"
] | null | null | null | Blackjack/blackjack.py | nairoukh-code/Python_Projects | 9a0e2adb6e352b301ed9e542be9c9f1cd16b95b0 | [
"MIT"
] | null | null | null | # took some ideas from this source ( https://dev.to/nexttech/build-a-blackjack-command-line-game-3o4b )
import random
from enum import Enum
from time import time
class Game_Status(Enum):
    """Outcome of a completed blackjack round, from the player's perspective."""

    WIN = 1
    LOSE = 2
    PUSH = 3
class Card:
    """A single playing card: a suit plus a face value string."""

    def __init__(self, suit, value):
        self.suit = suit
        self.value = value

    def __repr__(self):
        return f"{self.value} of {self.suit}"
class Deck:
    """A six-deck blackjack shoe that rebuilds itself when it runs low."""

    # Thirteen values per suit; tens appear four times because J/Q/K are
    # modelled as additional "10" cards in this program.
    SUITS = ("Spades", "Clubs", "Hearts", "Diamonds")
    VALUES = ("A", "2", "3", "4", "5", "6", "7", "8", "9", "10", "10", "10", "10")

    def __init__(self):
        # Suit-major order before the shuffle, exactly as the original built it.
        self.cards = [Card(s, v) for s in self.SUITS for v in self.VALUES] * 6
        random.shuffle(self.cards)

    def deal(self):
        """Pop and return the next card, reshuffling a fresh shoe once <= 1 card remains."""
        if len(self.cards) <= 1:
            self.__init__()
        return self.cards.pop(0)
class Hand:
    """A blackjack hand: a list of cards plus its best point value."""

    def __init__(self, dealer=False):
        self.dealer = dealer
        self.cards = []
        self.value = 0

    def add_card(self, card):
        self.cards.append(card)

    def calculate_value(self):
        """Recompute and return the hand value, demoting aces from 11 to 1 while busted."""
        total = 0
        aces = 0
        for card in self.cards:
            if card.value.isnumeric():
                total += int(card.value)
            elif card.value == "A":
                aces += 1
                total += 11
            else:
                # Any other face (J/Q/K style) is worth ten.
                total += 10
        # Demote aces one at a time while the hand is over 21.
        while 12 > aces > 0 and total > 21:
            total -= 10
            aces -= 1
        self.value = total
        return self.value

    def get_value(self):
        """Return the up-to-date hand value."""
        self.calculate_value()
        return self.value

    def display(self):
        """Print the hand; a dealer hand keeps its first card hidden."""
        if self.dealer:
            print("hidden")
            print(self.cards[1])
        else:
            for card in self.cards:
                print(card)
            print("Value:", self.get_value())

    def final_display(self):
        """Print every card plus the hand value (used at showdown)."""
        for card in self.cards:
            print(card)
        print("Value:", self.get_value())

    def is_busted(self):
        """True when the hand value exceeds 21."""
        return self.get_value() > 21

    def can_split(self):
        """True when the first two cards share a face value."""
        return self.cards[0].value == self.cards[1].value

    def can_not_split(self):
        return not self.can_split()

    def is_push(self, other):
        return self.get_value() == other.get_value()

    def player_win(self, other):
        return self.get_value() > other.get_value()

    def player_loss(self, other):
        return self.get_value() < other.get_value()

    def check_for_blackjack(self):
        """True for a two-card 21 (a natural blackjack)."""
        return self.get_value() == 21 and len(self.cards) == 2
class Game:
    """Interactive console blackjack: hit, stand, double down and split
    against a dealer who draws to 17."""

    def print_status(self, status: Game_Status):
        """Print the outcome message for a finished round."""
        if status == Game_Status.WIN:
            print(" you win ! ")
        elif status == Game_Status.LOSE:
            print(" you lose !")
        elif status == Game_Status.PUSH:
            print(" push !")

    def play(self):
        """Run interactive rounds until the player declines another game.

        Fixes relative to the original version:
        * Full-word "doubledown"/"split" input is now recognised; the old
          branch comparisons used padded strings (" doubledown ", " split ")
          that the validated, lower-cased input could never equal, so those
          branches were unreachable for full-word input.
        * Busting the second split hand now terminates the second hand's
          input loop (the wrong flag, not_finish_first_loop, was cleared).
        * The second split hand's bust message no longer says "first hand".
        """
        playing = True
        while playing:
            # Fresh shoe and hands each round.
            self.deck = Deck()
            self.player_hand = Hand()
            self.dealer_hand = Hand(dealer=True)
            self.first_hand = Hand()
            self.second_hand = Hand()
            for i in range(2):
                self.player_hand.add_card(self.deck.deal())
                self.dealer_hand.add_card(self.deck.deal())
            print("Your hand is:")
            self.player_hand.display()
            print()
            print("Dealer's hand is:")
            self.dealer_hand.display()
            game_over = False
            # Doubling down is only allowed before the first hit.
            can_play_double_down = True
            while not game_over:
                player_has_blackjack = self.player_hand.check_for_blackjack()
                dealer_has_blackjack = self.dealer_hand.check_for_blackjack()
                if player_has_blackjack or dealer_has_blackjack:
                    self.show_blackjack_results(player_has_blackjack, dealer_has_blackjack)
                    break
                choice = input("Please choose [Hit / Stand / DoubleDown/ Split] by typing the option").lower()
                while choice not in ["h", "s", "d", "p", "hit", "stand", "doubledown", "split"]:
                    choice = input("Please enter 'hit' or 'stand' or 'doubledown' or 'split' (or H/S/D/p) ").lower()
                if choice in ['hit', 'h']:
                    self.player_hand.add_card(self.deck.deal())
                    self.player_hand.display()
                    can_play_double_down = False
                    if self.player_hand.is_busted():
                        print("You have lost!")
                        game_over = True
                elif choice in ["stand", "s"]:
                    # Dealer draws to 17, then the hands are compared.
                    while self.dealer_hand.get_value() < 17:
                        self.dealer_hand.add_card(self.deck.deal())
                    print("Final Results")
                    print("Your hand:", self.player_hand.get_value())
                    print("Dealer's hand:", self.dealer_hand.get_value())
                    if self.player_hand.is_busted():
                        self.print_status(Game_Status.LOSE)
                    elif self.dealer_hand.is_busted():
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.player_win(self.dealer_hand):
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.is_push(self.dealer_hand):
                        self.print_status(Game_Status.PUSH)
                    elif self.player_hand.player_loss(self.dealer_hand):
                        self.print_status(Game_Status.LOSE)
                    self.display_result()
                    game_over = True
                elif choice in ["doubledown", "d"] and can_play_double_down:  # FIX: was " doubledown "
                    # One extra card, then the dealer plays out immediately.
                    self.player_hand.add_card(self.deck.deal())
                    while self.dealer_hand.get_value() < 17:
                        self.dealer_hand.add_card(self.deck.deal())
                    if self.player_hand.is_busted():
                        self.print_status(Game_Status.LOSE)
                    elif self.dealer_hand.is_busted():
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.player_win(self.dealer_hand):
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.player_loss(self.dealer_hand):
                        self.print_status(Game_Status.LOSE)
                    elif self.player_hand.is_push(self.dealer_hand):
                        self.print_status(Game_Status.PUSH)
                    self.display_result()
                    game_over = True
                elif choice in ["doubledown", "d"] and not can_play_double_down:
                    print("you can not play double down")
                elif choice in ["split", "p"] and self.player_hand.can_split():  # FIX: was " split "
                    first_card = Card(self.player_hand.cards[0].suit, self.player_hand.cards[0].value)
                    second_card = Card(self.player_hand.cards[1].suit, self.player_hand.cards[1].value)
                    self.first_hand.add_card(first_card)
                    self.second_hand.add_card(second_card)
                    self.first_hand.add_card(self.deck.deal())
                    self.second_hand.add_card(self.deck.deal())
                    print("your first hand : ")
                    self.first_hand.final_display()
                    print("your second hand : ")
                    self.second_hand.final_display()
                    not_finish_first_loop = True
                    while not_finish_first_loop:
                        first_choice = input("Please choose [Hit / stand] for your first hand ").lower()
                        while first_choice not in ["h", "s", "hit", "stand"]:
                            first_choice = input("Please enter 'hit' or 'stand' (or H/S) for the first hand ").lower()
                        if first_choice in ['hit', 'h']:
                            self.first_hand.add_card(self.deck.deal())
                            self.first_hand.display()
                            if self.first_hand.is_busted():
                                print("You have lost in your first hand!")
                                not_finish_first_loop = False
                        else:
                            not_finish_first_loop = False
                    not_finish_second_loop = True
                    while not_finish_second_loop:
                        second_choice = input("Please choose [Hit / stand] for your second hand ").lower()
                        while second_choice not in ["h", "s", "hit", "stand"]:
                            second_choice = input("Please enter 'hit' or 'stand' (or H/S) for the second hand ").lower()
                        if second_choice in ['hit', 'h']:
                            self.second_hand.add_card(self.deck.deal())
                            self.second_hand.display()
                            if self.second_hand.is_busted():
                                print("You have lost in your second hand!")
                                # FIX: clear the *second* loop's flag (was not_finish_first_loop).
                                not_finish_second_loop = False
                        else:
                            not_finish_second_loop = False
                    if not not_finish_first_loop and not not_finish_second_loop:
                        while self.dealer_hand.get_value() < 17:
                            self.dealer_hand.add_card(self.deck.deal())
                        if self.dealer_hand.is_busted():
                            print("Final Results")
                            self.first_hand.final_display()
                            self.second_hand.final_display()
                            self.dealer_hand.final_display()
                            print(" you win in both hands")
                            game_over = True
                        else:
                            print("Final Results")
                            print("Your first hand:", self.first_hand.get_value())
                            print("Your second hand:", self.second_hand.get_value())
                            print("Dealer's hand:", self.dealer_hand.get_value())
                            if self.first_hand.is_busted():
                                print("you lost your first hand , your hand is over 21")
                            elif self.first_hand.player_win(self.dealer_hand):
                                print("You Win in your first hand!")
                            elif self.first_hand.player_loss(self.dealer_hand):
                                print("you lost your first hand ")
                            elif self.first_hand.is_push(self.dealer_hand):
                                print("push in the first hand!")
                            if self.second_hand.is_busted():
                                # FIX: message used to say "first hand".
                                print("you lost your second hand , your hand is over 21")
                            elif self.second_hand.player_loss(self.dealer_hand):
                                print("you lost your second hand ")
                            elif self.second_hand.player_win(self.dealer_hand):
                                print("You Win in your second hand!")
                            elif self.second_hand.is_push(self.dealer_hand):
                                print("push in the second hand!")
                            game_over = True
                elif choice in ["split", "p"] and self.player_hand.can_not_split():
                    print(" no you can not split")  # FIX: typo "splet"
            again = input("Play Again? [Y/N] ")
            while again.lower() not in ["y", "n"]:
                again = input("Please enter Y or N ")
            if again.lower() == "n":
                print("Thanks for playing!")
                playing = False
            else:
                playing = True

    def display_result(self):
        """Show both final hands after a round resolves."""
        print("player hand")
        self.player_hand.final_display()
        print("dealer hand")
        self.dealer_hand.final_display()

    def show_blackjack_results(self, player_has_blackjack, dealer_has_blackjack):
        """Announce the outcome when at least one side has a natural blackjack."""
        if player_has_blackjack and dealer_has_blackjack:
            print("Both players have blackjack! Draw!")
        elif player_has_blackjack:
            print("You have blackjack! You win!")
        elif dealer_has_blackjack:
            print("Dealer has blackjack! Dealer wins!")
class Result:
    """Win/loss/draw tallies for one (dealer up-card, player hand value) cell."""

    _COUNTERS = (
        "hit_win_count", "hit_loss_count", "hit_draw_count",
        "stand_win_count", "stand_loss_count", "stand_draw_count",
    )

    def __init__(self, dealer_card, player_hand_value):
        self.dealer_card = dealer_card
        self.player_hand_value = player_hand_value
        # All six outcome counters start at zero.
        for counter in self._COUNTERS:
            setattr(self, counter, 0)
class Simulation:
    """Monte-Carlo blackjack: each round makes one random hit-or-stand decision
    and tallies the outcome per (player hand value, dealer up-card) pair.

    Fixes relative to the original: total_stand_push accumulated
    hit_draw_count instead of stand_draw_count, and the "wis" typos in the
    summary output are corrected to "wins".
    """

    def __init__(self):
        self.results = []
        self.deck = Deck()

    def simulation_rounds(self, num_of_rounds):
        """Play num_of_rounds hands with a random hit/stand choice, then print tallies."""
        self.start = time()
        for round_no in range(num_of_rounds):  # renamed: 'round' shadowed the builtin
            self.player_hand = Hand()
            self.dealer_hand = Hand(dealer=True)
            for i in range(2):
                self.player_hand.add_card(self.deck.deal())
                self.dealer_hand.add_card(self.deck.deal())
            player_hand_value = self.player_hand.get_value()
            # Draw until the hand is at least 11 so the decision point is meaningful.
            while self.player_hand.get_value() < 11:
                self.player_hand.add_card(self.deck.deal())
                player_hand_value = self.player_hand.get_value()
            # Dealer plays the fixed house rule: hit until 17.
            while self.dealer_hand.get_value() < 17:
                self.dealer_hand.add_card(self.deck.deal())
            dealer_up_card = self.dealer_hand.cards[0].value
            # Pick hit or stand uniformly at random (never hit a made 21).
            actions = ["h", "s"]
            random.shuffle(actions)
            choice = actions.pop(0)
            if choice in ['h'] and player_hand_value != 21:
                self.player_hand.add_card(self.deck.deal())
                self.calculateResult('h', dealer_up_card, player_hand_value)
            else:
                self.calculateResult('s', dealer_up_card, player_hand_value)
        self.display_result()

    def calculateResult(self, action, dealer_up_card, player_hand_value):
        """Record the round's outcome in the Result cell for this situation.

        action is 'h' (hit once) or 's' (stand); the cell is created on first use.
        """
        result = self.if_there(dealer_up_card, player_hand_value)
        if result is None:
            result = Result(dealer_up_card, player_hand_value)
            self.results.append(result)
        if self.player_hand.is_busted():
            if action == 'h':
                result.hit_loss_count += 1
            else:
                result.stand_loss_count += 1
        elif self.dealer_hand.is_busted():
            if action == 'h':
                result.hit_win_count += 1
            else:
                result.stand_win_count += 1
        elif self.player_hand.check_for_blackjack():
            result.stand_win_count += 1
        elif self.player_hand.player_win(self.dealer_hand):
            if action == 'h':
                result.hit_win_count += 1
            else:
                result.stand_win_count += 1
        elif self.player_hand.is_push(self.dealer_hand):
            if action == 'h':
                result.hit_draw_count += 1
            else:
                result.stand_draw_count += 1
        elif self.player_hand.player_loss(self.dealer_hand):
            if action == 'h':
                result.hit_loss_count += 1
            else:
                result.stand_loss_count += 1

    def if_there(self, dealer_up_card, player_hand_value):
        """Return the existing Result cell for this (up-card, value) pair, or None."""
        for result in self.results:
            if result.dealer_card == dealer_up_card and result.player_hand_value == player_hand_value:
                return result
        return None

    def display_result(self):
        """Print the per-cell outcome table plus aggregate totals and elapsed time."""
        # Order rows by dealer up-card, then (stable sort) by player hand value.
        self.results.sort(key=lambda x: x.dealer_card)
        self.results.sort(key=lambda x: x.player_hand_value)
        total_wins = 0
        total_loss = 0
        total_push = 0
        total_hit_win = 0
        total_hit_loss = 0
        total_hit_push = 0
        total_stand_win = 0
        total_stand_loss = 0
        total_stand_push = 0
        counter = 1
        dash = '-' * 118
        print(dash)
        print('{:<12s}{:>12s}{:>19s}{:>12s}{:>12s}{:>9s}{:>13s}{:>14s}{:>8}'.format("Counter", "Player Card Value",
                                                                                    "Dealer Up Card", "Hit Win",
                                                                                    "Hit Lose",
                                                                                    "Push", "Stand win",
                                                                                    "Stand Loss", "Push"))
        print(dash)
        for result in self.results:
            print('{:>1}{:>20}{:>20}{:>15}{:>12}{:>12}{:>10}{:>13}{:>12}'.format(counter, result.player_hand_value,
                                                                                 result.dealer_card,
                                                                                 result.hit_win_count,
                                                                                 result.hit_loss_count,
                                                                                 result.hit_draw_count,
                                                                                 result.stand_win_count,
                                                                                 result.stand_loss_count,
                                                                                 result.stand_draw_count))
            counter += 1
            total_wins += result.hit_win_count + result.stand_win_count
            total_loss += result.hit_loss_count + result.stand_loss_count
            total_push += result.hit_draw_count + result.stand_draw_count
            total_hit_win += result.hit_win_count
            total_hit_loss += result.hit_loss_count
            total_hit_push += result.hit_draw_count
            total_stand_win += result.stand_win_count
            total_stand_loss += result.stand_loss_count
            # FIX: was accumulating hit_draw_count into the stand-push total.
            total_stand_push += result.stand_draw_count
        total = total_wins + total_loss + total_push
        print("total wins :", total_wins)
        print("total loss :", total_loss)
        print("total push :", total_push)
        print("total :", total)
        print()
        print("----------- details ------------")
        print("total hit wins :", total_hit_win)
        print("total hit loss :", total_hit_loss)
        print("total hit push :", total_hit_push)
        print("total stand wins :", total_stand_win)
        print("total stand loss :", total_stand_loss)
        print("total stand push :", total_stand_push)
        self.end = time()
        print("time " + str(self.end - self.start))
class OurStrategy(Simulation):
    """Simulation variant that hits only on a fixed (hand value, dealer up-card) table."""

    # Hit exactly when the dealer's up-card is in the set for the player's
    # hand value; otherwise stand.  Same pairs as the original if-chain.
    _HIT_TABLE = {
        11: {"2", "4", "5", "6", "7", "8", "9", "10"},
        12: {"2", "7", "8", "9", "10", "A"},
        13: {"5", "7", "8", "9"},
        14: {"9"},
        15: {"A"},
    }

    def __init__(self):
        # FIX: the original re-assigned self.results and built a second Deck
        # after super().__init__() had already initialised both.
        super().__init__()

    def simulation_rounds(self, num_of_rounds):
        """Play num_of_rounds hands using the fixed strategy, then print the tallies."""
        self.start = time()
        for round_no in range(num_of_rounds):
            self.player_hand = Hand()
            self.dealer_hand = Hand(dealer=True)
            for i in range(2):
                self.player_hand.add_card(self.deck.deal())
                self.dealer_hand.add_card(self.deck.deal())
            player_hand_value = self.player_hand.get_value()
            # Draw until the hand is at least 11 so the decision point is meaningful.
            while self.player_hand.get_value() < 11:
                self.player_hand.add_card(self.deck.deal())
                player_hand_value = self.player_hand.get_value()
            # Dealer plays the fixed house rule: hit until 17.
            while self.dealer_hand.get_value() < 17:
                self.dealer_hand.add_card(self.deck.deal())
            dealer_up_card = self.dealer_hand.cards[0].value
            if dealer_up_card in self._HIT_TABLE.get(player_hand_value, ()):
                self.player_hand.add_card(self.deck.deal())
                self.calculateResult('h', dealer_up_card, player_hand_value)
            else:
                self.calculateResult('s', dealer_up_card, player_hand_value)
        self.display_result()
if __name__ == "__main__":
    # Run the random-choice baseline and the fixed strategy back to back,
    # one million simulated hands each, printing each tally table.
    # FIX: stray dataset-dump residue fused onto the last statement was removed.
    x = Simulation()
    x.simulation_rounds(1000000)
    s = OurStrategy()
    s.simulation_rounds(1000000)
import random
from enum import Enum
from time import time
class Game_Status(Enum):
    """Outcome of a finished blackjack round, from the player's perspective."""
    WIN = 1
    LOSE = 2
    PUSH = 3
class Card:
    """A single playing card identified by its suit and face value."""

    def __init__(self, suit, value):
        # Both attributes are public; Deck and Hand read them directly.
        self.suit = suit
        self.value = value

    def __repr__(self):
        # e.g. "A of Spades"
        return f"{self.value} of {self.suit}"
class Deck:
    """A six-deck blackjack shoe that rebuilds itself when it runs low."""

    # Thirteen values per suit; tens appear four times because J/Q/K are
    # modelled as additional "10" cards in this program.
    SUITS = ("Spades", "Clubs", "Hearts", "Diamonds")
    VALUES = ("A", "2", "3", "4", "5", "6", "7", "8", "9", "10", "10", "10", "10")

    def __init__(self):
        # Suit-major order before the shuffle, exactly as the original built it.
        self.cards = [Card(s, v) for s in self.SUITS for v in self.VALUES] * 6
        random.shuffle(self.cards)

    def deal(self):
        """Pop and return the next card, reshuffling a fresh shoe once <= 1 card remains."""
        if len(self.cards) <= 1:
            self.__init__()
        return self.cards.pop(0)
class Hand:
    """A blackjack hand: a list of cards plus its best point value."""

    def __init__(self, dealer=False):
        self.dealer = dealer
        self.cards = []
        self.value = 0

    def add_card(self, card):
        self.cards.append(card)

    def calculate_value(self):
        """Recompute and return the hand value, demoting aces from 11 to 1 while busted."""
        total = 0
        aces = 0
        for card in self.cards:
            if card.value.isnumeric():
                total += int(card.value)
            elif card.value == "A":
                aces += 1
                total += 11
            else:
                # Any other face (J/Q/K style) is worth ten.
                total += 10
        # Demote aces one at a time while the hand is over 21.
        while 12 > aces > 0 and total > 21:
            total -= 10
            aces -= 1
        self.value = total
        return self.value

    def get_value(self):
        """Return the up-to-date hand value."""
        self.calculate_value()
        return self.value

    def display(self):
        """Print the hand; a dealer hand keeps its first card hidden."""
        if self.dealer:
            print("hidden")
            print(self.cards[1])
        else:
            for card in self.cards:
                print(card)
            print("Value:", self.get_value())

    def final_display(self):
        """Print every card plus the hand value (used at showdown)."""
        for card in self.cards:
            print(card)
        print("Value:", self.get_value())

    def is_busted(self):
        """True when the hand value exceeds 21."""
        return self.get_value() > 21

    def can_split(self):
        """True when the first two cards share a face value."""
        return self.cards[0].value == self.cards[1].value

    def can_not_split(self):
        return not self.can_split()

    def is_push(self, other):
        return self.get_value() == other.get_value()

    def player_win(self, other):
        return self.get_value() > other.get_value()

    def player_loss(self, other):
        return self.get_value() < other.get_value()

    def check_for_blackjack(self):
        """True for a two-card 21 (a natural blackjack)."""
        return self.get_value() == 21 and len(self.cards) == 2
class Game:
    """Interactive console blackjack: hit, stand, double down and split
    against a dealer who draws to 17."""

    def print_status(self, status: Game_Status):
        """Print the outcome message for a finished round."""
        if status == Game_Status.WIN:
            print(" you win ! ")
        elif status == Game_Status.LOSE:
            print(" you lose !")
        elif status == Game_Status.PUSH:
            print(" push !")

    def play(self):
        """Run interactive rounds until the player declines another game.

        Fixes relative to the original version:
        * Full-word "doubledown"/"split" input is now recognised; the old
          branch comparisons used padded strings (" doubledown ", " split ")
          that the validated, lower-cased input could never equal, so those
          branches were unreachable for full-word input.
        * Busting the second split hand now terminates the second hand's
          input loop (the wrong flag, not_finish_first_loop, was cleared).
        * The second split hand's bust message no longer says "first hand".
        """
        playing = True
        while playing:
            # Fresh shoe and hands each round.
            self.deck = Deck()
            self.player_hand = Hand()
            self.dealer_hand = Hand(dealer=True)
            self.first_hand = Hand()
            self.second_hand = Hand()
            for i in range(2):
                self.player_hand.add_card(self.deck.deal())
                self.dealer_hand.add_card(self.deck.deal())
            print("Your hand is:")
            self.player_hand.display()
            print()
            print("Dealer's hand is:")
            self.dealer_hand.display()
            game_over = False
            # Doubling down is only allowed before the first hit.
            can_play_double_down = True
            while not game_over:
                player_has_blackjack = self.player_hand.check_for_blackjack()
                dealer_has_blackjack = self.dealer_hand.check_for_blackjack()
                if player_has_blackjack or dealer_has_blackjack:
                    self.show_blackjack_results(player_has_blackjack, dealer_has_blackjack)
                    break
                choice = input("Please choose [Hit / Stand / DoubleDown/ Split] by typing the option").lower()
                while choice not in ["h", "s", "d", "p", "hit", "stand", "doubledown", "split"]:
                    choice = input("Please enter 'hit' or 'stand' or 'doubledown' or 'split' (or H/S/D/p) ").lower()
                if choice in ['hit', 'h']:
                    self.player_hand.add_card(self.deck.deal())
                    self.player_hand.display()
                    can_play_double_down = False
                    if self.player_hand.is_busted():
                        print("You have lost!")
                        game_over = True
                elif choice in ["stand", "s"]:
                    # Dealer draws to 17, then the hands are compared.
                    while self.dealer_hand.get_value() < 17:
                        self.dealer_hand.add_card(self.deck.deal())
                    print("Final Results")
                    print("Your hand:", self.player_hand.get_value())
                    print("Dealer's hand:", self.dealer_hand.get_value())
                    if self.player_hand.is_busted():
                        self.print_status(Game_Status.LOSE)
                    elif self.dealer_hand.is_busted():
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.player_win(self.dealer_hand):
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.is_push(self.dealer_hand):
                        self.print_status(Game_Status.PUSH)
                    elif self.player_hand.player_loss(self.dealer_hand):
                        self.print_status(Game_Status.LOSE)
                    self.display_result()
                    game_over = True
                elif choice in ["doubledown", "d"] and can_play_double_down:  # FIX: was " doubledown "
                    # One extra card, then the dealer plays out immediately.
                    self.player_hand.add_card(self.deck.deal())
                    while self.dealer_hand.get_value() < 17:
                        self.dealer_hand.add_card(self.deck.deal())
                    if self.player_hand.is_busted():
                        self.print_status(Game_Status.LOSE)
                    elif self.dealer_hand.is_busted():
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.player_win(self.dealer_hand):
                        self.print_status(Game_Status.WIN)
                    elif self.player_hand.player_loss(self.dealer_hand):
                        self.print_status(Game_Status.LOSE)
                    elif self.player_hand.is_push(self.dealer_hand):
                        self.print_status(Game_Status.PUSH)
                    self.display_result()
                    game_over = True
                elif choice in ["doubledown", "d"] and not can_play_double_down:
                    print("you can not play double down")
                elif choice in ["split", "p"] and self.player_hand.can_split():  # FIX: was " split "
                    first_card = Card(self.player_hand.cards[0].suit, self.player_hand.cards[0].value)
                    second_card = Card(self.player_hand.cards[1].suit, self.player_hand.cards[1].value)
                    self.first_hand.add_card(first_card)
                    self.second_hand.add_card(second_card)
                    self.first_hand.add_card(self.deck.deal())
                    self.second_hand.add_card(self.deck.deal())
                    print("your first hand : ")
                    self.first_hand.final_display()
                    print("your second hand : ")
                    self.second_hand.final_display()
                    not_finish_first_loop = True
                    while not_finish_first_loop:
                        first_choice = input("Please choose [Hit / stand] for your first hand ").lower()
                        while first_choice not in ["h", "s", "hit", "stand"]:
                            first_choice = input("Please enter 'hit' or 'stand' (or H/S) for the first hand ").lower()
                        if first_choice in ['hit', 'h']:
                            self.first_hand.add_card(self.deck.deal())
                            self.first_hand.display()
                            if self.first_hand.is_busted():
                                print("You have lost in your first hand!")
                                not_finish_first_loop = False
                        else:
                            not_finish_first_loop = False
                    not_finish_second_loop = True
                    while not_finish_second_loop:
                        second_choice = input("Please choose [Hit / stand] for your second hand ").lower()
                        while second_choice not in ["h", "s", "hit", "stand"]:
                            second_choice = input("Please enter 'hit' or 'stand' (or H/S) for the second hand ").lower()
                        if second_choice in ['hit', 'h']:
                            self.second_hand.add_card(self.deck.deal())
                            self.second_hand.display()
                            if self.second_hand.is_busted():
                                print("You have lost in your second hand!")
                                # FIX: clear the *second* loop's flag (was not_finish_first_loop).
                                not_finish_second_loop = False
                        else:
                            not_finish_second_loop = False
                    if not not_finish_first_loop and not not_finish_second_loop:
                        while self.dealer_hand.get_value() < 17:
                            self.dealer_hand.add_card(self.deck.deal())
                        if self.dealer_hand.is_busted():
                            print("Final Results")
                            self.first_hand.final_display()
                            self.second_hand.final_display()
                            self.dealer_hand.final_display()
                            print(" you win in both hands")
                            game_over = True
                        else:
                            print("Final Results")
                            print("Your first hand:", self.first_hand.get_value())
                            print("Your second hand:", self.second_hand.get_value())
                            print("Dealer's hand:", self.dealer_hand.get_value())
                            if self.first_hand.is_busted():
                                print("you lost your first hand , your hand is over 21")
                            elif self.first_hand.player_win(self.dealer_hand):
                                print("You Win in your first hand!")
                            elif self.first_hand.player_loss(self.dealer_hand):
                                print("you lost your first hand ")
                            elif self.first_hand.is_push(self.dealer_hand):
                                print("push in the first hand!")
                            if self.second_hand.is_busted():
                                # FIX: message used to say "first hand".
                                print("you lost your second hand , your hand is over 21")
                            elif self.second_hand.player_loss(self.dealer_hand):
                                print("you lost your second hand ")
                            elif self.second_hand.player_win(self.dealer_hand):
                                print("You Win in your second hand!")
                            elif self.second_hand.is_push(self.dealer_hand):
                                print("push in the second hand!")
                            game_over = True
                elif choice in ["split", "p"] and self.player_hand.can_not_split():
                    print(" no you can not split")  # FIX: typo "splet"
            again = input("Play Again? [Y/N] ")
            while again.lower() not in ["y", "n"]:
                again = input("Please enter Y or N ")
            if again.lower() == "n":
                print("Thanks for playing!")
                playing = False
            else:
                playing = True

    def display_result(self):
        """Show both final hands after a round resolves."""
        print("player hand")
        self.player_hand.final_display()
        print("dealer hand")
        self.dealer_hand.final_display()

    def show_blackjack_results(self, player_has_blackjack, dealer_has_blackjack):
        """Announce the outcome when at least one side has a natural blackjack."""
        if player_has_blackjack and dealer_has_blackjack:
            print("Both players have blackjack! Draw!")
        elif player_has_blackjack:
            print("You have blackjack! You win!")
        elif dealer_has_blackjack:
            print("Dealer has blackjack! Dealer wins!")
class Result:
    """Win/loss/draw tallies for one (dealer up-card, player hand value) cell."""

    _COUNTERS = (
        "hit_win_count", "hit_loss_count", "hit_draw_count",
        "stand_win_count", "stand_loss_count", "stand_draw_count",
    )

    def __init__(self, dealer_card, player_hand_value):
        self.dealer_card = dealer_card
        self.player_hand_value = player_hand_value
        # All six outcome counters start at zero.
        for counter in self._COUNTERS:
            setattr(self, counter, 0)
class Simulation:
    """Monte-Carlo blackjack: each round makes one random hit-or-stand decision
    and tallies the outcome per (player hand value, dealer up-card) pair.

    Fixes relative to the original: total_stand_push accumulated
    hit_draw_count instead of stand_draw_count, and the "wis" typos in the
    summary output are corrected to "wins".
    """

    def __init__(self):
        self.results = []
        self.deck = Deck()

    def simulation_rounds(self, num_of_rounds):
        """Play num_of_rounds hands with a random hit/stand choice, then print tallies."""
        self.start = time()
        for round_no in range(num_of_rounds):  # renamed: 'round' shadowed the builtin
            self.player_hand = Hand()
            self.dealer_hand = Hand(dealer=True)
            for i in range(2):
                self.player_hand.add_card(self.deck.deal())
                self.dealer_hand.add_card(self.deck.deal())
            player_hand_value = self.player_hand.get_value()
            # Draw until the hand is at least 11 so the decision point is meaningful.
            while self.player_hand.get_value() < 11:
                self.player_hand.add_card(self.deck.deal())
                player_hand_value = self.player_hand.get_value()
            # Dealer plays the fixed house rule: hit until 17.
            while self.dealer_hand.get_value() < 17:
                self.dealer_hand.add_card(self.deck.deal())
            dealer_up_card = self.dealer_hand.cards[0].value
            # Pick hit or stand uniformly at random (never hit a made 21).
            actions = ["h", "s"]
            random.shuffle(actions)
            choice = actions.pop(0)
            if choice in ['h'] and player_hand_value != 21:
                self.player_hand.add_card(self.deck.deal())
                self.calculateResult('h', dealer_up_card, player_hand_value)
            else:
                self.calculateResult('s', dealer_up_card, player_hand_value)
        self.display_result()

    def calculateResult(self, action, dealer_up_card, player_hand_value):
        """Record the round's outcome in the Result cell for this situation.

        action is 'h' (hit once) or 's' (stand); the cell is created on first use.
        """
        result = self.if_there(dealer_up_card, player_hand_value)
        if result is None:
            result = Result(dealer_up_card, player_hand_value)
            self.results.append(result)
        if self.player_hand.is_busted():
            if action == 'h':
                result.hit_loss_count += 1
            else:
                result.stand_loss_count += 1
        elif self.dealer_hand.is_busted():
            if action == 'h':
                result.hit_win_count += 1
            else:
                result.stand_win_count += 1
        elif self.player_hand.check_for_blackjack():
            result.stand_win_count += 1
        elif self.player_hand.player_win(self.dealer_hand):
            if action == 'h':
                result.hit_win_count += 1
            else:
                result.stand_win_count += 1
        elif self.player_hand.is_push(self.dealer_hand):
            if action == 'h':
                result.hit_draw_count += 1
            else:
                result.stand_draw_count += 1
        elif self.player_hand.player_loss(self.dealer_hand):
            if action == 'h':
                result.hit_loss_count += 1
            else:
                result.stand_loss_count += 1

    def if_there(self, dealer_up_card, player_hand_value):
        """Return the existing Result cell for this (up-card, value) pair, or None."""
        for result in self.results:
            if result.dealer_card == dealer_up_card and result.player_hand_value == player_hand_value:
                return result
        return None

    def display_result(self):
        """Print the per-cell outcome table plus aggregate totals and elapsed time."""
        # Order rows by dealer up-card, then (stable sort) by player hand value.
        self.results.sort(key=lambda x: x.dealer_card)
        self.results.sort(key=lambda x: x.player_hand_value)
        total_wins = 0
        total_loss = 0
        total_push = 0
        total_hit_win = 0
        total_hit_loss = 0
        total_hit_push = 0
        total_stand_win = 0
        total_stand_loss = 0
        total_stand_push = 0
        counter = 1
        dash = '-' * 118
        print(dash)
        print('{:<12s}{:>12s}{:>19s}{:>12s}{:>12s}{:>9s}{:>13s}{:>14s}{:>8}'.format("Counter", "Player Card Value",
                                                                                    "Dealer Up Card", "Hit Win",
                                                                                    "Hit Lose",
                                                                                    "Push", "Stand win",
                                                                                    "Stand Loss", "Push"))
        print(dash)
        for result in self.results:
            print('{:>1}{:>20}{:>20}{:>15}{:>12}{:>12}{:>10}{:>13}{:>12}'.format(counter, result.player_hand_value,
                                                                                 result.dealer_card,
                                                                                 result.hit_win_count,
                                                                                 result.hit_loss_count,
                                                                                 result.hit_draw_count,
                                                                                 result.stand_win_count,
                                                                                 result.stand_loss_count,
                                                                                 result.stand_draw_count))
            counter += 1
            total_wins += result.hit_win_count + result.stand_win_count
            total_loss += result.hit_loss_count + result.stand_loss_count
            total_push += result.hit_draw_count + result.stand_draw_count
            total_hit_win += result.hit_win_count
            total_hit_loss += result.hit_loss_count
            total_hit_push += result.hit_draw_count
            total_stand_win += result.stand_win_count
            total_stand_loss += result.stand_loss_count
            # FIX: was accumulating hit_draw_count into the stand-push total.
            total_stand_push += result.stand_draw_count
        total = total_wins + total_loss + total_push
        print("total wins :", total_wins)
        print("total loss :", total_loss)
        print("total push :", total_push)
        print("total :", total)
        print()
        print("----------- details ------------")
        print("total hit wins :", total_hit_win)
        print("total hit loss :", total_hit_loss)
        print("total hit push :", total_hit_push)
        print("total stand wins :", total_stand_win)
        print("total stand loss :", total_stand_loss)
        print("total stand push :", total_stand_push)
        self.end = time()
        print("time " + str(self.end - self.start))
class OurStrategy(Simulation):
    """Simulation variant that hits only on a fixed (hand value, dealer up-card) table."""

    # Hit exactly when the dealer's up-card is in the set for the player's
    # hand value; otherwise stand.  Same pairs as the original if-chain.
    _HIT_TABLE = {
        11: {"2", "4", "5", "6", "7", "8", "9", "10"},
        12: {"2", "7", "8", "9", "10", "A"},
        13: {"5", "7", "8", "9"},
        14: {"9"},
        15: {"A"},
    }

    def __init__(self):
        # FIX: the original re-assigned self.results and built a second Deck
        # after super().__init__() had already initialised both.
        super().__init__()

    def simulation_rounds(self, num_of_rounds):
        """Play num_of_rounds hands using the fixed strategy, then print the tallies."""
        self.start = time()
        for round_no in range(num_of_rounds):
            self.player_hand = Hand()
            self.dealer_hand = Hand(dealer=True)
            for i in range(2):
                self.player_hand.add_card(self.deck.deal())
                self.dealer_hand.add_card(self.deck.deal())
            player_hand_value = self.player_hand.get_value()
            # Draw until the hand is at least 11 so the decision point is meaningful.
            while self.player_hand.get_value() < 11:
                self.player_hand.add_card(self.deck.deal())
                player_hand_value = self.player_hand.get_value()
            # Dealer plays the fixed house rule: hit until 17.
            while self.dealer_hand.get_value() < 17:
                self.dealer_hand.add_card(self.deck.deal())
            dealer_up_card = self.dealer_hand.cards[0].value
            if dealer_up_card in self._HIT_TABLE.get(player_hand_value, ()):
                self.player_hand.add_card(self.deck.deal())
                self.calculateResult('h', dealer_up_card, player_hand_value)
            else:
                self.calculateResult('s', dealer_up_card, player_hand_value)
        self.display_result()
if __name__ == "__main__":
    # Run the random-choice baseline and the fixed strategy back to back,
    # one million simulated hands each, printing each tally table.
    # FIX: stray dataset-dump residue fused onto the last statement was removed.
    x = Simulation()
    x.simulation_rounds(1000000)
    s = OurStrategy()
    s.simulation_rounds(1000000)
f73023b517d0845ea1d7905c59222d02f1e64c12 | 362 | py | Python | transcription_folder_to_sclite_hyp.py | c0louri/kaldi-grpc-server | d0f6881099423e6d08df74dc4217ddf3f43621a2 | [
"Apache-2.0"
] | null | null | null | transcription_folder_to_sclite_hyp.py | c0louri/kaldi-grpc-server | d0f6881099423e6d08df74dc4217ddf3f43621a2 | [
"Apache-2.0"
] | null | null | null | transcription_folder_to_sclite_hyp.py | c0louri/kaldi-grpc-server | d0f6881099423e6d08df74dc4217ddf3f43621a2 | [
"Apache-2.0"
] | null | null | null | import fileinput
import os
def to_sclite_line(trans):
    """Convert one transcript file into an sclite hypothesis ("hyp") line.

    Reads the whole file at *trans* and returns "<contents> (<basename-sans-ext>)",
    the utterance-id-in-parentheses format sclite expects.
    """
    # Explicit encoding so the output does not depend on the platform default.
    with open(trans, "r", encoding="utf-8") as fd:
        hyp = fd.read()
    _id, _ = os.path.splitext(os.path.basename(trans))
    return f"{hyp} ({_id})"
def main():
    """Print an sclite hyp line for each transcript path read from stdin or argv files."""
    with fileinput.input() as stream:
        for raw_line in stream:
            path = raw_line.strip()
            print(to_sclite_line(path))
if __name__ == "__main__":
    # Allow use both as a script and as an importable module.
    main()
| 16.454545 | 54 | 0.596685 | import fileinput
import os
def to_sclite_line(trans):
    """Convert one transcript file into an sclite hypothesis ("hyp") line.

    Reads the whole file at *trans* and returns "<contents> (<basename-sans-ext>)",
    the utterance-id-in-parentheses format sclite expects.
    """
    # Explicit encoding so the output does not depend on the platform default.
    with open(trans, "r", encoding="utf-8") as fd:
        hyp = fd.read()
    _id, _ = os.path.splitext(os.path.basename(trans))
    return f"{hyp} ({_id})"
def main():
    """Print an sclite hyp line for each transcript path read from stdin or argv files."""
    with fileinput.input() as stream:
        for raw_line in stream:
            path = raw_line.strip()
            print(to_sclite_line(path))
if __name__ == "__main__":
    # Allow use both as a script and as an importable module.
    main()
| true | true |
f73024ea71ed1bb25989143787154d6617e53ae0 | 18,051 | py | Python | sdk/python/pulumi_azure_native/recoveryservices/v20210601/protection_container.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/recoveryservices/v20210601/protection_container.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/recoveryservices/v20210601/protection_container.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ProtectionContainerArgs', 'ProtectionContainer']
@pulumi.input_type
class ProtectionContainerArgs:
    def __init__(__self__, *,
                 fabric_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 vault_name: pulumi.Input[str],
                 container_name: Optional[pulumi.Input[str]] = None,
                 e_tag: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a ProtectionContainer resource.
        :param pulumi.Input[str] fabric_name: Fabric name associated with the container.
        :param pulumi.Input[str] resource_group_name: The name of the resource group where the recovery services vault is present.
        :param pulumi.Input[str] vault_name: The name of the recovery services vault.
        :param pulumi.Input[str] container_name: Name of the container to be registered.
        :param pulumi.Input[str] e_tag: Optional ETag.
        :param pulumi.Input[str] location: Resource location.
        :param pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']] properties: ProtectionContainerResource properties
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        """
        # Required arguments always go into Pulumi's property bag.
        pulumi.set(__self__, "fabric_name", fabric_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "vault_name", vault_name)
        # Optional arguments are stored only when explicitly provided, so the
        # provider can distinguish "unset" from an explicit value.
        if container_name is not None:
            pulumi.set(__self__, "container_name", container_name)
        if e_tag is not None:
            pulumi.set(__self__, "e_tag", e_tag)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="fabricName")
def fabric_name(self) -> pulumi.Input[str]:
    """
    Fabric name associated with the container.
    """
    return pulumi.get(self, "fabric_name")

@fabric_name.setter
def fabric_name(self, value: pulumi.Input[str]):
    """Set the fabric name associated with the container."""
    pulumi.set(self, "fabric_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
    """
    The name of the resource group where the recovery services vault is present.
    """
    return pulumi.get(self, "resource_group_name")

@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
    """Set the resource group that holds the recovery services vault."""
    pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="vaultName")
def vault_name(self) -> pulumi.Input[str]:
    """
    The name of the recovery services vault.
    """
    return pulumi.get(self, "vault_name")

@vault_name.setter
def vault_name(self, value: pulumi.Input[str]):
    """Set the name of the recovery services vault."""
    pulumi.set(self, "vault_name", value)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> Optional[pulumi.Input[str]]:
    """
    Name of the container to be registered.
    """
    return pulumi.get(self, "container_name")

@container_name.setter
def container_name(self, value: Optional[pulumi.Input[str]]):
    """Set the name of the container to be registered."""
    pulumi.set(self, "container_name", value)
@property
@pulumi.getter(name="eTag")
def e_tag(self) -> Optional[pulumi.Input[str]]:
    """
    Optional ETag.
    """
    return pulumi.get(self, "e_tag")

@e_tag.setter
def e_tag(self, value: Optional[pulumi.Input[str]]):
    """Set the optional ETag."""
    pulumi.set(self, "e_tag", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
    """
    Resource location.
    """
    return pulumi.get(self, "location")

@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
    """Set the resource location."""
    pulumi.set(self, "location", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']]]:
    """
    ProtectionContainerResource properties
    """
    return pulumi.get(self, "properties")

@properties.setter
def properties(self, value: Optional[pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']]]):
    """Set the workload-specific ProtectionContainerResource properties."""
    pulumi.set(self, "properties", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
    """
    Resource tags.
    """
    return pulumi.get(self, "tags")

@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
    """Set the resource tags."""
    pulumi.set(self, "tags", value)
class ProtectionContainer(pulumi.CustomResource):
    """Base class for container with backup items. Containers with specific
    workloads are derived from this class (Azure VM, SQL, storage, DPM, ...)."""

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 container_name: Optional[pulumi.Input[str]] = None,
                 e_tag: Optional[pulumi.Input[str]] = None,
                 fabric_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union[pulumi.InputType['AzureBackupServerContainerArgs'], pulumi.InputType['AzureIaaSClassicComputeVMContainerArgs'], pulumi.InputType['AzureIaaSComputeVMContainerArgs'], pulumi.InputType['AzureSQLAGWorkloadContainerProtectionContainerArgs'], pulumi.InputType['AzureSqlContainerArgs'], pulumi.InputType['AzureStorageContainerArgs'], pulumi.InputType['AzureVMAppContainerProtectionContainerArgs'], pulumi.InputType['AzureWorkloadContainerArgs'], pulumi.InputType['DpmContainerArgs'], pulumi.InputType['GenericContainerArgs'], pulumi.InputType['IaaSVMContainerArgs'], pulumi.InputType['MabContainerArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vault_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Base class for container with backup items. Containers with specific workloads are derived from this class.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] container_name: Name of the container to be registered.
        :param pulumi.Input[str] e_tag: Optional ETag.
        :param pulumi.Input[str] fabric_name: Fabric name associated with the container.
        :param pulumi.Input[str] location: Resource location.
        :param properties: ProtectionContainerResource properties (one of the workload-specific *ContainerArgs input types).
        :param pulumi.Input[str] resource_group_name: The name of the resource group where the recovery services vault is present.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        :param pulumi.Input[str] vault_name: The name of the recovery services vault.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ProtectionContainerArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Base class for container with backup items. Containers with specific workloads are derived from this class.

        :param str resource_name: The name of the resource.
        :param ProtectionContainerArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: an args-object call is unpacked
        # into keyword arguments for _internal_init; otherwise pass through.
        resource_args, opts = _utilities.get_resource_args_opts(ProtectionContainerArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       container_name: Optional[pulumi.Input[str]] = None,
                       e_tag: Optional[pulumi.Input[str]] = None,
                       fabric_name: Optional[pulumi.Input[str]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       properties: Optional[pulumi.Input[Union[pulumi.InputType['AzureBackupServerContainerArgs'], pulumi.InputType['AzureIaaSClassicComputeVMContainerArgs'], pulumi.InputType['AzureIaaSComputeVMContainerArgs'], pulumi.InputType['AzureSQLAGWorkloadContainerProtectionContainerArgs'], pulumi.InputType['AzureSqlContainerArgs'], pulumi.InputType['AzureStorageContainerArgs'], pulumi.InputType['AzureVMAppContainerProtectionContainerArgs'], pulumi.InputType['AzureWorkloadContainerArgs'], pulumi.InputType['DpmContainerArgs'], pulumi.InputType['GenericContainerArgs'], pulumi.InputType['IaaSVMContainerArgs'], pulumi.InputType['MabContainerArgs']]]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       vault_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        """Shared constructor body: validates options, builds the props bag and
        registers the resource with the Pulumi engine."""
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource"; only then may a
        # pre-built __props__ bag be supplied (see get() below).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ProtectionContainerArgs.__new__(ProtectionContainerArgs)
            __props__.__dict__["container_name"] = container_name
            __props__.__dict__["e_tag"] = e_tag
            # Required inputs may only be omitted when looking up by URN.
            if fabric_name is None and not opts.urn:
                raise TypeError("Missing required property 'fabric_name'")
            __props__.__dict__["fabric_name"] = fabric_name
            __props__.__dict__["location"] = location
            __props__.__dict__["properties"] = properties
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["tags"] = tags
            if vault_name is None and not opts.urn:
                raise TypeError("Missing required property 'vault_name'")
            __props__.__dict__["vault_name"] = vault_name
            # Output-only properties start as None and are filled by the engine.
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Register aliases for every other API version of this resource so
        # upgrades do not replace existing state.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210601:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20161201:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20161201:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20201001:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20201001:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20201201:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20201201:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210101:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210101:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210201:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210201:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210201preview:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210201preview:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210210:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210210:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210301:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210301:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210401:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210401:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210701:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210701:ProtectionContainer")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(ProtectionContainer, __self__).__init__(
            'azure-native:recoveryservices/v20210601:ProtectionContainer',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'ProtectionContainer':
        """
        Get an existing ProtectionContainer resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties are outputs here; the engine populates them on read.
        __props__ = ProtectionContainerArgs.__new__(ProtectionContainerArgs)
        __props__.__dict__["e_tag"] = None
        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["type"] = None
        return ProtectionContainer(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="eTag")
    def e_tag(self) -> pulumi.Output[Optional[str]]:
        """
        Optional ETag.
        """
        return pulumi.get(self, "e_tag")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """
        Resource location.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name associated with the resource.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output[Any]:
        """
        ProtectionContainerResource properties
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type represents the complete path of the form Namespace/ResourceType/ResourceType/...
        """
        return pulumi.get(self, "type")
| 56.943218 | 1,968 | 0.70046 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ProtectionContainerArgs', 'ProtectionContainer']
@pulumi.input_type
class ProtectionContainerArgs:
    """The set of arguments for constructing a ProtectionContainer resource."""

    def __init__(__self__, *,
                 fabric_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 vault_name: pulumi.Input[str],
                 container_name: Optional[pulumi.Input[str]] = None,
                 e_tag: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        :param fabric_name: Fabric name associated with the container.
        :param resource_group_name: The name of the resource group where the recovery services vault is present.
        :param vault_name: The name of the recovery services vault.
        :param container_name: Name of the container to be registered.
        :param e_tag: Optional ETag.
        :param location: Resource location.
        :param properties: ProtectionContainerResource properties (workload-specific container args).
        :param tags: Resource tags.
        """
        pulumi.set(__self__, "fabric_name", fabric_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "vault_name", vault_name)
        # Optional arguments are only recorded when explicitly provided.
        if container_name is not None:
            pulumi.set(__self__, "container_name", container_name)
        if e_tag is not None:
            pulumi.set(__self__, "e_tag", e_tag)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    @property
    @pulumi.getter(name="fabricName")
    def fabric_name(self) -> pulumi.Input[str]:
        """Fabric name associated with the container."""
        return pulumi.get(self, "fabric_name")
    @fabric_name.setter
    def fabric_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "fabric_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """The name of the resource group where the recovery services vault is present."""
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="vaultName")
    def vault_name(self) -> pulumi.Input[str]:
        """The name of the recovery services vault."""
        return pulumi.get(self, "vault_name")
    @vault_name.setter
    def vault_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "vault_name", value)
    @property
    @pulumi.getter(name="containerName")
    def container_name(self) -> Optional[pulumi.Input[str]]:
        """Name of the container to be registered."""
        return pulumi.get(self, "container_name")
    @container_name.setter
    def container_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "container_name", value)
    @property
    @pulumi.getter(name="eTag")
    def e_tag(self) -> Optional[pulumi.Input[str]]:
        """Optional ETag."""
        return pulumi.get(self, "e_tag")
    @e_tag.setter
    def e_tag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "e_tag", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """Resource location."""
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']]]:
        """ProtectionContainerResource properties."""
        return pulumi.get(self, "properties")
    @properties.setter
    def properties(self, value: Optional[pulumi.Input[Union['AzureBackupServerContainerArgs', 'AzureIaaSClassicComputeVMContainerArgs', 'AzureIaaSComputeVMContainerArgs', 'AzureSQLAGWorkloadContainerProtectionContainerArgs', 'AzureSqlContainerArgs', 'AzureStorageContainerArgs', 'AzureVMAppContainerProtectionContainerArgs', 'AzureWorkloadContainerArgs', 'DpmContainerArgs', 'GenericContainerArgs', 'IaaSVMContainerArgs', 'MabContainerArgs']]]):
        pulumi.set(self, "properties", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """Resource tags."""
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class ProtectionContainer(pulumi.CustomResource):
    """Base class for container with backup items. Containers with specific
    workloads are derived from this class."""

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 container_name: Optional[pulumi.Input[str]] = None,
                 e_tag: Optional[pulumi.Input[str]] = None,
                 fabric_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union[pulumi.InputType['AzureBackupServerContainerArgs'], pulumi.InputType['AzureIaaSClassicComputeVMContainerArgs'], pulumi.InputType['AzureIaaSComputeVMContainerArgs'], pulumi.InputType['AzureSQLAGWorkloadContainerProtectionContainerArgs'], pulumi.InputType['AzureSqlContainerArgs'], pulumi.InputType['AzureStorageContainerArgs'], pulumi.InputType['AzureVMAppContainerProtectionContainerArgs'], pulumi.InputType['AzureWorkloadContainerArgs'], pulumi.InputType['DpmContainerArgs'], pulumi.InputType['GenericContainerArgs'], pulumi.InputType['IaaSVMContainerArgs'], pulumi.InputType['MabContainerArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vault_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """Construct a ProtectionContainer from individual keyword arguments.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param container_name: Name of the container to be registered.
        :param e_tag: Optional ETag.
        :param fabric_name: Fabric name associated with the container.
        :param location: Resource location.
        :param properties: ProtectionContainerResource properties (workload-specific container args).
        :param resource_group_name: The name of the resource group where the recovery services vault is present.
        :param tags: Resource tags.
        :param vault_name: The name of the recovery services vault.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ProtectionContainerArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """Construct a ProtectionContainer from a ProtectionContainerArgs object.

        :param str resource_name: The name of the resource.
        :param ProtectionContainerArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above.
        resource_args, opts = _utilities.get_resource_args_opts(ProtectionContainerArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       container_name: Optional[pulumi.Input[str]] = None,
                       e_tag: Optional[pulumi.Input[str]] = None,
                       fabric_name: Optional[pulumi.Input[str]] = None,
                       location: Optional[pulumi.Input[str]] = None,
                       properties: Optional[pulumi.Input[Union[pulumi.InputType['AzureBackupServerContainerArgs'], pulumi.InputType['AzureIaaSClassicComputeVMContainerArgs'], pulumi.InputType['AzureIaaSComputeVMContainerArgs'], pulumi.InputType['AzureSQLAGWorkloadContainerProtectionContainerArgs'], pulumi.InputType['AzureSqlContainerArgs'], pulumi.InputType['AzureStorageContainerArgs'], pulumi.InputType['AzureVMAppContainerProtectionContainerArgs'], pulumi.InputType['AzureWorkloadContainerArgs'], pulumi.InputType['DpmContainerArgs'], pulumi.InputType['GenericContainerArgs'], pulumi.InputType['IaaSVMContainerArgs'], pulumi.InputType['MabContainerArgs']]]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       vault_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        """Shared constructor body: validates options, builds the props bag and
        registers the resource with the Pulumi engine."""
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource"; only then may a
        # pre-built __props__ bag be supplied (see get() below).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ProtectionContainerArgs.__new__(ProtectionContainerArgs)
            __props__.__dict__["container_name"] = container_name
            __props__.__dict__["e_tag"] = e_tag
            # Required inputs may only be omitted when looking up by URN.
            if fabric_name is None and not opts.urn:
                raise TypeError("Missing required property 'fabric_name'")
            __props__.__dict__["fabric_name"] = fabric_name
            __props__.__dict__["location"] = location
            __props__.__dict__["properties"] = properties
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["tags"] = tags
            if vault_name is None and not opts.urn:
                raise TypeError("Missing required property 'vault_name'")
            __props__.__dict__["vault_name"] = vault_name
            # Output-only properties start as None and are filled by the engine.
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Aliases cover every other API version so upgrades do not replace state.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210601:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20161201:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20161201:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20201001:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20201001:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20201201:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20201201:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210101:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210101:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210201:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210201:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210201preview:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210201preview:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210210:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210210:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210301:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210301:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210401:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210401:ProtectionContainer"), pulumi.Alias(type_="azure-native:recoveryservices/v20210701:ProtectionContainer"), pulumi.Alias(type_="azure-nextgen:recoveryservices/v20210701:ProtectionContainer")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(ProtectionContainer, __self__).__init__(
            'azure-native:recoveryservices/v20210601:ProtectionContainer',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'ProtectionContainer':
        """Get an existing ProtectionContainer resource's state with the given
        name and provider id.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties are outputs here; the engine populates them on read.
        __props__ = ProtectionContainerArgs.__new__(ProtectionContainerArgs)
        __props__.__dict__["e_tag"] = None
        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["type"] = None
        return ProtectionContainer(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="eTag")
    def e_tag(self) -> pulumi.Output[Optional[str]]:
        """Optional ETag."""
        return pulumi.get(self, "e_tag")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """Resource location."""
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """Resource name associated with the resource."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output[Any]:
        """ProtectionContainerResource properties."""
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """Resource tags."""
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """Resource type represents the complete path of the form Namespace/ResourceType/ResourceType/..."""
        return pulumi.get(self, "type")
| true | true |
f73025c4d9d89dd6e0b7fe5b4dd2de1d97525760 | 3,376 | py | Python | examples/nips17_adversarial_competition/dev_toolkit/sample_defenses/ens_adv_inception_resnet_v2/defense.py | luvrpg/cleverhans | 1f2ee7a04cff1ec54c96dcba5294f6e2d7780d42 | [
"MIT"
] | 50 | 2018-11-20T11:59:18.000Z | 2021-11-01T18:01:42.000Z | examples/nips17_adversarial_competition/dev_toolkit/sample_defenses/ens_adv_inception_resnet_v2/defense.py | luvrpg/cleverhans | 1f2ee7a04cff1ec54c96dcba5294f6e2d7780d42 | [
"MIT"
] | 2 | 2019-07-22T20:59:01.000Z | 2019-11-17T07:00:00.000Z | examples/nips17_adversarial_competition/dev_toolkit/sample_defenses/ens_adv_inception_resnet_v2/defense.py | luvrpg/cleverhans | 1f2ee7a04cff1ec54c96dcba5294f6e2d7780d42 | [
"MIT"
] | 20 | 2018-03-14T14:01:55.000Z | 2021-09-17T19:19:56.000Z | """Implementation of sample defense.
This defense loads inception resnet v2 checkpoint and classifies all images
using loaded checkpoint.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from scipy.misc import imread
import tensorflow as tf
import inception_resnet_v2
slim = tf.contrib.slim  # TF-Slim shorthand, used for arg scopes and model variables.

# Command-line flags: cluster master, checkpoint location, I/O paths and
# batch geometry (299x299 is the Inception input resolution).
tf.flags.DEFINE_string(
    'master', '', 'The address of the TensorFlow master to use.')
tf.flags.DEFINE_string(
    'checkpoint_path', '', 'Path to checkpoint for inception network.')
tf.flags.DEFINE_string(
    'input_dir', '', 'Input directory with images.')
tf.flags.DEFINE_string(
    'output_file', '', 'Output file to save labels.')
tf.flags.DEFINE_integer(
    'image_width', 299, 'Width of each input images.')
tf.flags.DEFINE_integer(
    'image_height', 299, 'Height of each input images.')
tf.flags.DEFINE_integer(
    'batch_size', 16, 'How many images process at one time.')

FLAGS = tf.flags.FLAGS  # Parsed flag values, resolved when tf.app.run() starts.
def load_images(input_dir, batch_shape):
  """Read png images from input directory in batches.

  Args:
    input_dir: input directory
    batch_shape: shape of minibatch array, i.e. [batch_size, height, width, 3]

  Yields:
    filenames: list file names without path of each image
      Length of this list could be less than batch_size, in this case only
      first few images of the result are elements of the minibatch.
    images: array with all images from this batch
  """
  images = np.zeros(batch_shape)
  filenames = []
  idx = 0
  batch_size = batch_shape[0]
  for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):
    with tf.gfile.Open(filepath) as f:
      # `np.float` was deprecated in NumPy 1.20 and removed in 1.24; the
      # builtin `float` (float64) is the documented drop-in replacement.
      image = imread(f, mode='RGB').astype(float) / 255.0
    # Images for inception classifier are normalized to be in [-1, 1] interval.
    images[idx, :, :, :] = image * 2.0 - 1.0
    filenames.append(os.path.basename(filepath))
    idx += 1
    if idx == batch_size:
      yield filenames, images
      filenames = []
      images = np.zeros(batch_shape)
      idx = 0
  # Flush the final partial batch; unused slots stay zero-filled.
  if idx > 0:
    yield filenames, images
def main(_):
  """Build the classification graph, restore the checkpoint, label all images
  from FLAGS.input_dir and write 'filename,label' lines to FLAGS.output_file."""
  batch_shape = [FLAGS.batch_size, FLAGS.image_height, FLAGS.image_width, 3]
  num_classes = 1001  # TF-Slim Inception convention: 1000 ImageNet classes + background.
  tf.logging.set_verbosity(tf.logging.INFO)
  with tf.Graph().as_default():
    # Prepare graph
    x_input = tf.placeholder(tf.float32, shape=batch_shape)
    with slim.arg_scope(inception_resnet_v2.inception_resnet_v2_arg_scope()):
      _, end_points = inception_resnet_v2.inception_resnet_v2(
          x_input, num_classes=num_classes, is_training=False)
    predicted_labels = tf.argmax(end_points['Predictions'], 1)
    # Run computation
    saver = tf.train.Saver(slim.get_model_variables())
    session_creator = tf.train.ChiefSessionCreator(
        scaffold=tf.train.Scaffold(saver=saver),
        checkpoint_filename_with_path=FLAGS.checkpoint_path,
        master=FLAGS.master)
    with tf.train.MonitoredSession(session_creator=session_creator) as sess:
      with tf.gfile.Open(FLAGS.output_file, 'w') as out_file:
        for filenames, images in load_images(FLAGS.input_dir, batch_shape):
          labels = sess.run(predicted_labels, feed_dict={x_input: images})
          for filename, label in zip(filenames, labels):
            out_file.write('{0},{1}\n'.format(filename, label))


if __name__ == '__main__':
  tf.app.run()
| 29.876106 | 79 | 0.707642 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from scipy.misc import imread
import tensorflow as tf
import inception_resnet_v2
slim = tf.contrib.slim  # TF-Slim shorthand, used for arg scopes and model variables.

# Command-line flags: cluster master, checkpoint location, I/O paths and
# batch geometry (299x299 is the Inception input resolution).
tf.flags.DEFINE_string(
    'master', '', 'The address of the TensorFlow master to use.')
tf.flags.DEFINE_string(
    'checkpoint_path', '', 'Path to checkpoint for inception network.')
tf.flags.DEFINE_string(
    'input_dir', '', 'Input directory with images.')
tf.flags.DEFINE_string(
    'output_file', '', 'Output file to save labels.')
tf.flags.DEFINE_integer(
    'image_width', 299, 'Width of each input images.')
tf.flags.DEFINE_integer(
    'image_height', 299, 'Height of each input images.')
tf.flags.DEFINE_integer(
    'batch_size', 16, 'How many images process at one time.')

FLAGS = tf.flags.FLAGS  # Parsed flag values, resolved when tf.app.run() starts.
def load_images(input_dir, batch_shape):
  """Read png images from input directory in batches.

  Args:
    input_dir: input directory
    batch_shape: shape of minibatch array, i.e. [batch_size, height, width, 3]

  Yields:
    filenames: list of file names (without path) for each image in the batch;
      may be shorter than batch_size for the final partial batch.
    images: array with all images from this batch
  """
  images = np.zeros(batch_shape)
  filenames = []
  idx = 0
  batch_size = batch_shape[0]
  for filepath in tf.gfile.Glob(os.path.join(input_dir, '*.png')):
    with tf.gfile.Open(filepath) as f:
      # NOTE(review): `np.float` was removed in NumPy 1.24; this assumes an
      # older NumPy where it aliases the builtin `float` (float64).
      image = imread(f, mode='RGB').astype(np.float) / 255.0
    # Rescale [0, 1] pixels to the [-1, 1] interval expected by Inception.
    images[idx, :, :, :] = image * 2.0 - 1.0
    filenames.append(os.path.basename(filepath))
    idx += 1
    if idx == batch_size:
      yield filenames, images
      filenames = []
      images = np.zeros(batch_shape)
      idx = 0
  # Flush the final partial batch; unused slots stay zero-filled.
  if idx > 0:
    yield filenames, images
def main(_):
  """Build the classification graph, restore the checkpoint, label all images
  from FLAGS.input_dir and write 'filename,label' lines to FLAGS.output_file."""
  batch_shape = [FLAGS.batch_size, FLAGS.image_height, FLAGS.image_width, 3]
  num_classes = 1001  # TF-Slim Inception convention: 1000 ImageNet classes + background.
  tf.logging.set_verbosity(tf.logging.INFO)
  with tf.Graph().as_default():
    # Prepare graph.
    x_input = tf.placeholder(tf.float32, shape=batch_shape)
    with slim.arg_scope(inception_resnet_v2.inception_resnet_v2_arg_scope()):
      _, end_points = inception_resnet_v2.inception_resnet_v2(
          x_input, num_classes=num_classes, is_training=False)
    predicted_labels = tf.argmax(end_points['Predictions'], 1)
    # Run computation: restore weights and classify batch by batch.
    saver = tf.train.Saver(slim.get_model_variables())
    session_creator = tf.train.ChiefSessionCreator(
        scaffold=tf.train.Scaffold(saver=saver),
        checkpoint_filename_with_path=FLAGS.checkpoint_path,
        master=FLAGS.master)
    with tf.train.MonitoredSession(session_creator=session_creator) as sess:
      with tf.gfile.Open(FLAGS.output_file, 'w') as out_file:
        for filenames, images in load_images(FLAGS.input_dir, batch_shape):
          labels = sess.run(predicted_labels, feed_dict={x_input: images})
          for filename, label in zip(filenames, labels):
            out_file.write('{0},{1}\n'.format(filename, label))


if __name__ == '__main__':
  tf.app.run()
| true | true |
f730267630885ce1626fc7dd223bb44956acfce4 | 1,937 | py | Python | transformers/md5/server.py | NVIDIA/ais-etl | e60e4c5a8be208379916fc245fd874f670336ce2 | [
"MIT"
] | 4 | 2020-08-08T19:39:33.000Z | 2021-06-02T19:14:34.000Z | transformers/md5/server.py | NVIDIA/ais-tar2tf | 4e4a9e448d7249e3e19481ca32c3f53fe5022ecc | [
"MIT"
] | null | null | null | transformers/md5/server.py | NVIDIA/ais-tar2tf | 4e4a9e448d7249e3e19481ca32c3f53fe5022ecc | [
"MIT"
] | 4 | 2020-10-28T19:49:15.000Z | 2022-03-28T23:21:02.000Z | #!/usr/bin/env python
import argparse
import hashlib
import requests
import os
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
# Base URL of the co-located AIS target; the deployment must set AIS_TARGET_URL
# (os.environ[...] raises KeyError at startup if it is missing).
host_target = os.environ['AIS_TARGET_URL']
class Handler(BaseHTTPRequestHandler):
    """HTTP handler that answers every request with an MD5 hex digest."""

    def log_request(self, code='-', size='-'):
        """Suppress per-request access logging; failures still go through log_error()."""
        pass

    def _set_headers(self):
        """Emit the shared 200 OK / plain-text response preamble."""
        self.send_response(200)
        self.send_header("Content-Type", "text/plain")
        self.end_headers()

    def do_PUT(self):
        """Inline-transform path: hash the request body, reply with its hex MD5."""
        body = self.rfile.read(int(self.headers['Content-Length']))
        digest = hashlib.md5(body).hexdigest()
        self._set_headers()
        self.wfile.write(digest.encode())

    def do_GET(self):
        """Health probe, or fetch the object from the AIS target and hash it."""
        if self.path == "/health":
            self._set_headers()
            self.wfile.write(b"OK")
            return
        resp = requests.get(host_target + self.path)
        digest = hashlib.md5(resp.content).hexdigest()
        self._set_headers()
        self.wfile.write(digest.encode())
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle each request in a separate thread (one thread per connection)."""
def run(addr="localhost", port=8000):
    """Start the threaded MD5 HTTP server and block serving requests."""
    httpd = ThreadedHTTPServer((addr, port), Handler)
    print(f"Starting HTTP server on {addr}:{port}")
    httpd.serve_forever()
# CLI entry point: parse the listen address/port and run the server.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run a simple HTTP server")
    parser.add_argument(
        "-l",
        "--listen",
        default="localhost",
        help="Specify the IP address on which the server listens",
    )
    parser.add_argument(
        "-p",
        "--port",
        type=int,
        default=8000,
        help="Specify the port on which the server listens",
    )
    args = parser.parse_args()
    run(addr=args.listen, port=args.port)
| 27.28169 | 81 | 0.637584 |
import argparse
import hashlib
import requests
import os
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
# AIS target base URL read from the environment; GETs are proxied to it.
host_target = os.environ['AIS_TARGET_URL']
class Handler(BaseHTTPRequestHandler):
    """MD5 transformer handler.

    PUT hashes the request body; GET answers /health with "OK" or hashes
    content fetched from the AIS target at `host_target`.
    """
    def log_request(self, code='-', size='-'):
        # Deliberately quiet for successful requests.
        pass
    def _set_headers(self):
        # 200 OK with a plain-text body.
        self.send_response(200)
        self.send_header("Content-Type", "text/plain")
        self.end_headers()
    def do_PUT(self):
        # Hash the full request body and reply with the hex digest.
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        md5 = hashlib.md5()
        md5.update(post_data)
        self._set_headers()
        self.wfile.write(md5.hexdigest().encode())
    def do_GET(self):
        # Health probe short-circuits; other paths are proxied to the target.
        if self.path == "/health":
            self._set_headers()
            self.wfile.write(b"OK")
            return
        x = requests.get(host_target + self.path)
        md5 = hashlib.md5()
        md5.update(x.content)
        self._set_headers()
        self.wfile.write(md5.hexdigest().encode())
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """HTTP server that handles each request in a separate thread."""
    # Bug fix: the class had no body (its docstring was stripped), which is a
    # SyntaxError in Python — a class statement must contain at least one
    # statement. Restoring the docstring makes the module parseable again.
def run(addr="localhost", port=8000):
    """Start a ThreadedHTTPServer on addr:port and serve forever (blocking)."""
    server = ThreadedHTTPServer((addr, port), Handler)
    print(f"Starting HTTP server on {addr}:{port}")
    server.serve_forever()
# CLI entry point: parse the listen address/port and run the server.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run a simple HTTP server")
    parser.add_argument(
        "-l",
        "--listen",
        default="localhost",
        help="Specify the IP address on which the server listens",
    )
    parser.add_argument(
        "-p",
        "--port",
        type=int,
        default=8000,
        help="Specify the port on which the server listens",
    )
    args = parser.parse_args()
    run(addr=args.listen, port=args.port)
| true | true |
f73027e9e4952cf5a8f78b5328442c74a11c9e43 | 234 | py | Python | kwueBackend/kwue/controllers/list.py | bounswe/bounswe2016group4 | cbc8201aa86049b81f20ef44ee37eb065a469d46 | [
"Apache-2.0"
] | 6 | 2016-02-14T18:04:48.000Z | 2016-12-18T20:09:15.000Z | kwueBackend/kwue/controllers/list.py | bounswe/bounswe2016group4 | cbc8201aa86049b81f20ef44ee37eb065a469d46 | [
"Apache-2.0"
] | 113 | 2016-02-14T18:06:57.000Z | 2021-06-10T17:57:12.000Z | kwueBackend/kwue/controllers/list.py | bounswe/bounswe2016group4 | cbc8201aa86049b81f20ef44ee37eb065a469d46 | [
"Apache-2.0"
] | 1 | 2017-02-15T18:48:55.000Z | 2017-02-15T18:48:55.000Z | from django.shortcuts import render
def get_list(req):
    """Render the food template with an empty context (stub view)."""
    return render(req, 'kwue/food.html', {})
def add_item(req):
    # NOTE(review): stub — despite the name, nothing is added; it only
    # re-renders the food template. Confirm intended behavior.
    return render(req, 'kwue/food.html', {})
def create_list(req):
    # NOTE(review): stub — no list is created; it only renders the template.
    return render(req, 'kwue/food.html', {})
| 21.272727 | 44 | 0.67094 | from django.shortcuts import render
def get_list(req):
    """Render the food template with an empty context (stub view)."""
    return render(req, 'kwue/food.html', {})
def add_item(req):
    # NOTE(review): stub — nothing is added; only re-renders the template.
    return render(req, 'kwue/food.html', {})
def create_list(req):
    # NOTE(review): stub — no list is created; only renders the template.
    return render(req, 'kwue/food.html', {})
| true | true |
f73028328736056212bd4d00388f2744b0775730 | 29,066 | py | Python | mbpo/algorithms/meee.py | YaoYao1995/mbpo | b9571e469459ce3a632b19dc3fee68c9ac3857b2 | [
"MIT"
] | null | null | null | mbpo/algorithms/meee.py | YaoYao1995/mbpo | b9571e469459ce3a632b19dc3fee68c9ac3857b2 | [
"MIT"
] | null | null | null | mbpo/algorithms/meee.py | YaoYao1995/mbpo | b9571e469459ce3a632b19dc3fee68c9ac3857b2 | [
"MIT"
] | null | null | null | ## adapted from https://github.com/rail-berkeley/softlearning/blob/master/softlearning/algorithms/sac.py
import os
import math
import pickle
from collections import OrderedDict
from numbers import Number
from itertools import count
import gtimer as gt
import pdb
import numpy as np
import tensorflow as tf
from tensorflow.python.training import training_util
from softlearning.algorithms.rl_algorithm import RLAlgorithm
from softlearning.replay_pools.simple_replay_pool import WeightedReplayPool
from mbpo.models.constructor import construct_model, format_samples_for_training
from mbpo.models.fake_env import FakeEnv
from mbpo.utils.writer import Writer
from mbpo.utils.visualization import visualize_policy
from mbpo.utils.logging import Progress
import mbpo.utils.filesystem as filesystem
def td_target(reward, discount, next_value):
    """Return the one-step TD target: reward + discount * next_value."""
    discounted_future = discount * next_value
    return reward + discounted_future
class MEEE(RLAlgorithm):
    """ Model-Ensemble Policy Optimization (MEEE)

    SAC-style actor-critic trained on a mix of real transitions and
    short rollouts generated by a learned dynamics-model ensemble
    (`FakeEnv`). Model rollouts record a per-transition std ('stds'),
    which is used to down-weight uncertain samples in both the critic
    and actor losses (see `_init_critic_update` / `_init_actor_update`).
    """

    def __init__(
            self,
            training_environment,
            evaluation_environment,
            policy,
            Qs,
            pool,
            static_fns,
            plotter=None,
            tf_summaries=False,
            lr=3e-4,
            reward_scale=1.0,
            target_entropy='auto',
            discount=0.99,
            tau=5e-3,
            target_update_interval=1,
            action_prior='uniform',
            reparameterize=False,
            store_extra_policy_info=False,
            deterministic=False,
            model_train_freq=250,
            num_networks=7,
            num_elites=5,
            model_retain_epochs=20,
            rollout_batch_size=100e3,
            real_ratio=0.1,
            rollout_schedule=[20,100,1,1],  # NOTE(review): mutable default; treated as read-only here
            hidden_dim=200,
            max_model_t=None,
            **kwargs,
    ):
        """
        Args:
            env (`SoftlearningEnv`): Environment used for training.
            policy: A policy function approximator.
            initial_exploration_policy: ('Policy'): A policy that we use
                for initial exploration which is not trained by the algorithm.
            Qs: Q-function approximators. The min of these
                approximators will be used. Usage of at least two Q-functions
                improves performance by reducing overestimation bias.
            pool (`PoolBase`): Replay pool to add gathered samples to.
            plotter (`QFPolicyPlotter`): Plotter instance to be used for
                visualizing Q-function during training.
            lr (`float`): Learning rate used for the function approximators.
            discount (`float`): Discount factor for Q-function updates.
            tau (`float`): Soft value function target update weight.
            target_update_interval ('int'): Frequency at which target network
                updates occur in iterations.
            reparameterize ('bool'): If True, we use a gradient estimator for
                the policy derived using the reparameterization trick. We use
                a likelihood ratio based estimator otherwise.
        """
        super(MEEE, self).__init__(**kwargs)

        # Dynamics-model ensemble over flattened observation/action spaces.
        obs_dim = np.prod(training_environment.observation_space.shape)
        act_dim = np.prod(training_environment.action_space.shape)
        self._model = construct_model(obs_dim=obs_dim, act_dim=act_dim, hidden_dim=hidden_dim, num_networks=num_networks, num_elites=num_elites)
        self._static_fns = static_fns
        self.fake_env = FakeEnv(self._model, self._static_fns)

        self._rollout_schedule = rollout_schedule
        self._max_model_t = max_model_t

        # self._model_pool_size = model_pool_size
        # print('[ MBPO ] Model pool size: {:.2E}'.format(self._model_pool_size))
        # self._model_pool = WeightedReplayPool(pool._observation_space, pool._action_space, self._model_pool_size)

        self._model_retain_epochs = model_retain_epochs

        self._model_train_freq = model_train_freq
        self._rollout_batch_size = int(rollout_batch_size)
        self._deterministic = deterministic
        self._real_ratio = real_ratio

        self._log_dir = os.getcwd()
        self._writer = Writer(self._log_dir)

        self._training_environment = training_environment
        self._evaluation_environment = evaluation_environment
        self._policy = policy

        self._Qs = Qs
        # Target critics start as weight-copies of the critics (synced in
        # _init_training via _update_target(tau=1.0)).
        self._Q_targets = tuple(tf.keras.models.clone_model(Q) for Q in Qs)

        self._pool = pool
        self._plotter = plotter
        self._tf_summaries = tf_summaries

        self._policy_lr = lr
        self._Q_lr = lr

        self._reward_scale = reward_scale
        # 'auto' sets the entropy target to -|A| (standard SAC heuristic).
        self._target_entropy = (
            -np.prod(self._training_environment.action_space.shape)
            if target_entropy == 'auto'
            else target_entropy)
        print('[ MEEE ] Target entropy: {}'.format(self._target_entropy))

        self._discount = discount
        self._tau = tau
        self._target_update_interval = target_update_interval
        self._action_prior = action_prior

        self._reparameterize = reparameterize
        self._store_extra_policy_info = store_extra_policy_info

        observation_shape = self._training_environment.active_observation_shape
        action_shape = self._training_environment.action_space.shape

        assert len(observation_shape) == 1, observation_shape
        self._observation_shape = observation_shape
        assert len(action_shape) == 1, action_shape
        self._action_shape = action_shape

        self._build()

    def _build(self):
        # Construct the full TF graph: placeholders first, then actor and
        # critic update ops (actor before critic — the critic's weighted loss
        # reads self._alpha created by _init_actor_update).
        self._training_ops = {}

        self._init_global_step()
        self._init_placeholders()
        self._init_actor_update()
        self._init_critic_update()

    def _train(self):
        """Return a generator that performs RL training.

        Args:
            env (`SoftlearningEnv`): Environment used for training.
            policy (`Policy`): Policy used for training
            initial_exploration_policy ('Policy'): Policy used for exploration
                If None, then all exploration is done using policy
            pool (`PoolBase`): Sample pool to add samples to
        """
        training_environment = self._training_environment
        evaluation_environment = self._evaluation_environment
        policy = self._policy
        pool = self._pool
        model_metrics = {}

        if not self._training_started:
            self._init_training()

            self._initial_exploration_hook(
                training_environment, self._initial_exploration_policy, pool)

        self.sampler.initialize(training_environment, policy, pool)

        gt.reset_root()
        gt.rename_root('RLAlgorithm')
        gt.set_def_unique(False)

        self._training_before_hook()

        for self._epoch in gt.timed_for(range(self._epoch, self._n_epochs)):

            self._epoch_before_hook()
            gt.stamp('epoch_before_hook')

            self._training_progress = Progress(self._epoch_length * self._n_train_repeat)
            start_samples = self.sampler._total_samples
            for i in count():
                samples_now = self.sampler._total_samples
                self._timestep = samples_now - start_samples

                if (samples_now >= start_samples + self._epoch_length
                    and self.ready_to_train):
                    break

                self._timestep_before_hook()
                gt.stamp('timestep_before_hook')

                # Periodically retrain the dynamics ensemble on all real data
                # and refill the model pool with fresh short rollouts.
                if self._timestep % self._model_train_freq == 0 and self._real_ratio < 1.0:
                    self._training_progress.pause()
                    print('[ MEEE ] log_dir: {} | ratio: {}'.format(self._log_dir, self._real_ratio))
                    print('[ MEEE ] Training model at epoch {} | freq {} | timestep {} (total: {}) | epoch train steps: {} (total: {})'.format(
                        self._epoch, self._model_train_freq, self._timestep, self._total_timestep, self._train_steps_this_epoch, self._num_train_steps)
                    )

                    model_train_metrics = self._train_model(batch_size=256, max_epochs=None, holdout_ratio=0.2, max_t=self._max_model_t)
                    model_metrics.update(model_train_metrics)
                    gt.stamp('epoch_train_model')

                    self._set_rollout_length()
                    self._reallocate_model_pool()
                    model_rollout_metrics = self._rollout_model(rollout_batch_size=self._rollout_batch_size, deterministic=self._deterministic)
                    model_metrics.update(model_rollout_metrics)
                    gt.stamp('epoch_rollout_model')

                    # self._visualize_model(self._evaluation_environment, self._total_timestep)
                    self._training_progress.resume()

                # No UCB exploration
                #self._do_sampling(timestep=self._total_timestep)
                # Environment step with disturbed (optimism-driven) action
                # selection using the model ensemble and the critics.
                self._do_sampling(timestep=self._total_timestep, disturb=True, fake_env=self.fake_env, Qs = self._Qs)
                #print("**exploration**")
                gt.stamp('sample')

                if self.ready_to_train:
                    self._do_training_repeats(timestep=self._total_timestep)
                gt.stamp('train')

                self._timestep_after_hook()
                gt.stamp('timestep_after_hook')

            training_paths = self.sampler.get_last_n_paths(
                math.ceil(self._epoch_length / self.sampler._max_path_length))
            gt.stamp('training_paths')
            evaluation_paths = self._evaluation_paths(
                policy, evaluation_environment)
            gt.stamp('evaluation_paths')

            training_metrics = self._evaluate_rollouts(
                training_paths, training_environment)
            gt.stamp('training_metrics')
            if evaluation_paths:
                evaluation_metrics = self._evaluate_rollouts(
                    evaluation_paths, evaluation_environment)
                gt.stamp('evaluation_metrics')
            else:
                evaluation_metrics = {}

            self._epoch_after_hook(training_paths)
            gt.stamp('epoch_after_hook')

            sampler_diagnostics = self.sampler.get_diagnostics()

            diagnostics = self.get_diagnostics(
                iteration=self._total_timestep,
                batch=self._evaluation_batch(),
                training_paths=training_paths,
                evaluation_paths=evaluation_paths)

            time_diagnostics = gt.get_times().stamps.itrs

            # Flatten all metric groups into one namespaced OrderedDict.
            diagnostics.update(OrderedDict((
                *(
                    (f'evaluation/{key}', evaluation_metrics[key])
                    for key in sorted(evaluation_metrics.keys())
                ),
                *(
                    (f'training/{key}', training_metrics[key])
                    for key in sorted(training_metrics.keys())
                ),
                *(
                    (f'times/{key}', time_diagnostics[key][-1])
                    for key in sorted(time_diagnostics.keys())
                ),
                *(
                    (f'sampler/{key}', sampler_diagnostics[key])
                    for key in sorted(sampler_diagnostics.keys())
                ),
                *(
                    (f'model/{key}', model_metrics[key])
                    for key in sorted(model_metrics.keys())
                ),
                ('epoch', self._epoch),
                ('timestep', self._timestep),
                ('timesteps_total', self._total_timestep),
                ('train-steps', self._num_train_steps),
            )))

            if self._eval_render_mode is not None and hasattr(
                    evaluation_environment, 'render_rollouts'):
                training_environment.render_rollouts(evaluation_paths)

            yield diagnostics

        self.sampler.terminate()

        self._training_after_hook()

        self._training_progress.close()

        yield {'done': True, **diagnostics}

    def train(self, *args, **kwargs):
        # Public entry point; returns the training generator from _train.
        return self._train(*args, **kwargs)

    def _log_policy(self):
        # Snapshot the current policy weights to <log_dir>/models/policy_<t>.pkl.
        save_path = os.path.join(self._log_dir, 'models')
        filesystem.mkdir(save_path)
        weights = self._policy.get_weights()
        data = {'policy_weights': weights}
        full_path = os.path.join(save_path, 'policy_{}.pkl'.format(self._total_timestep))
        print('Saving policy to: {}'.format(full_path))
        pickle.dump(data, open(full_path, 'wb'))

    def _log_model(self):
        # Persist the dynamics ensemble under <log_dir>/models.
        save_path = os.path.join(self._log_dir, 'models')
        filesystem.mkdir(save_path)
        print('Saving model to: {}'.format(save_path))
        self._model.save(save_path, self._total_timestep)

    def _set_rollout_length(self):
        # Linearly anneal the model-rollout horizon from min_length to
        # max_length between min_epoch and max_epoch (clamped at both ends).
        min_epoch, max_epoch, min_length, max_length = self._rollout_schedule
        if self._epoch <= min_epoch:
            y = min_length
        else:
            dx = (self._epoch - min_epoch) / (max_epoch - min_epoch)
            dx = min(dx, 1)
            y = dx * (max_length - min_length) + min_length

        self._rollout_length = int(y)
        print('[ Model Length ] Epoch: {} (min: {}, max: {}) | Length: {} (min: {} , max: {})'.format(
            self._epoch, min_epoch, max_epoch, self._rollout_length, min_length, max_length
        ))

    def _reallocate_model_pool(self):
        # (Re)size the model-rollout replay pool so that it can retain
        # `_model_retain_epochs` epochs worth of rollouts at the current
        # rollout length; existing samples are carried over on resize.
        obs_space = self._pool._observation_space
        act_space = self._pool._action_space

        rollouts_per_epoch = self._rollout_batch_size * self._epoch_length / self._model_train_freq
        model_steps_per_epoch = int(self._rollout_length * rollouts_per_epoch)
        new_pool_size = self._model_retain_epochs * model_steps_per_epoch

        if not hasattr(self, '_model_pool'):
            print('[ MEEE ] Initializing new model pool with size {:.2e}'.format(
                new_pool_size
            ))
            self._model_pool = WeightedReplayPool(obs_space, act_space, new_pool_size)

        elif self._model_pool._max_size != new_pool_size:
            print('[ MEEE ] Updating model pool | {:.2e} --> {:.2e}'.format(
                self._model_pool._max_size, new_pool_size
            ))
            samples = self._model_pool.return_all_samples()
            new_pool = WeightedReplayPool(obs_space, act_space, new_pool_size)
            new_pool.add_samples(samples)
            assert self._model_pool.size == new_pool.size
            self._model_pool = new_pool

    def _train_model(self, **kwargs):
        # Fit the dynamics ensemble on every real transition collected so far.
        env_samples = self._pool.return_all_samples()
        train_inputs, train_outputs = format_samples_for_training(env_samples)
        model_metrics = self._model.train(train_inputs, train_outputs, **kwargs)
        return model_metrics

    def _rollout_model(self, rollout_batch_size, **kwargs):
        # Roll the policy through the learned model for _rollout_length steps,
        # starting from states sampled out of the real replay pool. Each
        # generated transition stores the model's predictive std ('stds'),
        # later used as an uncertainty weight in the SAC losses.
        print('[ Model Rollout ] Starting | Epoch: {} | Rollout length: {} | Batch size: {}'.format(
            self._epoch, self._rollout_length, rollout_batch_size
        ))
        batch = self.sampler.random_batch(rollout_batch_size)
        obs = batch['observations']
        steps_added = []
        for i in range(self._rollout_length):
            act = self._policy.actions_np(obs)

            next_obs, rew, term, info = self.fake_env.step(obs, act, **kwargs)
            steps_added.append(len(obs))

            samples = {'observations': obs, 'actions': act, 'next_observations': next_obs, 'rewards': rew, 'terminals': term, 'stds': info['dev'][:,None]}
            self._model_pool.add_samples(samples)

            # Continue only from non-terminal states.
            nonterm_mask = ~term.squeeze(-1)
            if nonterm_mask.sum() == 0:
                print('[ Model Rollout ] Breaking early: {} | {} / {}'.format(i, nonterm_mask.sum(), nonterm_mask.shape))
                break

            obs = next_obs[nonterm_mask]

        mean_rollout_length = sum(steps_added) / rollout_batch_size
        rollout_stats = {'mean_rollout_length': mean_rollout_length}
        print('[ Model Rollout ] Added: {:.1e} | Model pool: {:.1e} (max {:.1e}) | Length: {} | Train rep: {}'.format(
            sum(steps_added), self._model_pool.size, self._model_pool._max_size, mean_rollout_length, self._n_train_repeat
        ))
        return rollout_stats

    def _visualize_model(self, env, timestep):
        # Render policy behavior in the learned model; the MuJoCo env state is
        # saved before and restored after, so visualization is side-effect free.
        ## save env state
        state = env.unwrapped.state_vector()
        qpos_dim = len(env.unwrapped.sim.data.qpos)
        qpos = state[:qpos_dim]
        qvel = state[qpos_dim:]

        print('[ Visualization ] Starting | Epoch {} | Log dir: {}\n'.format(self._epoch, self._log_dir))
        visualize_policy(env, self.fake_env, self._policy, self._writer, timestep)
        print('[ Visualization ] Done')
        ## set env state
        env.unwrapped.set_state(qpos, qvel)

    def _training_batch(self, batch_size=None):
        # Mix real and model-generated samples at ratio `_real_ratio`.
        batch_size = batch_size or self.sampler._batch_size
        env_batch_size = int(batch_size*self._real_ratio)
        model_batch_size = batch_size - env_batch_size

        ## can sample from the env pool even if env_batch_size == 0
        env_batch = self._pool.random_batch(env_batch_size)

        if model_batch_size > 0:
            model_batch = self._model_pool.random_batch(model_batch_size)

            keys = env_batch.keys()
            batch = {k: np.concatenate((env_batch[k], model_batch[k]), axis=0) for k in keys}
        else:
            ## if real_ratio == 1.0, no model pool was ever allocated,
            ## so skip the model pool sampling
            batch = env_batch
        return batch

    def _init_global_step(self):
        # Shared TF global step, incremented once per training op run.
        self.global_step = training_util.get_or_create_global_step()
        self._training_ops.update({
            'increment_global_step': training_util._increment_global_step(1)
        })

    def _init_placeholders(self):
        """Create input placeholders for the SAC algorithm.

        Creates `tf.placeholder`s for:
            - observation
            - next observation
            - action
            - reward
            - terminals
            - stds
        """
        self._iteration_ph = tf.placeholder(
            tf.int64, shape=None, name='iteration')

        self._observations_ph = tf.placeholder(
            tf.float32,
            shape=(None, *self._observation_shape),
            name='observation',
        )

        self._next_observations_ph = tf.placeholder(
            tf.float32,
            shape=(None, *self._observation_shape),
            name='next_observation',
        )

        self._actions_ph = tf.placeholder(
            tf.float32,
            shape=(None, *self._action_shape),
            name='actions',
        )

        self._rewards_ph = tf.placeholder(
            tf.float32,
            shape=(None, 1),
            name='rewards',
        )

        # Model predictive std per transition (0 for real samples is
        # presumed — TODO confirm against WeightedReplayPool defaults).
        self._stds_ph = tf.placeholder(
            tf.float32,
            shape=(None, 1),
            name='stds',
        )

        self._terminals_ph = tf.placeholder(
            tf.float32,
            shape=(None, 1),
            name='terminals',
        )

        if self._store_extra_policy_info:
            self._log_pis_ph = tf.placeholder(
                tf.float32,
                shape=(None, 1),
                name='log_pis',
            )
            self._raw_actions_ph = tf.placeholder(
                tf.float32,
                shape=(None, *self._action_shape),
                name='raw_actions',
            )

    def _get_Q_target(self):
        # Soft Bellman backup: r + gamma * (min_i Q_target_i(s', a') - alpha*logpi).
        next_actions = self._policy.actions([self._next_observations_ph])
        next_log_pis = self._policy.log_pis(
            [self._next_observations_ph], next_actions)

        next_Qs_values = tuple(
            Q([self._next_observations_ph, next_actions])
            for Q in self._Q_targets)

        min_next_Q = tf.reduce_min(next_Qs_values, axis=0)
        next_value = min_next_Q - self._alpha * next_log_pis

        Q_target = td_target(
            reward=self._reward_scale * self._rewards_ph,
            discount=self._discount,
            next_value=(1 - self._terminals_ph) * next_value)

        return Q_target

    def _init_critic_update(self):
        """Create minimization operation for critic Q-function.

        Creates a `tf.optimizer.minimize` operation for updating
        critic Q-function with gradient descent, and appends it to
        `self._training_ops` attribute.
        """
        Q_target = tf.stop_gradient(self._get_Q_target())

        assert Q_target.shape.as_list() == [None, 1]

        # weighted critic loss: sigmoid(-std * T) down-weights samples with
        # high model uncertainty (weight -> 0.5 at std=0, -> 0 for large std).
        temperature_critic = 5.0
        weight_target_Q = tf.stop_gradient(tf.sigmoid(-self._stds_ph * temperature_critic))

        Q_values = self._Q_values = tuple(
            Q([self._observations_ph, self._actions_ph])
            for Q in self._Qs)

        Q_losses = self._Q_losses = tuple(
            tf.losses.mean_squared_error(
                labels=Q_target, predictions=Q_value, weights=weight_target_Q)
            for Q_value in Q_values)

        self._Q_optimizers = tuple(
            tf.train.AdamOptimizer(
                learning_rate=self._Q_lr,
                name='{}_{}_optimizer'.format(Q._name, i)
            ) for i, Q in enumerate(self._Qs))
        Q_training_ops = tuple(
            tf.contrib.layers.optimize_loss(
                Q_loss,
                self.global_step,
                learning_rate=self._Q_lr,
                optimizer=Q_optimizer,
                variables=Q.trainable_variables,
                increment_global_step=False,
                summaries=((
                    "loss", "gradients", "gradient_norm", "global_gradient_norm"
                ) if self._tf_summaries else ()))
            for i, (Q, Q_loss, Q_optimizer)
            in enumerate(zip(self._Qs, Q_losses, self._Q_optimizers)))

        self._training_ops.update({'Q': tf.group(Q_training_ops)})

    def _init_actor_update(self):
        """Create minimization operations for policy and entropy.

        Creates a `tf.optimizer.minimize` operations for updating
        policy and entropy with gradient descent, and adds them to
        `self._training_ops` attribute.
        """
        actions = self._policy.actions([self._observations_ph])
        log_pis = self._policy.log_pis([self._observations_ph], actions)

        assert log_pis.shape.as_list() == [None, 1]

        # Learned temperature alpha (SAC automatic entropy tuning).
        log_alpha = self._log_alpha = tf.get_variable(
            'log_alpha',
            dtype=tf.float32,
            initializer=0.0)
        alpha = tf.exp(log_alpha)

        if isinstance(self._target_entropy, Number):
            alpha_loss = -tf.reduce_mean(
                log_alpha * tf.stop_gradient(log_pis + self._target_entropy))

            self._alpha_optimizer = tf.train.AdamOptimizer(
                self._policy_lr, name='alpha_optimizer')
            self._alpha_train_op = self._alpha_optimizer.minimize(
                loss=alpha_loss, var_list=[log_alpha])
            self._training_ops.update({
                'temperature_alpha': self._alpha_train_op
            })

        self._alpha = alpha

        if self._action_prior == 'normal':
            policy_prior = tf.contrib.distributions.MultivariateNormalDiag(
                loc=tf.zeros(self._action_shape),
                scale_diag=tf.ones(self._action_shape))
            policy_prior_log_probs = policy_prior.log_prob(actions)
        elif self._action_prior == 'uniform':
            policy_prior_log_probs = 0.0

        Q_log_targets = tuple(
            Q([self._observations_ph, actions])
            for Q in self._Qs)
        min_Q_log_target = tf.reduce_min(Q_log_targets, axis=0)

        # weighted actor loss: weight in (0.5, 1.5), highest for low-std
        # (reliable) samples; offset by 0.5 so all samples keep some gradient.
        temperature_act = 5.0
        weight_actor_Q = tf.stop_gradient(tf.sigmoid(-self._stds_ph * temperature_act) + 0.5)

        if self._reparameterize:
            policy_kl_losses = (
                alpha * log_pis
                - min_Q_log_target
                - policy_prior_log_probs) * weight_actor_Q
        else:
            raise NotImplementedError

        assert policy_kl_losses.shape.as_list() == [None, 1]

        policy_loss = tf.reduce_mean(policy_kl_losses)

        self._policy_optimizer = tf.train.AdamOptimizer(
            learning_rate=self._policy_lr,
            name="policy_optimizer")
        policy_train_op = tf.contrib.layers.optimize_loss(
            policy_loss,
            self.global_step,
            learning_rate=self._policy_lr,
            optimizer=self._policy_optimizer,
            variables=self._policy.trainable_variables,
            increment_global_step=False,
            summaries=(
                "loss", "gradients", "gradient_norm", "global_gradient_norm"
            ) if self._tf_summaries else ())

        self._training_ops.update({'policy_train_op': policy_train_op})

    def _init_training(self):
        # Hard-copy critic weights into the target critics.
        self._update_target(tau=1.0)

    def _update_target(self, tau=None):
        # Polyak-average critic weights into the target networks.
        # NOTE(review): `tau or self._tau` means an explicit tau=0.0 falls
        # back to self._tau — only 1.0/None are ever passed here.
        tau = tau or self._tau

        for Q, Q_target in zip(self._Qs, self._Q_targets):
            source_params = Q.get_weights()
            target_params = Q_target.get_weights()
            Q_target.set_weights([
                tau * source + (1.0 - tau) * target
                for source, target in zip(source_params, target_params)
            ])

    def _do_training(self, iteration, batch):
        """Runs the operations for updating training and target ops."""

        self._training_progress.update()
        self._training_progress.set_description()

        feed_dict = self._get_feed_dict(iteration, batch)

        self._session.run(self._training_ops, feed_dict)

        if iteration % self._target_update_interval == 0:
            # Run target ops here.
            self._update_target()

    def _get_feed_dict(self, iteration, batch):
        """Construct TensorFlow feed_dict from sample batch."""

        feed_dict = {
            self._observations_ph: batch['observations'],
            self._actions_ph: batch['actions'],
            self._next_observations_ph: batch['next_observations'],
            self._rewards_ph: batch['rewards'],
            self._terminals_ph: batch['terminals'],
            self._stds_ph: batch['stds'],
        }

        if self._store_extra_policy_info:
            feed_dict[self._log_pis_ph] = batch['log_pis']
            feed_dict[self._raw_actions_ph] = batch['raw_actions']

        if iteration is not None:
            feed_dict[self._iteration_ph] = iteration

        return feed_dict

    def get_diagnostics(self,
                        iteration,
                        batch,
                        training_paths,
                        evaluation_paths):
        """Return diagnostic information as ordered dictionary.

        Records mean and standard deviation of Q-function and state
        value function, and TD-loss (mean squared Bellman error)
        for the sample batch.

        Also calls the `draw` method of the plotter, if plotter defined.
        """

        feed_dict = self._get_feed_dict(iteration, batch)

        (Q_values, Q_losses, alpha, global_step) = self._session.run(
            (self._Q_values,
             self._Q_losses,
             self._alpha,
             self.global_step),
            feed_dict)

        diagnostics = OrderedDict({
            'Q-avg': np.mean(Q_values),
            'Q-std': np.std(Q_values),
            'Q_loss': np.mean(Q_losses),
            'alpha': alpha,
        })

        policy_diagnostics = self._policy.get_diagnostics(
            batch['observations'])
        diagnostics.update({
            f'policy/{key}': value
            for key, value in policy_diagnostics.items()
        })

        if self._plotter:
            self._plotter.draw()

        return diagnostics

    @property
    def tf_saveables(self):
        # Optimizer and temperature variables to include in checkpoints.
        saveables = {
            '_policy_optimizer': self._policy_optimizer,
            **{
                f'Q_optimizer_{i}': optimizer
                for i, optimizer in enumerate(self._Q_optimizers)
            },
            '_log_alpha': self._log_alpha,
        }

        if hasattr(self, '_alpha_optimizer'):
            saveables['_alpha_optimizer'] = self._alpha_optimizer

        return saveables
| 38.498013 | 155 | 0.588729 | r
from itertools import count
import gtimer as gt
import pdb
import numpy as np
import tensorflow as tf
from tensorflow.python.training import training_util
from softlearning.algorithms.rl_algorithm import RLAlgorithm
from softlearning.replay_pools.simple_replay_pool import WeightedReplayPool
from mbpo.models.constructor import construct_model, format_samples_for_training
from mbpo.models.fake_env import FakeEnv
from mbpo.utils.writer import Writer
from mbpo.utils.visualization import visualize_policy
from mbpo.utils.logging import Progress
import mbpo.utils.filesystem as filesystem
def td_target(reward, discount, next_value):
    """Return the one-step TD target: reward + discount * next_value."""
    return reward + discount * next_value
class MEEE(RLAlgorithm):
def __init__(
self,
training_environment,
evaluation_environment,
policy,
Qs,
pool,
static_fns,
plotter=None,
tf_summaries=False,
lr=3e-4,
reward_scale=1.0,
target_entropy='auto',
discount=0.99,
tau=5e-3,
target_update_interval=1,
action_prior='uniform',
reparameterize=False,
store_extra_policy_info=False,
deterministic=False,
model_train_freq=250,
num_networks=7,
num_elites=5,
model_retain_epochs=20,
rollout_batch_size=100e3,
real_ratio=0.1,
rollout_schedule=[20,100,1,1],
hidden_dim=200,
max_model_t=None,
**kwargs,
):
super(MEEE, self).__init__(**kwargs)
obs_dim = np.prod(training_environment.observation_space.shape)
act_dim = np.prod(training_environment.action_space.shape)
self._model = construct_model(obs_dim=obs_dim, act_dim=act_dim, hidden_dim=hidden_dim, num_networks=num_networks, num_elites=num_elites)
self._static_fns = static_fns
self.fake_env = FakeEnv(self._model, self._static_fns)
self._rollout_schedule = rollout_schedule
self._max_model_t = max_model_t
self._model_retain_epochs = model_retain_epochs
self._model_train_freq = model_train_freq
self._rollout_batch_size = int(rollout_batch_size)
self._deterministic = deterministic
self._real_ratio = real_ratio
self._log_dir = os.getcwd()
self._writer = Writer(self._log_dir)
self._training_environment = training_environment
self._evaluation_environment = evaluation_environment
self._policy = policy
self._Qs = Qs
self._Q_targets = tuple(tf.keras.models.clone_model(Q) for Q in Qs)
self._pool = pool
self._plotter = plotter
self._tf_summaries = tf_summaries
self._policy_lr = lr
self._Q_lr = lr
self._reward_scale = reward_scale
self._target_entropy = (
-np.prod(self._training_environment.action_space.shape)
if target_entropy == 'auto'
else target_entropy)
print('[ MEEE ] Target entropy: {}'.format(self._target_entropy))
self._discount = discount
self._tau = tau
self._target_update_interval = target_update_interval
self._action_prior = action_prior
self._reparameterize = reparameterize
self._store_extra_policy_info = store_extra_policy_info
observation_shape = self._training_environment.active_observation_shape
action_shape = self._training_environment.action_space.shape
assert len(observation_shape) == 1, observation_shape
self._observation_shape = observation_shape
assert len(action_shape) == 1, action_shape
self._action_shape = action_shape
self._build()
def _build(self):
self._training_ops = {}
self._init_global_step()
self._init_placeholders()
self._init_actor_update()
self._init_critic_update()
def _train(self):
training_environment = self._training_environment
evaluation_environment = self._evaluation_environment
policy = self._policy
pool = self._pool
model_metrics = {}
if not self._training_started:
self._init_training()
self._initial_exploration_hook(
training_environment, self._initial_exploration_policy, pool)
self.sampler.initialize(training_environment, policy, pool)
gt.reset_root()
gt.rename_root('RLAlgorithm')
gt.set_def_unique(False)
self._training_before_hook()
for self._epoch in gt.timed_for(range(self._epoch, self._n_epochs)):
self._epoch_before_hook()
gt.stamp('epoch_before_hook')
self._training_progress = Progress(self._epoch_length * self._n_train_repeat)
start_samples = self.sampler._total_samples
for i in count():
samples_now = self.sampler._total_samples
self._timestep = samples_now - start_samples
if (samples_now >= start_samples + self._epoch_length
and self.ready_to_train):
break
self._timestep_before_hook()
gt.stamp('timestep_before_hook')
if self._timestep % self._model_train_freq == 0 and self._real_ratio < 1.0:
self._training_progress.pause()
print('[ MEEE ] log_dir: {} | ratio: {}'.format(self._log_dir, self._real_ratio))
print('[ MEEE ] Training model at epoch {} | freq {} | timestep {} (total: {}) | epoch train steps: {} (total: {})'.format(
self._epoch, self._model_train_freq, self._timestep, self._total_timestep, self._train_steps_this_epoch, self._num_train_steps)
)
model_train_metrics = self._train_model(batch_size=256, max_epochs=None, holdout_ratio=0.2, max_t=self._max_model_t)
model_metrics.update(model_train_metrics)
gt.stamp('epoch_train_model')
self._set_rollout_length()
self._reallocate_model_pool()
model_rollout_metrics = self._rollout_model(rollout_batch_size=self._rollout_batch_size, deterministic=self._deterministic)
model_metrics.update(model_rollout_metrics)
gt.stamp('epoch_rollout_model')
self._training_progress.resume()
self._do_sampling(timestep=self._total_timestep, disturb=True, fake_env=self.fake_env, Qs = self._Qs)
gt.stamp('sample')
if self.ready_to_train:
self._do_training_repeats(timestep=self._total_timestep)
gt.stamp('train')
self._timestep_after_hook()
gt.stamp('timestep_after_hook')
training_paths = self.sampler.get_last_n_paths(
math.ceil(self._epoch_length / self.sampler._max_path_length))
gt.stamp('training_paths')
evaluation_paths = self._evaluation_paths(
policy, evaluation_environment)
gt.stamp('evaluation_paths')
training_metrics = self._evaluate_rollouts(
training_paths, training_environment)
gt.stamp('training_metrics')
if evaluation_paths:
evaluation_metrics = self._evaluate_rollouts(
evaluation_paths, evaluation_environment)
gt.stamp('evaluation_metrics')
else:
evaluation_metrics = {}
self._epoch_after_hook(training_paths)
gt.stamp('epoch_after_hook')
sampler_diagnostics = self.sampler.get_diagnostics()
diagnostics = self.get_diagnostics(
iteration=self._total_timestep,
batch=self._evaluation_batch(),
training_paths=training_paths,
evaluation_paths=evaluation_paths)
time_diagnostics = gt.get_times().stamps.itrs
diagnostics.update(OrderedDict((
*(
(f'evaluation/{key}', evaluation_metrics[key])
for key in sorted(evaluation_metrics.keys())
),
*(
(f'training/{key}', training_metrics[key])
for key in sorted(training_metrics.keys())
),
*(
(f'times/{key}', time_diagnostics[key][-1])
for key in sorted(time_diagnostics.keys())
),
*(
(f'sampler/{key}', sampler_diagnostics[key])
for key in sorted(sampler_diagnostics.keys())
),
*(
(f'model/{key}', model_metrics[key])
for key in sorted(model_metrics.keys())
),
('epoch', self._epoch),
('timestep', self._timestep),
('timesteps_total', self._total_timestep),
('train-steps', self._num_train_steps),
)))
if self._eval_render_mode is not None and hasattr(
evaluation_environment, 'render_rollouts'):
training_environment.render_rollouts(evaluation_paths)
yield diagnostics
self.sampler.terminate()
self._training_after_hook()
self._training_progress.close()
yield {'done': True, **diagnostics}
    def train(self, *args, **kwargs):
        """Public training entry point; forwards all arguments to the
        `_train` generator and returns it to the caller."""
        return self._train(*args, **kwargs)
def _log_policy(self):
save_path = os.path.join(self._log_dir, 'models')
filesystem.mkdir(save_path)
weights = self._policy.get_weights()
data = {'policy_weights': weights}
full_path = os.path.join(save_path, 'policy_{}.pkl'.format(self._total_timestep))
print('Saving policy to: {}'.format(full_path))
pickle.dump(data, open(full_path, 'wb'))
def _log_model(self):
save_path = os.path.join(self._log_dir, 'models')
filesystem.mkdir(save_path)
print('Saving model to: {}'.format(save_path))
self._model.save(save_path, self._total_timestep)
def _set_rollout_length(self):
min_epoch, max_epoch, min_length, max_length = self._rollout_schedule
if self._epoch <= min_epoch:
y = min_length
else:
dx = (self._epoch - min_epoch) / (max_epoch - min_epoch)
dx = min(dx, 1)
y = dx * (max_length - min_length) + min_length
self._rollout_length = int(y)
print('[ Model Length ] Epoch: {} (min: {}, max: {}) | Length: {} (min: {} , max: {})'.format(
self._epoch, min_epoch, max_epoch, self._rollout_length, min_length, max_length
))
def _reallocate_model_pool(self):
obs_space = self._pool._observation_space
act_space = self._pool._action_space
rollouts_per_epoch = self._rollout_batch_size * self._epoch_length / self._model_train_freq
model_steps_per_epoch = int(self._rollout_length * rollouts_per_epoch)
new_pool_size = self._model_retain_epochs * model_steps_per_epoch
if not hasattr(self, '_model_pool'):
print('[ MEEE ] Initializing new model pool with size {:.2e}'.format(
new_pool_size
))
self._model_pool = WeightedReplayPool(obs_space, act_space, new_pool_size)
elif self._model_pool._max_size != new_pool_size:
print('[ MEEE ] Updating model pool | {:.2e} --> {:.2e}'.format(
self._model_pool._max_size, new_pool_size
))
samples = self._model_pool.return_all_samples()
new_pool = WeightedReplayPool(obs_space, act_space, new_pool_size)
new_pool.add_samples(samples)
assert self._model_pool.size == new_pool.size
self._model_pool = new_pool
def _train_model(self, **kwargs):
env_samples = self._pool.return_all_samples()
train_inputs, train_outputs = format_samples_for_training(env_samples)
model_metrics = self._model.train(train_inputs, train_outputs, **kwargs)
return model_metrics
def _rollout_model(self, rollout_batch_size, **kwargs):
print('[ Model Rollout ] Starting | Epoch: {} | Rollout length: {} | Batch size: {}'.format(
self._epoch, self._rollout_length, rollout_batch_size
))
batch = self.sampler.random_batch(rollout_batch_size)
obs = batch['observations']
steps_added = []
for i in range(self._rollout_length):
act = self._policy.actions_np(obs)
next_obs, rew, term, info = self.fake_env.step(obs, act, **kwargs)
steps_added.append(len(obs))
samples = {'observations': obs, 'actions': act, 'next_observations': next_obs, 'rewards': rew, 'terminals': term, 'stds': info['dev'][:,None]}
self._model_pool.add_samples(samples)
nonterm_mask = ~term.squeeze(-1)
if nonterm_mask.sum() == 0:
print('[ Model Rollout ] Breaking early: {} | {} / {}'.format(i, nonterm_mask.sum(), nonterm_mask.shape))
break
obs = next_obs[nonterm_mask]
mean_rollout_length = sum(steps_added) / rollout_batch_size
rollout_stats = {'mean_rollout_length': mean_rollout_length}
print('[ Model Rollout ] Added: {:.1e} | Model pool: {:.1e} (max {:.1e}) | Length: {} | Train rep: {}'.format(
sum(steps_added), self._model_pool.size, self._model_pool._max_size, mean_rollout_length, self._n_train_repeat
))
return rollout_stats
def _visualize_model(self, env, timestep):
env.unwrapped.state_vector()
qpos_dim = len(env.unwrapped.sim.data.qpos)
qpos = state[:qpos_dim]
qvel = state[qpos_dim:]
print('[ Visualization ] Starting | Epoch {} | Log dir: {}\n'.format(self._epoch, self._log_dir))
visualize_policy(env, self.fake_env, self._policy, self._writer, timestep)
print('[ Visualization ] Done')
rapped.set_state(qpos, qvel)
def _training_batch(self, batch_size=None):
batch_size = batch_size or self.sampler._batch_size
env_batch_size = int(batch_size*self._real_ratio)
model_batch_size = batch_size - env_batch_size
)
if model_batch_size > 0:
model_batch = self._model_pool.random_batch(model_batch_size)
keys = env_batch.keys()
batch = {k: np.concatenate((env_batch[k], model_batch[k]), axis=0) for k in keys}
else:
ep(self):
self.global_step = training_util.get_or_create_global_step()
self._training_ops.update({
'increment_global_step': training_util._increment_global_step(1)
})
def _init_placeholders(self):
self._iteration_ph = tf.placeholder(
tf.int64, shape=None, name='iteration')
self._observations_ph = tf.placeholder(
tf.float32,
shape=(None, *self._observation_shape),
name='observation',
)
self._next_observations_ph = tf.placeholder(
tf.float32,
shape=(None, *self._observation_shape),
name='next_observation',
)
self._actions_ph = tf.placeholder(
tf.float32,
shape=(None, *self._action_shape),
name='actions',
)
self._rewards_ph = tf.placeholder(
tf.float32,
shape=(None, 1),
name='rewards',
)
self._stds_ph = tf.placeholder(
tf.float32,
shape=(None, 1),
name='stds',
)
self._terminals_ph = tf.placeholder(
tf.float32,
shape=(None, 1),
name='terminals',
)
if self._store_extra_policy_info:
self._log_pis_ph = tf.placeholder(
tf.float32,
shape=(None, 1),
name='log_pis',
)
self._raw_actions_ph = tf.placeholder(
tf.float32,
shape=(None, *self._action_shape),
name='raw_actions',
)
def _get_Q_target(self):
next_actions = self._policy.actions([self._next_observations_ph])
next_log_pis = self._policy.log_pis(
[self._next_observations_ph], next_actions)
next_Qs_values = tuple(
Q([self._next_observations_ph, next_actions])
for Q in self._Q_targets)
min_next_Q = tf.reduce_min(next_Qs_values, axis=0)
next_value = min_next_Q - self._alpha * next_log_pis
Q_target = td_target(
reward=self._reward_scale * self._rewards_ph,
discount=self._discount,
next_value=(1 - self._terminals_ph) * next_value)
return Q_target
    def _init_critic_update(self):
        """Create the minimized TD loss for each Q-function.

        Builds one MSE loss per Q against a shared stop-gradient TD
        target and registers a grouped optimize_loss op under the 'Q'
        key of ``self._training_ops``.  Left byte-identical: TF1
        variable/optimizer creation here is order- and name-sensitive.
        """
        # No gradients may flow into the target computation.
        Q_target = tf.stop_gradient(self._get_Q_target())
        assert Q_target.shape.as_list() == [None, 1]
        # Per-sample weights: sigmoid(-std * T) down-weights transitions
        # whose model std is large (0.5 at std == 0, decaying with std).
        temperature_critic = 5.0
        weight_target_Q = tf.stop_gradient(tf.sigmoid(-self._stds_ph * temperature_critic))
        Q_values = self._Q_values = tuple(
            Q([self._observations_ph, self._actions_ph])
            for Q in self._Qs)
        # Weighted MSE against the shared target, one loss per ensemble member.
        Q_losses = self._Q_losses = tuple(
            tf.losses.mean_squared_error(
                labels=Q_target, predictions=Q_value, weights=weight_target_Q)
            for Q_value in Q_values)
        # One Adam optimizer per Q so their slot variables stay separate.
        self._Q_optimizers = tuple(
            tf.train.AdamOptimizer(
                learning_rate=self._Q_lr,
                name='{}_{}_optimizer'.format(Q._name, i)
            ) for i, Q in enumerate(self._Qs))
        Q_training_ops = tuple(
            tf.contrib.layers.optimize_loss(
                Q_loss,
                self.global_step,
                learning_rate=self._Q_lr,
                optimizer=Q_optimizer,
                variables=Q.trainable_variables,
                increment_global_step=False,
                summaries=((
                    "loss", "gradients", "gradient_norm", "global_gradient_norm"
                ) if self._tf_summaries else ()))
            for i, (Q, Q_loss, Q_optimizer)
            in enumerate(zip(self._Qs, Q_losses, self._Q_optimizers)))
        # Group so one session.run step updates the whole ensemble.
        self._training_ops.update({'Q': tf.group(Q_training_ops)})
    def _init_actor_update(self):
        """Create the policy (actor) and temperature (alpha) updates.

        Registers 'temperature_alpha' (when target entropy is numeric)
        and 'policy_train_op' in ``self._training_ops``.  Left
        byte-identical: TF1 get_variable/optimizer creation here is
        order- and name-scope sensitive.
        """
        actions = self._policy.actions([self._observations_ph])
        log_pis = self._policy.log_pis([self._observations_ph], actions)
        assert log_pis.shape.as_list() == [None, 1]
        # Learnable log of the entropy temperature; optimizing log_alpha
        # keeps alpha = exp(log_alpha) positive.
        log_alpha = self._log_alpha = tf.get_variable(
            'log_alpha',
            dtype=tf.float32,
            initializer=0.0)
        alpha = tf.exp(log_alpha)
        if isinstance(self._target_entropy, Number):
            # Automatic temperature tuning toward the target entropy.
            alpha_loss = -tf.reduce_mean(
                log_alpha * tf.stop_gradient(log_pis + self._target_entropy))
            self._alpha_optimizer = tf.train.AdamOptimizer(
                self._policy_lr, name='alpha_optimizer')
            self._alpha_train_op = self._alpha_optimizer.minimize(
                loss=alpha_loss, var_list=[log_alpha])
            self._training_ops.update({
                'temperature_alpha': self._alpha_train_op
            })
        self._alpha = alpha
        if self._action_prior == 'normal':
            # Standard-normal prior over actions adds its log-prob to the
            # actor objective.
            policy_prior = tf.contrib.distributions.MultivariateNormalDiag(
                loc=tf.zeros(self._action_shape),
                scale_diag=tf.ones(self._action_shape))
            policy_prior_log_probs = policy_prior.log_prob(actions)
        elif self._action_prior == 'uniform':
            # Uniform prior contributes a constant; use 0.
            policy_prior_log_probs = 0.0
        Q_log_targets = tuple(
            Q([self._observations_ph, actions])
            for Q in self._Qs)
        # Pessimistic value estimate: minimum over the Q ensemble.
        min_Q_log_target = tf.reduce_min(Q_log_targets, axis=0)
        # Per-sample actor weights from model std; note the +0.5 offset
        # relative to the critic weighting.
        temperature_act = 5.0
        weight_actor_Q = tf.stop_gradient(tf.sigmoid(-self._stds_ph * temperature_act) + 0.5)
        if self._reparameterize:
            # Reparameterized SAC actor loss, weighted per sample.
            policy_kl_losses = (
                alpha * log_pis
                - min_Q_log_target
                - policy_prior_log_probs) * weight_actor_Q
        else:
            # Only the reparameterized gradient estimator is supported.
            raise NotImplementedError
        assert policy_kl_losses.shape.as_list() == [None, 1]
        policy_loss = tf.reduce_mean(policy_kl_losses)
        self._policy_optimizer = tf.train.AdamOptimizer(
            learning_rate=self._policy_lr,
            name="policy_optimizer")
        policy_train_op = tf.contrib.layers.optimize_loss(
            policy_loss,
            self.global_step,
            learning_rate=self._policy_lr,
            optimizer=self._policy_optimizer,
            variables=self._policy.trainable_variables,
            increment_global_step=False,
            summaries=(
                "loss", "gradients", "gradient_norm", "global_gradient_norm"
            ) if self._tf_summaries else ())
        self._training_ops.update({'policy_train_op': policy_train_op})
    def _init_training(self):
        # Hard-copy (tau=1.0) the online Q weights into the target
        # networks so both start identical before any soft updates.
        self._update_target(tau=1.0)
def _update_target(self, tau=None):
tau = tau or self._tau
for Q, Q_target in zip(self._Qs, self._Q_targets):
source_params = Q.get_weights()
target_params = Q_target.get_weights()
Q_target.set_weights([
tau * source + (1.0 - tau) * target
for source, target in zip(source_params, target_params)
])
def _do_training(self, iteration, batch):
self._training_progress.update()
self._training_progress.set_description()
feed_dict = self._get_feed_dict(iteration, batch)
self._session.run(self._training_ops, feed_dict)
if iteration % self._target_update_interval == 0:
self._update_target()
def _get_feed_dict(self, iteration, batch):
feed_dict = {
self._observations_ph: batch['observations'],
self._actions_ph: batch['actions'],
self._next_observations_ph: batch['next_observations'],
self._rewards_ph: batch['rewards'],
self._terminals_ph: batch['terminals'],
self._stds_ph: batch['stds'],
}
if self._store_extra_policy_info:
feed_dict[self._log_pis_ph] = batch['log_pis']
feed_dict[self._raw_actions_ph] = batch['raw_actions']
if iteration is not None:
feed_dict[self._iteration_ph] = iteration
return feed_dict
    def get_diagnostics(self,
                        iteration,
                        batch,
                        training_paths,
                        evaluation_paths):
        """Return an ordered dict of Q/alpha/policy diagnostics.

        `training_paths` and `evaluation_paths` are part of the
        interface but unused here.  Also triggers the plotter, if any.
        """
        feed_dict = self._get_feed_dict(iteration, batch)
        # Evaluate all diagnostic tensors in a single session.run.
        (Q_values, Q_losses, alpha, global_step) = self._session.run(
            (self._Q_values,
             self._Q_losses,
             self._alpha,
             self.global_step),
            feed_dict)
        diagnostics = OrderedDict({
            'Q-avg': np.mean(Q_values),
            'Q-std': np.std(Q_values),
            'Q_loss': np.mean(Q_losses),
            'alpha': alpha,
        })
        policy_diagnostics = self._policy.get_diagnostics(
            batch['observations'])
        # Namespace the policy's own diagnostics under 'policy/'.
        diagnostics.update({
            f'policy/{key}': value
            for key, value in policy_diagnostics.items()
        })
        if self._plotter:
            self._plotter.draw()
        return diagnostics
@property
def tf_saveables(self):
saveables = {
'_policy_optimizer': self._policy_optimizer,
**{
f'Q_optimizer_{i}': optimizer
for i, optimizer in enumerate(self._Q_optimizers)
},
'_log_alpha': self._log_alpha,
}
if hasattr(self, '_alpha_optimizer'):
saveables['_alpha_optimizer'] = self._alpha_optimizer
return saveables
| true | true |
f7302be119c81384731038eacea54af1c0b78722 | 2,312 | py | Python | customSDK/servicefabric/models/service_backup_configuration_info.py | hans-olav/service-fabric-cli | baf27342ad4b9f74dee1954e60ed5b40ebcf039d | [
"MIT"
] | null | null | null | customSDK/servicefabric/models/service_backup_configuration_info.py | hans-olav/service-fabric-cli | baf27342ad4b9f74dee1954e60ed5b40ebcf039d | [
"MIT"
] | null | null | null | customSDK/servicefabric/models/service_backup_configuration_info.py | hans-olav/service-fabric-cli | baf27342ad4b9f74dee1954e60ed5b40ebcf039d | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .backup_configuration_info import BackupConfigurationInfo
class ServiceBackupConfigurationInfo(BackupConfigurationInfo):
    """Backup configuration details for a single Service Fabric service.

    Extends :class:`BackupConfigurationInfo` with the service name and
    fixes the polymorphic discriminator ``kind`` to ``'Service'``.

    :param policy_name: The name of the backup policy which is applicable to
     this Service Fabric application or service or partition.
    :type policy_name: str
    :param policy_inherited_from: Specifies the scope at which the backup
     policy is applied. Possible values include: 'Invalid', 'Partition',
     'Service', 'Application'
    :type policy_inherited_from: str or
     ~azure.servicefabric.models.BackupPolicyScope
    :param suspension_info: Describes the backup suspension details.
    :type suspension_info: ~azure.servicefabric.models.BackupSuspensionInfo
    :param kind: Constant filled by server.
    :type kind: str
    :param service_name: The full name of the service with 'fabric:' URI
     scheme.
    :type service_name: str
    """

    # Swagger validation: the discriminator must always be present.
    _validation = {
        'kind': {'required': True},
    }

    # Python attribute -> REST payload key/type for (de)serialization.
    _attribute_map = {
        'policy_name': {'key': 'PolicyName', 'type': 'str'},
        'policy_inherited_from': {'key': 'PolicyInheritedFrom', 'type': 'str'},
        'suspension_info': {'key': 'SuspensionInfo', 'type': 'BackupSuspensionInfo'},
        'kind': {'key': 'Kind', 'type': 'str'},
        'service_name': {'key': 'ServiceName', 'type': 'str'},
    }

    def __init__(self, policy_name=None, policy_inherited_from=None,
                 suspension_info=None, service_name=None):
        super(ServiceBackupConfigurationInfo, self).__init__(
            policy_name=policy_name,
            policy_inherited_from=policy_inherited_from,
            suspension_info=suspension_info)
        self.service_name = service_name
        # Discriminator value identifying this subtype to the service.
        self.kind = 'Service'
| 43.622642 | 163 | 0.67301 |
from .backup_configuration_info import BackupConfigurationInfo
class ServiceBackupConfigurationInfo(BackupConfigurationInfo):
    """Backup configuration information for a specific Service Fabric
    service: which backup policy applies, where it is inherited from,
    any suspension details, and the service name.  The polymorphic
    discriminator ``kind`` is fixed to ``'Service'``.
    """
    # Swagger validation: the 'kind' discriminator is always required.
    _validation = {
        'kind': {'required': True},
    }
    # Python attribute -> REST payload key/type for (de)serialization.
    _attribute_map = {
        'policy_name': {'key': 'PolicyName', 'type': 'str'},
        'policy_inherited_from': {'key': 'PolicyInheritedFrom', 'type': 'str'},
        'suspension_info': {'key': 'SuspensionInfo', 'type': 'BackupSuspensionInfo'},
        'kind': {'key': 'Kind', 'type': 'str'},
        'service_name': {'key': 'ServiceName', 'type': 'str'},
    }
    def __init__(self, policy_name=None, policy_inherited_from=None, suspension_info=None, service_name=None):
        """Forward the shared fields to the base class; store the
        service name and set the subtype discriminator."""
        super(ServiceBackupConfigurationInfo, self).__init__(policy_name=policy_name, policy_inherited_from=policy_inherited_from, suspension_info=suspension_info)
        self.service_name = service_name
        self.kind = 'Service'
| true | true |
f7302cc54f16c065e96db6b3d234be1df4223db1 | 691 | py | Python | parlai/tasks/squad/test.py | zl930216/ParlAI | abf0ad6d1779af0f8ce0b5aed00d2bab71416684 | [
"MIT"
] | 9,228 | 2017-05-03T03:40:34.000Z | 2022-03-31T14:03:29.000Z | parlai/tasks/squad/test.py | zl930216/ParlAI | abf0ad6d1779af0f8ce0b5aed00d2bab71416684 | [
"MIT"
] | 2,660 | 2017-05-03T23:06:02.000Z | 2022-03-31T21:24:29.000Z | parlai/tasks/squad/test.py | zl930216/ParlAI | abf0ad6d1779af0f8ce0b5aed00d2bab71416684 | [
"MIT"
] | 2,058 | 2017-05-04T12:19:48.000Z | 2022-03-31T10:28:11.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.utils.testing import AutoTeacherTest
class TestDefaultTeacher(AutoTeacherTest):
    """Auto-generated regression test for the default 'squad' teacher."""
    task = "squad"
class TestIndexTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:index' variant."""
    task = "squad:index"
class TestOpensquadTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:opensquad' variant."""
    task = "squad:opensquad"
class TestFulldocTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:fulldoc' variant."""
    task = "squad:fulldoc"
class TestSentenceTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:sentence' variant."""
    task = "squad:sentence"
class TestFulldocsentenceTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:fulldocsentence' variant."""
    task = "squad:fulldocsentence"
| 21.59375 | 65 | 0.76411 |
from parlai.utils.testing import AutoTeacherTest
class TestDefaultTeacher(AutoTeacherTest):
    """Auto-generated regression test for the default 'squad' teacher."""
    task = "squad"
class TestIndexTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:index' variant."""
    task = "squad:index"
class TestOpensquadTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:opensquad' variant."""
    task = "squad:opensquad"
class TestFulldocTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:fulldoc' variant."""
    task = "squad:fulldoc"
class TestSentenceTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:sentence' variant."""
    task = "squad:sentence"
class TestFulldocsentenceTeacher(AutoTeacherTest):
    """Auto-generated regression test for the 'squad:fulldocsentence' variant."""
    task = "squad:fulldocsentence"
| true | true |
f7302ce59810c52de7625664e0887f9d75b5fb56 | 6,150 | py | Python | .docs/conf.py | Gael-de-Sailly/flopy | 4104cf5e6a35e2a1fd6183442962ae5cb258fa7a | [
"CC0-1.0",
"BSD-3-Clause"
] | null | null | null | .docs/conf.py | Gael-de-Sailly/flopy | 4104cf5e6a35e2a1fd6183442962ae5cb258fa7a | [
"CC0-1.0",
"BSD-3-Clause"
] | null | null | null | .docs/conf.py | Gael-de-Sailly/flopy | 4104cf5e6a35e2a1fd6183442962ae5cb258fa7a | [
"CC0-1.0",
"BSD-3-Clause"
] | null | null | null | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# add flopy root directory to the python path
sys.path.insert(0, os.path.abspath(".."))
from flopy import __version__
# -- determine if running on readthedocs ------------------------------------
on_rtd = os.environ.get('READTHEDOCS') == 'True'
# -- create source rst files ------------------------------------------------
cmd = "sphinx-apidoc -e -o source/ ../flopy/"
print(cmd)
os.system(cmd)
# -- programatically create rst files ---------------------------------------
cmd = ("python", "create_rstfiles.py")
print(" ".join(cmd))
os.system(" ".join(cmd))
# -- convert the tutorial scripts -------------------------------------------
if not on_rtd:
cmd = ("python", "create_tutorials.py")
print(" ".join(cmd))
os.system(" ".join(cmd))
# -- Project information -----------------------------------------------------
project = "flopy Documentation"
copyright = "2020, Bakker, Mark, Post, Vincent, Langevin, C. D., Hughes, J. D., White, J. T., Leaf, A. T., Paulinski, S. R., Larsen, J. D., Toews, M. W., Morway, E. D., Bellino, J. C., Starn, J. J., and Fienen, M. N."
author = "Bakker, Mark, Post, Vincent, Langevin, C. D., Hughes, J. D., White, J. T., Leaf, A. T., Paulinski, S. R., Larsen, J. D., Toews, M. W., Morway, E. D., Bellino, J. C., Starn, J. J., and Fienen, M. N."
# The version.
version = __version__
release = __version__
language = None
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.napoleon",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
"IPython.sphinxext.ipython_console_highlighting", # lowercase didn't work
"sphinx.ext.autosectionlabel",
"nbsphinx",
"nbsphinx_link",
"recommonmark",
]
# Settings for GitHub actions integration
if on_rtd:
extensions.append("rtds_action")
rtds_action_github_repo = "modflowpy/flopy"
rtds_action_path = "_notebooks"
rtds_action_artifact_prefix = "notebooks-for-"
rtds_action_github_token = os.environ.get("GITHUB_TOKEN", None)
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"]
source_suffix = ".rst"
# The encoding of source files.
source_encoding = "utf-8"
# The master toctree document.
master_doc = "index"
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_theme_options = {
"github_url": "https://github.com/modflowpy/flopy",
"use_edit_page_button": False,
}
autosummary_generate = True
numpydoc_show_class_members = False
html_context = {
"github_user": "flopy",
"github_repo": "flopy",
"github_version": "master",
"doc_path": "doc",
}
html_css_files = [
"css/custom.css",
]
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = "flopy"
html_favicon = "_images/flopylogo.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# Output file base name for HTML help builder.
htmlhelp_basename = "flopydoc"
| 33.791209 | 217 | 0.677073 |
# Sphinx configuration for the flopy documentation build.
import os
import sys
# Add the flopy root directory to the python path so autodoc can import it.
sys.path.insert(0, os.path.abspath(".."))
from flopy import __version__
# Detect whether this build is running on readthedocs.
on_rtd = os.environ.get('READTHEDOCS') == 'True'
# Generate per-module API stubs from the flopy package at build time.
cmd = "sphinx-apidoc -e -o source/ ../flopy/"
print(cmd)
os.system(cmd)
# Programmatically create additional rst files.
cmd = ("python", "create_rstfiles.py")
print(" ".join(cmd))
os.system(" ".join(cmd))
# Convert the tutorial scripts locally; on readthedocs the notebooks come
# from a CI artifact instead (see the rtds_action settings below).
if not on_rtd:
    cmd = ("python", "create_tutorials.py")
    print(" ".join(cmd))
    os.system(" ".join(cmd))
# -- Project information ----------------------------------------------------
project = "flopy Documentation"
copyright = "2020, Bakker, Mark, Post, Vincent, Langevin, C. D., Hughes, J. D., White, J. T., Leaf, A. T., Paulinski, S. R., Larsen, J. D., Toews, M. W., Morway, E. D., Bellino, J. C., Starn, J. J., and Fienen, M. N."
author = "Bakker, Mark, Post, Vincent, Langevin, C. D., Hughes, J. D., White, J. T., Leaf, A. T., Paulinski, S. R., Larsen, J. D., Toews, M. W., Morway, E. D., Bellino, J. C., Starn, J. J., and Fienen, M. N."
# Both the short and full version strings come straight from the package.
version = __version__
release = __version__
# None lets Sphinx default the documentation language (English).
language = None
# Sphinx extension modules used by this build.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.napoleon",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.mathjax",
    "sphinx.ext.ifconfig",
    "sphinx.ext.viewcode",
    "IPython.sphinxext.ipython_console_highlighting",
    "sphinx.ext.autosectionlabel",
    "nbsphinx",
    "nbsphinx_link",
    "recommonmark",
]
# Settings for GitHub actions integration
if on_rtd:
    extensions.append("rtds_action")
    rtds_action_github_repo = "modflowpy/flopy"
    rtds_action_path = "_notebooks"
    rtds_action_artifact_prefix = "notebooks-for-"
    rtds_action_github_token = os.environ.get("GITHUB_TOKEN", None)
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"]
source_suffix = ".rst"
# The encoding of source files.
source_encoding = "utf-8"
# The master toctree document.
master_doc = "index"
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_theme_options = {
    "github_url": "https://github.com/modflowpy/flopy",
    "use_edit_page_button": False,
}
autosummary_generate = True
numpydoc_show_class_members = False
html_context = {
    "github_user": "flopy",
    "github_repo": "flopy",
    "github_version": "master",
    "doc_path": "doc",
}
html_css_files = [
    "css/custom.css",
]
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = "flopy"
html_favicon = "_images/flopylogo.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# Output file base name for HTML help builder.
htmlhelp_basename = "flopydoc"
| true | true |
f7302d7fbc14ba5b762d875c3cc9ddd617ab5ad6 | 77,626 | py | Python | test_integration/geopm_test_integration.py | RyoTTa/geopm | 74246c8ce70ee47f53bc5629638f51c2c391027b | [
"BSD-3-Clause"
] | null | null | null | test_integration/geopm_test_integration.py | RyoTTa/geopm | 74246c8ce70ee47f53bc5629638f51c2c391027b | [
"BSD-3-Clause"
] | null | null | null | test_integration/geopm_test_integration.py | RyoTTa/geopm | 74246c8ce70ee47f53bc5629638f51c2c391027b | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY LOG OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Python 2/3 compatibility shims (this suite predates py3-only support).
from __future__ import absolute_import
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import str
import os
import sys
import unittest
import subprocess
import time
import pandas
import collections
import socket
import shlex
import json
# Make the repository root importable so the test helpers below resolve.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test_integration import util
from test_integration import geopm_test_launcher
import geopmpy.io
import geopmpy.launcher
def create_frequency_map_policy(min_freq, max_freq, frequency_map, use_env=False):
    """Build the policy dictionary consumed by the frequency map agent.

    Arguments:
        min_freq: Floor frequency for the agent.
        max_freq: Ceiling frequency for the agent.
        frequency_map: Mapping of region names to target frequencies.
        use_env: When True, publish the map through the
            GEOPM_FREQUENCY_MAP environment variable and return only the
            min/max policy.  When False, clear that variable and encode
            the map as HASH_i/FREQ_i policy fields instead.

    Returns:
        dict: Policy values keyed by policy field name.
    """
    # Pre-computed geopm region hashes for the benchmark regions.
    known_hashes = {
        'dgemm': 0x00000000a74bbf35,
        'all2all': 0x000000003ddc81bf,
        'stream': 0x00000000d691da00,
        'sleep': 0x00000000536c798f,
        'MPI_Barrier': 0x000000007b561f45,
        'model-init': 0x00000000644f9787,
        'unmarked-region': 0x00000000725e8066}

    policy = {'frequency_min': min_freq, 'frequency_max': max_freq}
    if use_env:
        os.environ['GEOPM_FREQUENCY_MAP'] = json.dumps(frequency_map)
    else:
        # Drop any stale map from the environment (no-op when unset) and
        # express the map through numbered policy fields instead.
        os.environ.pop('GEOPM_FREQUENCY_MAP', None)
        for index, (region_name, frequency) in enumerate(frequency_map.items()):
            policy['HASH_{}'.format(index)] = int(known_hashes[region_name])
            policy['FREQ_{}'.format(index)] = frequency
    return policy
class TestIntegration(unittest.TestCase):
    def setUp(self):
        """Set per-test defaults and snapshot machine state for tearDown."""
        self.longMessage = True
        self._agent = 'power_governor'
        self._options = {'power_budget': 150}
        self._tmp_files = []
        self._output = None
        # Snapshot the package power limit and frequency control so
        # tearDown can restore the node to its pre-test configuration.
        self._power_limit = geopm_test_launcher.geopmread("MSR::PKG_POWER_LIMIT:PL1_POWER_LIMIT board 0")
        self._frequency = geopm_test_launcher.geopmread("MSR::PERF_CTL:FREQ board 0")
        # Remember any pre-existing frequency map env var for restoration.
        self._original_freq_map_env = os.environ.get('GEOPM_FREQUENCY_MAP')
    def tearDown(self):
        """Restore machine state and remove files created by the test."""
        # Restore the power limit and frequency captured in setUp().
        geopm_test_launcher.geopmwrite("MSR::PKG_POWER_LIMIT:PL1_POWER_LIMIT board 0 " + str(self._power_limit))
        geopm_test_launcher.geopmwrite("MSR::PERF_CTL:FREQ board 0 " + str(self._frequency))
        # Only delete output when the test passed and the user has not asked
        # to keep files via the GEOPM_KEEP_FILES environment variable.
        if sys.exc_info() == (None, None, None) and os.getenv('GEOPM_KEEP_FILES') is None:
            if self._output is not None:
                self._output.remove_files()
            for ff in self._tmp_files:
                try:
                    os.remove(ff)
                except OSError:
                    # Best-effort cleanup; a missing file is not a failure.
                    pass
        # Restore GEOPM_FREQUENCY_MAP to its pre-test state (unset it if it
        # was not set before the test ran).
        if self._original_freq_map_env is None:
            if 'GEOPM_FREQUENCY_MAP' in os.environ:
                os.environ.pop('GEOPM_FREQUENCY_MAP')
        else:
            os.environ['GEOPM_FREQUENCY_MAP'] = self._original_freq_map_env
def assertNear(self, a, b, epsilon=0.05, msg=''):
denom = a if a != 0 else 1
if abs((a - b) / denom) >= epsilon:
self.fail('The fractional difference between {a} and {b} is greater than {epsilon}. {msg}'.format(a=a, b=b, epsilon=epsilon, msg=msg))
def create_progress_df(self, df):
# Build a df with only the first region entry and the exit.
df = df.reset_index(drop=True)
last_index = 0
filtered_df = pandas.DataFrame()
row_list = []
progress_1s = df['REGION_PROGRESS'].loc[df['REGION_PROGRESS'] == 1]
for index, _ in progress_1s.iteritems():
row = df.loc[last_index:index].head(1)
row_list += [row[['TIME', 'REGION_PROGRESS', 'REGION_RUNTIME']]]
row = df.loc[last_index:index].tail(1)
row_list += [row[['TIME', 'REGION_PROGRESS', 'REGION_RUNTIME']]]
last_index = index + 1 # Set the next starting index to be one past where we are
filtered_df = pandas.concat(row_list)
return filtered_df
    def test_report_and_trace_generation(self):
        """Run a sleep benchmark and verify that a non-empty report and
        trace are produced for every node in the job."""
        name = 'test_report_and_trace_generation'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        # Every node must contribute a non-empty report and trace.
        for nn in node_names:
            report = self._output.get_report_data(node_name=nn)
            self.assertNotEqual(0, len(report))
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNotEqual(0, len(trace))
def test_no_report_and_trace_generation(self):
name = 'test_no_report_and_trace_generation'
num_node = 4
num_rank = 16
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('sleep', 1.0)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
    @unittest.skipUnless('mr-fusion' in socket.gethostname(), "This test only enabled on known working systems.")
    def test_report_and_trace_generation_pthread(self):
        """Same as test_report_and_trace_generation, but with the GEOPM
        controller running as a pthread inside the application processes."""
        name = 'test_report_and_trace_generation_pthread'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        # Run the GEOPM controller in a spawned pthread.
        launcher.set_pmpi_ctl('pthread')
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            report = self._output.get_report_data(node_name=nn)
            self.assertNotEqual(0, len(report))
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNotEqual(0, len(trace))
    @unittest.skipUnless(geopm_test_launcher.detect_launcher() != "aprun",
                         'ALPS does not support multi-application launch on the same nodes.')
    @util.skip_unless_batch()
    def test_report_and_trace_generation_application(self):
        """Same as test_report_and_trace_generation, but with the GEOPM
        controller launched as a separate application."""
        name = 'test_report_and_trace_generation_application'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        # Run the GEOPM controller as a separate application.
        launcher.set_pmpi_ctl('application')
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            report = self._output.get_report_data(node_name=nn)
            self.assertNotEqual(0, len(report))
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNotEqual(0, len(trace))
    @unittest.skipUnless(geopm_test_launcher.detect_launcher() == "srun" and os.getenv('SLURM_NODELIST') is None,
                         'Requires non-sbatch SLURM session for alloc\'d and idle nodes.')
    def test_report_generation_all_nodes(self):
        """Run the sleep benchmark on every idle node of the SLURM session,
        one node at a time, and verify each node's report contents."""
        name = 'test_report_generation_all_nodes'
        report_path = name + '.report'
        num_node = 1
        num_rank = 1
        delay = 1.0
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        time.sleep(5)  # Wait a moment to finish cleaning-up from a previous test
        idle_nodes = launcher.get_idle_nodes()
        idle_nodes_copy = list(idle_nodes)
        alloc_nodes = launcher.get_alloc_nodes()
        launcher.write_log(name, 'Idle nodes : {nodes}'.format(nodes=idle_nodes))
        launcher.write_log(name, 'Alloc\'d nodes : {nodes}'.format(nodes=alloc_nodes))
        node_names = []
        # Iterate over a copy because idle_nodes may be pruned in the loop.
        for nn in idle_nodes_copy:
            launcher.set_node_list(nn.split())  # Hack to convert string to list
            try:
                launcher.run(name)
                node_names += nn.split()
            except subprocess.CalledProcessError as e:
                # A return code of 1 with the node gone from the idle list
                # means another user grabbed it; drop it instead of failing.
                if e.returncode == 1 and nn not in launcher.get_idle_nodes():
                    launcher.write_log(name, '{node} has disappeared from the idle list!'.format(node=nn))
                    idle_nodes.remove(nn)
                else:
                    launcher.write_log(name, 'Return code = {code}'.format(code=e.returncode))
                    raise e
            # Re-parse the report for the node just run (no caching so each
            # iteration sees the fresh report file).
            ao = geopmpy.io.AppOutput(report_path, do_cache=False)
            sleep_data = ao.get_report_data(node_name=nn, region='sleep')
            app_data = ao.get_app_total_data(node_name=nn)
            self.assertNotEqual(0, len(sleep_data))
            self.assertNear(delay, sleep_data['runtime'].item())
            self.assertGreater(app_data['runtime'].item(), sleep_data['runtime'].item())
            self.assertEqual(1, sleep_data['count'].item())
        self.assertEqual(len(node_names), len(idle_nodes))
def test_runtime(self):
name = 'test_runtime'
report_path = name + '.report'
num_node = 1
num_rank = 5
delay = 3.0
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('sleep', delay)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path)
node_names = self._output.get_node_names()
self.assertEqual(num_node, len(node_names))
for nn in node_names:
report = self._output.get_report_data(node_name=nn, region='sleep')
app_total = self._output.get_app_total_data(node_name=nn)
self.assertNear(delay, report['runtime'].item())
self.assertGreater(app_total['runtime'].item(), report['runtime'].item())
    def test_runtime_epoch(self):
        """Verify that the reported epoch runtime equals the sum of the
        sleep and spin region runtimes."""
        name = 'test_runtime_epoch'
        report_path = name + '.report'
        num_node = 1
        num_rank = 5
        delay = 3.0
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', delay)
        app_conf.append_region('spin', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            spin_data = self._output.get_report_data(node_name=nn, region='spin')
            sleep_data = self._output.get_report_data(node_name=nn, region='sleep')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            # An epoch covers one pass over both regions.
            total_runtime = sleep_data['runtime'].item() + spin_data['runtime'].item()
            self.assertNear(total_runtime, epoch_data['runtime'].item())
    def test_epoch_data_valid(self):
        """Check that the epoch entries in the report are populated and are
        consistent with the unmarked-region and application totals."""
        name = 'test_epoch_data_valid'
        report_path = name + '.report'
        num_node = 1
        num_rank = 1
        big_o = 1.0
        loop_count = 10
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        # The spin region is unmarked, so its time lands in 'unmarked-region'.
        app_conf.append_region('spin-unmarked', big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        report = geopmpy.io.RawReport(report_path)
        node_names = report.host_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            regions = report.region_names(nn)
            self.assertTrue('model-init' not in regions)
            totals = report.raw_totals(nn)
            unmarked = report.raw_region(nn, 'unmarked-region')
            epoch = report.raw_epoch(nn)
            # Epoch has valid data
            self.assertGreater(epoch['runtime (sec)'], 0)
            self.assertGreater(epoch['sync-runtime (sec)'], 0)
            self.assertGreater(epoch['package-energy (joules)'], 0)
            self.assertGreater(epoch['dram-energy (joules)'], 0)
            self.assertGreater(epoch['power (watts)'], 0)
            self.assertGreater(epoch['frequency (%)'], 0)
            self.assertGreater(epoch['frequency (Hz)'], 0)
            self.assertEqual(epoch['count'], loop_count)
            # Runtime
            self.assertTrue(totals['runtime (sec)'] > unmarked['runtime (sec)'] >= epoch['runtime (sec)'],
                            '''The total runtime is NOT > the unmarked runtime or the unmarked runtime is NOT
                            >= the Epoch runtime.''')
            # Package Energy (joules)
            self.assertTrue(totals['package-energy (joules)'] >
                            unmarked['package-energy (joules)'] >=
                            epoch['package-energy (joules)'],
                            '''The total package energy (joules) is NOT > the unmarked package energy (joules)
                            or the unmarked package energy (joules) is NOT >= the Epoch package
                            energy (joules).''')
            # DRAM Energy
            self.assertTrue(totals['dram-energy (joules)'] >
                            unmarked['dram-energy (joules)'] >=
                            epoch['dram-energy (joules)'],
                            '''The total dram energy is NOT > the unmarked dram energy or the unmarked
                            dram energy is NOT >= the Epoch dram energy.''')
            # Sync-runtime
            self.assertTrue(unmarked['sync-runtime (sec)'] >= epoch['sync-runtime (sec)'],
                            '''The sync-runtime for the unmarked region is NOT >= the Epoch sync-runtime.''')
    def test_runtime_nested(self):
        """Verify runtimes reported for a region containing nested
        sub-regions ('nested-progress')."""
        name = 'test_runtime_nested'
        report_path = name + '.report'
        num_node = 1
        num_rank = 1
        delay = 1.0
        loop_count = 2
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('nested-progress', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            spin_data = self._output.get_report_data(node_name=nn, region='spin')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            app_totals = self._output.get_app_total_data(node_name=nn)
            # The spin sections of this region sleep for 'delay' seconds twice per loop.
            self.assertNear(2 * loop_count * delay, spin_data['runtime'].item())
            self.assertNear(spin_data['runtime'].item(), epoch_data['runtime'].item(), epsilon=0.01)
            # Network time should be positive but small (under 0.1 seconds).
            self.assertGreater(app_totals['network-time'].item(), 0)
            self.assertGreater(0.1, app_totals['network-time'].item())
            self.assertEqual(loop_count, spin_data['count'].item())
    def test_trace_runtimes(self):
        """Cross-check the per-region and epoch runtimes in the report
        against elapsed times computed from the trace files."""
        name = 'test_trace_runtimes'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        # region_barrier=True keeps ranks in lock-step so trace intervals
        # line up with region boundaries.
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                    trace_path, region_barrier=True)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        regions = self._output.get_region_names()
        for nn in node_names:
            trace = self._output.get_trace_data(node_name=nn)
            app_totals = self._output.get_app_total_data(node_name=nn)
            self.assertNear(trace.iloc[-1]['TIME'], app_totals['runtime'].item(), msg='Application runtime failure, node_name={}.'.format(nn))
            # Calculate runtime totals for each region in each trace, compare to report
            tt = trace.reset_index(level='index')  # move 'index' field from multiindex to columns
            tt = tt.set_index(['REGION_HASH'], append=True)  # add region_hash column to multiindex
            tt_reg = tt.groupby(level=['REGION_HASH'])
            for region_name in regions:
                region_data = self._output.get_report_data(node_name=nn, region=region_name)
                if (region_name not in ['unmarked-region', 'model-init', 'epoch'] and
                    not region_name.startswith('MPI_') and
                    region_data['sync_runtime'].item() != 0):
                    region_hash = region_data['id'].item()
                    trace_data = tt_reg.get_group(region_hash)
                    start_idx = trace_data.iloc[0]['index']
                    end_idx = trace_data.iloc[-1]['index'] + 1  # use time from sample after exiting region
                    start_time = tt.loc[tt['index'] == start_idx]['TIME'].item()
                    end_time = tt.loc[tt['index'] == end_idx]['TIME'].item()
                    trace_elapsed_time = end_time - start_time
                    msg = 'for region {rn} on node {nn}'.format(rn=region_name, nn=nn)
                    self.assertNear(trace_elapsed_time, region_data['sync_runtime'].item(), msg=msg)
            # Epoch: elapsed time is measured from the first epoch sample.
            region_data = self._output.get_report_data(node_name=nn, region='epoch')
            trace_elapsed_time = trace.iloc[-1]['TIME'] - trace['TIME'].loc[trace['EPOCH_COUNT'] == 0].iloc[0]
            msg = 'for epoch on node {nn}'.format(nn=nn)
            self.assertNear(trace_elapsed_time, region_data['runtime'].item(), msg=msg)
    @util.skip_unless_config_enable('bloat')
    def test_runtime_regulator(self):
        """Verify that the per-pass region runtimes sampled in the trace
        match the configured big-o for each region."""
        name = 'test_runtime_regulator'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 4
        loop_count = 20
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        sleep_big_o = 1.0
        spin_big_o = 0.5
        expected_region_runtime = {'spin': spin_big_o, 'sleep': sleep_big_o}
        app_conf.append_region('sleep', sleep_big_o)
        app_conf.append_region('spin', spin_big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path, region_barrier=True)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        regions = self._output.get_region_names()
        for nn in node_names:
            app_totals = self._output.get_app_total_data(node_name=nn)
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNear(trace.iloc[-1]['TIME'], app_totals['runtime'].item())
            tt = trace.set_index(['REGION_HASH'], append=True)
            tt = tt.groupby(level=['REGION_HASH'])
            for region_name in regions:
                region_data = self._output.get_report_data(node_name=nn, region=region_name)
                if region_name not in ['unmarked-region', 'model-init', 'epoch'] and not region_name.startswith('MPI_') and region_data['runtime'].item() != 0:
                    trace_data = tt.get_group(region_data['id'].item())
                    filtered_df = self.create_progress_df(trace_data)
                    first_time = False
                    # Sleep regions are subject to more timing jitter.
                    epsilon = 0.001 if region_name != 'sleep' else 0.05
                    for index, df in filtered_df.iterrows():
                        # Check the recorded runtime at region exit (progress
                        # 1) and, after the first pass, at region entry too.
                        if df['REGION_PROGRESS'] == 1:
                            self.assertNear(df['REGION_RUNTIME'], expected_region_runtime[region_name], epsilon=epsilon)
                            first_time = True
                        if first_time is True and df['REGION_PROGRESS'] == 0:
                            self.assertNear(df['REGION_RUNTIME'], expected_region_runtime[region_name], epsilon=epsilon)
    @util.skip_unless_run_long_tests()
    @util.skip_unless_config_enable('bloat')
    def test_region_runtimes(self):
        """Compare region runtimes in the report against elapsed times
        computed from per-region progress samples in the traces, and check
        that every region seen in a trace appears in the report."""
        name = 'test_region_runtimes'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        loop_count = 500
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('dgemm', 8.0)
        app_conf.set_loop_count(loop_count)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path, time_limit=900)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        # Calculate region times from traces
        region_times = collections.defaultdict(lambda: collections.defaultdict(dict))
        for nn in node_names:
            tt = self._output.get_trace_data(node_name=nn).set_index(['REGION_HASH'], append=True).groupby(level=['REGION_HASH'])
            for region_hash, data in tt:
                filtered_df = self.create_progress_df(data)
                filtered_df = filtered_df.diff()
                # Since I'm not separating out the progress 0's from 1's, when I do the diff I only care about the
                # case where 1 - 0 = 1 for the progress column.
                filtered_df = filtered_df.loc[filtered_df['REGION_PROGRESS'] == 1]
                if len(filtered_df) > 1:
                    launcher.write_log(name, 'Region elapsed time stats from {} - {} :\n{}'\
                                       .format(nn, region_hash, filtered_df['TIME'].describe()))
                    filtered_df['TIME'].describe()
                    region_times[nn][region_hash] = filtered_df
            launcher.write_log(name, '{}'.format('-' * 80))
        # Loop through the reports to see if the region runtimes line up with what was calculated from the trace files above.
        regions = self._output.get_region_names()
        write_regions = True
        for nn in node_names:
            for region_name in regions:
                rr = self._output.get_report_data(node_name=nn, region=region_name)
                if (region_name != 'epoch' and
                    rr['id'].item() != 0 and
                    rr['count'].item() > 1):
                    # Only log the region id/name mapping once (first node).
                    if write_regions:
                        launcher.write_log(name, 'Region {} is {}.'.format(rr['id'].item(), region_name))
                    runtime = rr['sync_runtime'].item()
                    self.assertNear(runtime,
                                    region_times[nn][rr['id'].item()]['TIME'].sum())
            write_regions = False
        # Test to ensure every region detected in the trace is captured in the report.
        for nn in node_names:
            report_ids = []
            for region_name in regions:
                rr = self._output.get_report_data(node_name=nn, region=region_name)
                report_ids.append(rr['id'].item())
            for region_hash in region_times[nn].keys():
                self.assertTrue(region_hash in report_ids, msg='Report from {} missing region_hash {}'.format(nn, region_hash))
    def test_progress(self):
        """Verify runtime and count reporting for a sleep region that
        reports progress."""
        name = 'test_progress'
        report_path = name + '.report'
        num_node = 1
        num_rank = 4
        delay = 3.0
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep-progress', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        for nn in node_names:
            # NOTE(review): the 'sleep-progress' benchmark region appears in
            # the report under the name 'sleep' -- confirm against BenchConf.
            sleep_data = self._output.get_report_data(node_name=nn, region='sleep')
            app_total = self._output.get_app_total_data(node_name=nn)
            self.assertNear(delay, sleep_data['runtime'].item())
            self.assertGreater(app_total['runtime'].item(), sleep_data['runtime'].item())
            self.assertEqual(1, sleep_data['count'].item())
    def test_count(self):
        """Verify that region and epoch counts match the configured loop
        count in both the report and the trace."""
        name = 'test_count'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 4
        delay = 0.01
        loop_count = 100
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('spin', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        for nn in node_names:
            trace_data = self._output.get_trace_data(node_name=nn)
            spin_data = self._output.get_report_data(node_name=nn, region='spin')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            self.assertNear(delay * loop_count, spin_data['runtime'].item())
            self.assertEqual(loop_count, spin_data['count'].item())
            self.assertEqual(loop_count, epoch_data['count'].item())
            # The last trace sample must show every epoch completed.
            self.assertEqual(loop_count, trace_data['EPOCH_COUNT'][-1])
    @util.skip_unless_run_long_tests()
    def test_scaling(self):
        """
        This test will start at ${num_node} nodes and ranks.  It will then call check_run() to
        ensure that commands can be executed successfully on all of the allocated compute nodes.
        Afterwards it will run the specified app config on each node and verify the reports.  When
        complete it will double num_node and run the steps again.

        WARNING: This test can take a long time to run depending on the number of starting nodes and
        the size of the allocation.
        """
        name = 'test_scaling'
        report_path = name + '.report'
        num_node = 2
        loop_count = 100
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        app_conf.set_loop_count(loop_count)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, time_limit=900)
        check_successful = True
        while check_successful:
            launcher.set_num_node(num_node)
            launcher.set_num_rank(num_node)
            try:
                # Probe whether the allocation can still host num_node nodes.
                launcher.check_run(name)
            except subprocess.CalledProcessError as e:
                # If we exceed the available nodes in the allocation ALPS/SLURM give a rc of 1
                # All other rc's are real errors
                if e.returncode != 1:
                    raise e
                check_successful = False
            if check_successful:
                launcher.write_log(name, 'About to run on {} nodes.'.format(num_node))
                launcher.run(name)
                self._output = geopmpy.io.AppOutput(report_path)
                node_names = self._output.get_node_names()
                self.assertEqual(len(node_names), num_node)
                for nn in node_names:
                    dgemm_data = self._output.get_report_data(node_name=nn, region='dgemm')
                    all2all_data = self._output.get_report_data(node_name=nn, region='all2all')
                    self.assertEqual(loop_count, dgemm_data['count'].item())
                    self.assertEqual(loop_count, all2all_data['count'].item())
                    self.assertGreater(dgemm_data['runtime'].item(), 0.0)
                    self.assertGreater(all2all_data['runtime'].item(), 0.0)
                # Double the node count for the next iteration.
                num_node *= 2
                self._output.remove_files()
@util.skip_unless_run_long_tests()
def test_power_consumption(self):
name = 'test_power_consumption'
report_path = name + '.report'
trace_path = name + '.trace'
num_node = 4
num_rank = 16
loop_count = 500
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('dgemm', 8.0)
app_conf.set_loop_count(loop_count)
fam, mod = geopm_test_launcher.get_platform()
if fam == 6 and mod == 87:
# budget for KNL
self._options['power_budget'] = 130
else:
self._options['power_budget'] = 200
gov_agent_conf_path = name + '_gov_agent.config'
self._tmp_files.append(gov_agent_conf_path)
gov_agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
launcher = geopm_test_launcher.TestLauncher(app_conf, gov_agent_conf, report_path,
trace_path, time_limit=900)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.write_log(name, 'Power cap = {}W'.format(self._options['power_budget']))
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
node_names = self._output.get_node_names()
self.assertEqual(num_node, len(node_names))
all_power_data = {}
# Total power consumed will be Socket(s) + DRAM
for nn in node_names:
tt = self._output.get_trace_data(node_name=nn)
first_epoch_index = tt.loc[tt['EPOCH_COUNT'] == 0][:1].index[0]
epoch_dropped_data = tt[first_epoch_index:] # Drop all startup data
power_data = epoch_dropped_data.filter(regex='ENERGY')
power_data['TIME'] = epoch_dropped_data['TIME']
power_data = power_data.diff().dropna()
power_data.rename(columns={'TIME': 'ELAPSED_TIME'}, inplace=True)
power_data = power_data.loc[(power_data != 0).all(axis=1)] # Will drop any row that is all 0's
pkg_energy_cols = [s for s in power_data.keys() if 'ENERGY_PACKAGE' in s]
dram_energy_cols = [s for s in power_data.keys() if 'ENERGY_DRAM' in s]
power_data['SOCKET_POWER'] = power_data[pkg_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
power_data['DRAM_POWER'] = power_data[dram_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
power_data['COMBINED_POWER'] = power_data['SOCKET_POWER'] + power_data['DRAM_POWER']
pandas.set_option('display.width', 100)
launcher.write_log(name, 'Power stats from {} :\n{}'.format(nn, power_data.describe()))
all_power_data[nn] = power_data
for node_name, power_data in all_power_data.items():
# Allow for overages of 2% at the 75th percentile.
self.assertGreater(self._options['power_budget'] * 1.02, power_data['SOCKET_POWER'].quantile(.75))
# TODO Checks on the maximum power computed during the run?
# TODO Checks to see how much power was left on the table?
    @util.skip_unless_run_long_tests()
    @util.skip_unless_batch()
    def test_power_balancer(self):
        """Run an imbalanced dgemm workload under both the power governor
        and the power balancer agents at the same budget, and verify that
        the balancer closes a significant fraction of the runtime gap."""
        name = 'test_power_balancer'
        num_node = 4
        num_rank = 16
        loop_count = 500
        # Require that the balancer moves the maximum dgemm runtime at
        # least 1/4 the distance to the mean dgemm runtime under the
        # governor.
        margin_factor = 0.25
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('dgemm-imbalance', 8.0)
        app_conf.append_region('all2all', 0.05)
        app_conf.set_loop_count(loop_count)
        # Update app config with imbalance: slow down half of the nodes.
        alloc_nodes = geopm_test_launcher.TestLauncher.get_alloc_nodes()
        for nn in range(len(alloc_nodes) // 2):
            app_conf.append_imbalance(alloc_nodes[nn], 0.5)
        fam, mod = geopm_test_launcher.get_platform()
        if fam == 6 and mod == 87:
            # budget for KNL
            power_budget = 130
        else:
            power_budget = 200
        self._options = {'power_budget': power_budget}
        gov_agent_conf_path = name + '_gov_agent.config'
        bal_agent_conf_path = name + '_bal_agent.config'
        self._tmp_files.append(gov_agent_conf_path)
        self._tmp_files.append(bal_agent_conf_path)
        agent_list = ['power_governor', 'power_balancer']
        path_dict = {'power_governor': gov_agent_conf_path, 'power_balancer': bal_agent_conf_path}
        agent_runtime = dict()
        for agent in agent_list:
            agent_conf = geopmpy.io.AgentConf(path_dict[agent], agent, self._options)
            run_name = '{}_{}'.format(name, agent)
            report_path = '{}.report'.format(run_name)
            trace_path = '{}.trace'.format(run_name)
            launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                        trace_path, time_limit=2700)
            launcher.set_num_node(num_node)
            launcher.set_num_rank(num_rank)
            launcher.write_log(run_name, 'Power cap = {}W'.format(power_budget))
            launcher.run(run_name)
            self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
            node_names = self._output.get_node_names()
            self.assertEqual(num_node, len(node_names))
            power_limits = []
            # Total power consumed will be Socket(s) + DRAM
            for nn in node_names:
                tt = self._output.get_trace_data(node_name=nn)
                first_epoch_index = tt.loc[tt['EPOCH_COUNT'] == 0][:1].index[0]
                epoch_dropped_data = tt[first_epoch_index:]  # Drop all startup data
                power_data = epoch_dropped_data.filter(regex='ENERGY')
                power_data['TIME'] = epoch_dropped_data['TIME']
                # Convert cumulative energy/time samples to per-interval deltas.
                power_data = power_data.diff().dropna()
                power_data.rename(columns={'TIME': 'ELAPSED_TIME'}, inplace=True)
                power_data = power_data.loc[(power_data != 0).all(axis=1)]  # Will drop any row that is all 0's
                pkg_energy_cols = [s for s in power_data.keys() if 'ENERGY_PACKAGE' in s]
                dram_energy_cols = [s for s in power_data.keys() if 'ENERGY_DRAM' in s]
                power_data['SOCKET_POWER'] = power_data[pkg_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
                power_data['DRAM_POWER'] = power_data[dram_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
                power_data['COMBINED_POWER'] = power_data['SOCKET_POWER'] + power_data['DRAM_POWER']
                pandas.set_option('display.width', 100)
                launcher.write_log(name, 'Power stats from {} {} :\n{}'.format(agent, nn, power_data.describe()))
                # Get final power limit set on the node
                if agent == 'power_balancer':
                    power_limits.append(epoch_dropped_data['POWER_LIMIT'][-1])
            if agent == 'power_balancer':
                # The balancer may shift power between nodes but the mean
                # limit must not exceed the budget.
                avg_power_limit = sum(power_limits) / len(power_limits)
                self.assertTrue(avg_power_limit <= power_budget)
            min_runtime = float('nan')
            max_runtime = float('nan')
            node_names = self._output.get_node_names()
            runtime_list = []
            for node_name in node_names:
                epoch_data = self._output.get_report_data(node_name=node_name, region='dgemm')
                runtime_list.append(epoch_data['runtime'].item())
            if agent == 'power_governor':
                # Margin is computed from the governor run and reused for
                # the balancer comparison below.
                mean_runtime = sum(runtime_list) / len(runtime_list)
                max_runtime = max(runtime_list)
                margin = margin_factor * (max_runtime - mean_runtime)
            agent_runtime[agent] = max(runtime_list)
        self.assertGreater(agent_runtime['power_governor'] - margin,
                           agent_runtime['power_balancer'],
                           "governor runtime: {}, balancer runtime: {}, margin: {}".format(
                               agent_runtime['power_governor'], agent_runtime['power_balancer'], margin))
    def test_progress_exit(self):
        """
        Check that region progress always reaches exit (1) before the next
        region entry, i.e. progress only decreases when a new region is
        entered.
        """
        name = 'test_progress_exit'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 16
        loop_count = 100
        big_o = 0.1
        # Application config with two regions that report progress.
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('dgemm-progress', big_o)
        app_conf.append_region('spin-progress', big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        # region_barrier keeps all ranks in the same region at any time.
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path, region_barrier=True)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            tt = self._output.get_trace_data(node_name=nn)
            # Group trace samples by the region being executed.
            tt = tt.set_index(['REGION_HASH'], append=True)
            tt = tt.groupby(level=['REGION_HASH'])
            for region_hash, data in tt:
                tmp = data['REGION_PROGRESS'].diff()
                #@todo legacy branch?
                # Look for changes in progress that are more negative
                # than can be expected due to extrapolation error.
                # NOTE(review): this hash is written in decimal and exceeds
                # 32 bits, so it may never match a trace region hash --
                # TODO confirm this branch is still reachable.
                if region_hash == 8300189175:
                    negative_progress = tmp.loc[(tmp > -1) & (tmp < -0.1)]
                    launcher.write_log(name, '{}'.format(negative_progress))
                    self.assertEqual(0, len(negative_progress))
    @util.skip_unless_run_long_tests()
    @util.skip_unless_optimized()
    def test_sample_rate(self):
        """
        Check that sample rate is regular and fast.

        Runs a single-node dgemm workload and verifies the mean sample
        period and its normalized standard deviation stay under fixed
        bounds.
        """
        name = 'test_sample_rate'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 16
        loop_count = 10
        big_o = 10.0
        region = 'dgemm-progress'
        max_mean = 0.01  # 10 millisecond max sample period
        max_nstd = 0.1  # 10% normalized standard deviation (std / mean)
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region(region, big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            tt = self._output.get_trace_data(node_name=nn)
            # Sample period is the delta between consecutive timestamps;
            # duplicate timestamps are dropped before taking statistics.
            delta_t = tt['TIME'].diff()
            delta_t = delta_t.loc[delta_t != 0]
            self.assertGreater(max_mean, delta_t.mean())
            # WARNING : The following line may mask issues in the sampling rate. To do a fine grained analysis, comment
            # out the next line and do NOT run on the BSP. This will require modifications to the launcher or manual testing.
            size_orig = len(delta_t)
            delta_t = delta_t[(delta_t - delta_t.mean()) < 3*delta_t.std()]  # Only keep samples within 3 stds of the mean
            # No more than 6% of the samples may be outliers.
            self.assertGreater(0.06, 1 - (float(len(delta_t)) / size_orig))
            self.assertGreater(max_nstd, delta_t.std() / delta_t.mean())
    def test_network_times(self):
        """
        Check that per-region network (MPI) time accounting is consistent:
        all2all network time matches its runtime, the epoch and application
        totals agree, and compute/sleep regions report zero network time.
        """
        name = 'test_network_times'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        for nn in node_names:
            all2all_data = self._output.get_report_data(node_name=nn, region='all2all')
            sleep_data = self._output.get_report_data(node_name=nn, region='sleep')
            dgemm_data = self._output.get_report_data(node_name=nn, region='dgemm')
            barrier_data = self._output.get_report_data(node_name=nn, region='MPI_Barrier')
            unmarked_data = self._output.get_report_data(node_name=nn, region='unmarked-region')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            app_total = self._output.get_app_total_data(node_name=nn)
            self.assertEqual(0, unmarked_data['count'].item())
            # Since MPI time is counted if any rank on a node is in
            # an MPI call, but region time is counted only when all
            # ranks on a node are in a region, we must use the
            # unmarked-region time as our error term when comparing
            # MPI time and all2all time.
            mpi_epsilon = max(unmarked_data['runtime'].item() / all2all_data['network_time'].item(), 0.05)
            self.assertNear(all2all_data['network_time'].item(), all2all_data['runtime'].item(), mpi_epsilon)
            self.assertNear(all2all_data['network_time'].item() + barrier_data['network_time'].item(),
                            epoch_data['network_time'].item())
            # TODO: inconsistent; can we just use _ everywhere?
            # (report keys use 'network_time', app totals use 'network-time')
            self.assertNear(all2all_data['network_time'].item() + barrier_data['network_time'].item(),
                            app_total['network-time'].item())
            self.assertEqual(0, unmarked_data['network_time'].item())
            self.assertEqual(0, sleep_data['network_time'].item())
            self.assertEqual(0, dgemm_data['network_time'].item())
def test_ignore_runtime(self):
name = 'test_ignore_runtime'
report_path = name + '.report'
trace_path = name + '.trace'
num_node = 4
num_rank = 16
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('ignore', 1.0)
app_conf.append_region('dgemm', 1.0)
app_conf.append_region('all2all', 1.0)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
node_names = self._output.get_node_names()
self.assertEqual(len(node_names), num_node)
for nn in node_names:
ignore_data = self._output.get_report_data(node_name=nn, region='ignore')
app_data = self._output.get_app_total_data(node_name=nn)
self.assertNear(ignore_data['runtime'].item(),
app_data['ignore-runtime'].item(), 0.00005)
    @util.skip_unless_config_enable('ompt')
    def test_unmarked_ompt(self):
        """
        Check that OMPT names otherwise unmarked OpenMP regions in the
        report, and that region ids agree across nodes.
        """
        name = 'test_unmarked_ompt'
        report_path = name + '.report'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('stream-unmarked', 1.0)
        app_conf.append_region('dgemm-unmarked', 1.0)
        app_conf.append_region('all2all-unmarked', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        stream_id = None
        region_names = self._output.get_region_names()
        # First reported region whose name contains 'stream' (any case).
        stream_name = [key for key in region_names if key.lower().find('stream') != -1][0]
        for nn in node_names:
            stream_data = self._output.get_report_data(node_name=nn, region=stream_name)
            found = False
            # NOTE(review): this loop variable shadows the outer 'name'
            # (the test name); 'name' is not read again afterwards so the
            # shadowing is harmless, but it deserves a rename.
            for name in region_names:
                if stream_name in name: # account for numbers at end of OMPT region names
                    found = True
            self.assertTrue(found)
            self.assertEqual(1, stream_data['count'].item())
            # The stream region id must agree across all nodes.
            if stream_id:
                self.assertEqual(stream_id, stream_data['id'].item())
            else:
                stream_id = stream_data['id'].item()
        ompt_regions = [key for key in region_names if key.startswith('[OMPT]')]
        self.assertLessEqual(2, len(ompt_regions))
        self.assertTrue(('MPI_Alltoall' in region_names))
        gemm_region = [key for key in region_names if key.lower().find('gemm') != -1]
        self.assertLessEqual(1, len(gemm_region))
    def _test_agent_frequency_map(self, name, use_env=False):
        """
        Helper for FrequencyMapAgent tests.

        Runs dgemm/stream/all2all with a per-region frequency map,
        delivered either through the policy (use_env=False) or through the
        GEOPM_FREQUENCY_MAP environment variable (use_env=True), and
        checks that each region ran near its assigned frequency.
        """
        min_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MIN board 0")
        max_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MAX board 0")
        sticker_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_STICKER board 0")
        freq_step = geopm_test_launcher.geopmread("CPUINFO::FREQ_STEP board 0")
        self._agent = "frequency_map"
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 4
        loop_count = 5
        dgemm_bigo = 15.0
        stream_bigo = 1.0
        dgemm_bigo_jlse = 35.647
        dgemm_bigo_quartz = 29.12
        stream_bigo_jlse = 1.6225
        stream_bigo_quartz = 1.7941
        # Per-host big-o tuning so regions run long enough to be sampled.
        hostname = socket.gethostname()
        if hostname.endswith('.alcf.anl.gov'):
            dgemm_bigo = dgemm_bigo_jlse
            stream_bigo = stream_bigo_jlse
        elif hostname.startswith('mcfly'):
            dgemm_bigo = 42.0
            stream_bigo = 1.75
        elif hostname.startswith('quartz'):
            dgemm_bigo = dgemm_bigo_quartz
            stream_bigo = stream_bigo_quartz
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('dgemm', dgemm_bigo)
        app_conf.append_region('stream', stream_bigo)
        app_conf.append_region('all2all', 1.0)
        app_conf.write()
        # Assign a distinct target frequency to each region.
        freq_map = {}
        freq_map['dgemm'] = sticker_freq
        freq_map['stream'] = sticker_freq - 2 * freq_step
        freq_map['all2all'] = min_freq
        self._options = create_frequency_map_policy(min_freq, max_freq, freq_map, use_env)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                    trace_path, region_barrier=True, time_limit=900)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        regions = self._output.get_region_names()
        for nn in node_names:
            for region_name in regions:
                region_data = self._output.get_report_data(node_name=nn, region=region_name)
                if (region_name in ['dgemm', 'stream', 'all2all']):
                    #todo verify trace frequencies
                    #todo verify agent report augment frequecies
                    # NOTE(review): the reported 'frequency' appears to be
                    # a percentage of sticker frequency -- TODO confirm
                    # units against AppOutput.
                    msg = region_name + " frequency should be near assigned map frequency"
                    self.assertNear(region_data['frequency'].item(), freq_map[region_name] / sticker_freq * 100, msg=msg)
def test_agent_frequency_map_env(self):
"""
Test of the FrequencyMapAgent, setting a map through GEOPM_FREQUENCY_MAP.
"""
self._test_agent_frequency_map('test_agent_frequency_map_env', use_env=True)
def test_agent_frequency_map_policy(self):
"""
Test of the FrequencyMapAgent, setting a map through the policy.
"""
self._test_agent_frequency_map('test_agent_frequency_map_policy', use_env=False)
def test_agent_energy_efficient_single_region(self):
"""
Test of the EnergyEfficientAgent against single region loop.
"""
name = 'test_energy_efficient_single_region'
min_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MIN board 0")
sticker_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_STICKER board 0")
freq_step = geopm_test_launcher.geopmread("CPUINFO::FREQ_STEP board 0")
self._agent = "energy_efficient"
report_path = name + '.report'
trace_path = name + '.trace'
num_node = 1
num_rank = 4
loop_count = 100
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.set_loop_count(loop_count)
app_conf.append_region('spin', 0.1)
self._options = {'frequency_min': min_freq,
'frequency_max': sticker_freq}
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
node_names = self._output.get_node_names()
self.assertEqual(len(node_names), num_node)
regions = self._output.get_region_names()
for nn in node_names:
for region_name in regions:
report = geopmpy.io.RawReport(report_path)
if (region_name in ['spin']):
region = report.raw_region(nn, region_name)
msg = region_name + " frequency should be minimum frequency as specified by policy"
self.assertEqual(region['requested-online-frequency'], min_freq, msg=msg) # freq should reduce
    @util.skip_unless_run_long_tests()
    @util.skip_unless_cpufreq()
    @util.skip_unless_batch()
    def test_agent_energy_efficient(self):
        """
        Test of the EnergyEfficientAgent.

        Runs dgemm/stream twice: once pinned to sticker frequency, once
        with the agent free to pick frequencies (nan/nan policy), then
        checks that the adaptive run saves energy without losing more
        than 10% runtime.
        """
        name = 'test_energy_efficient_sticker'
        min_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MIN board 0")
        max_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MAX board 0")
        sticker_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_STICKER board 0")
        freq_step = geopm_test_launcher.geopmread("CPUINFO::FREQ_STEP board 0")
        self._agent = "energy_efficient"
        num_node = 1
        num_rank = 4
        loop_count = 200
        dgemm_bigo = 15.0
        stream_bigo = 1.0
        dgemm_bigo_jlse = 35.647
        dgemm_bigo_quartz = 29.12
        stream_bigo_jlse = 1.6225
        stream_bigo_quartz = 1.7941
        # Per-host big-o tuning so regions run long enough to be sampled.
        hostname = socket.gethostname()
        if hostname.endswith('.alcf.anl.gov'):
            dgemm_bigo = dgemm_bigo_jlse
            stream_bigo = stream_bigo_jlse
        elif hostname.startswith('mcfly'):
            dgemm_bigo = 42.0
            stream_bigo = 1.75
        elif hostname.startswith('quartz'):
            dgemm_bigo = dgemm_bigo_quartz
            stream_bigo = stream_bigo_quartz
        run = ['_sticker', '_nan_nan']
        for rr in run:
            report_path = name + rr + '.report'
            trace_path = name + rr + '.trace'
            app_conf = geopmpy.io.BenchConf(name + '_app.config')
            self._tmp_files.append(app_conf.get_path())
            app_conf.set_loop_count(loop_count)
            app_conf.append_region('dgemm', dgemm_bigo)
            app_conf.append_region('stream', stream_bigo)
            app_conf.write()
            if rr == '_sticker':
                self._options = {'frequency_min': sticker_freq,
                                 'frequency_max': sticker_freq}
                # NOTE(review): 'freq' is never read afterwards -- dead
                # assignment kept for fidelity.
                freq = sticker_freq
            else:
                self._options = {'frequency_min': min_freq,
                                 'frequency_max': sticker_freq}
            agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
            self._tmp_files.append(agent_conf.get_path())
            launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                        trace_path, region_barrier=True, time_limit=900)
            launcher.set_num_node(num_node)
            launcher.set_num_rank(num_rank)
            launcher.run(name + rr)
        # compare the app_total runtime and energy and assert within bounds
        report_path = name + run[0] + '.report'
        trace_path = name + run[0] + '.trace'
        sticker_out = geopmpy.io.AppOutput(report_path, trace_path + '*')
        report_path = name + run[1] + '.report'
        trace_path = name + run[1] + '.trace'
        nan_out = geopmpy.io.AppOutput(report_path, trace_path + '*')
        for nn in nan_out.get_node_names():
            sticker_app_total = sticker_out.get_app_total_data(node_name=nn)
            nan_app_total = nan_out.get_app_total_data(node_name=nn)
            runtime_savings_epoch = (sticker_app_total['runtime'].item() - nan_app_total['runtime'].item()) / sticker_app_total['runtime'].item()
            energy_savings_epoch = (sticker_app_total['energy-package'].item() - nan_app_total['energy-package'].item()) / sticker_app_total['energy-package'].item()
            self.assertLess(-0.1, runtime_savings_epoch)  # want -10% or better
            self.assertLess(0.0, energy_savings_epoch)
class TestIntegrationGeopmio(unittest.TestCase):
    '''Tests of the geopmread and geopmwrite command line tools.'''

    def setUp(self):
        # Warning emitted by the tools on some CPUs; lines containing it
        # are skipped by every output check below.
        self.skip_warning_string = 'Incompatible CPU'

    def check_output(self, args, expected):
        '''Run self.exec_name with args; assert that each string in
        expected appears, in order, in the (non-warning) output and that
        no remaining line contains "Error".'''
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for exp in expected:
                line = proc.stdout.readline()
                while self.skip_warning_string.encode() in line:
                    line = proc.stdout.readline()
                self.assertIn(exp.encode(), line)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))

    def check_output_range(self, args, min_exp, max_exp):
        '''Run self.exec_name with args; assert every printed value lies
        in [min_exp, max_exp].  Lines starting with "0x" are parsed as
        hex integers, all others as floats.'''
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() in line:
                    continue
                if line.startswith(b'0x'):
                    # BUGFIX: int(line) assumed base 10 and raised
                    # ValueError on any "0x"-prefixed output; parse with
                    # base 16 instead.
                    value = int(line, 16)
                else:
                    value = float(line)
                self.assertLessEqual(min_exp, value, msg="Value read for {} smaller than {}: {}.".format(args, min_exp, value))
                self.assertGreaterEqual(max_exp, value, msg="Value read for {} larger than {}: {}.".format(args, max_exp, value))
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))

    def check_no_error(self, args):
        '''Run self.exec_name with args; assert no non-warning output
        line contains "Error".'''
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))

    def test_geopmread_command_line(self):
        '''
        Check that geopmread commandline arguments work.
        '''
        self.exec_name = "geopmread"
        # no args
        self.check_no_error([])
        # domain flag
        self.check_output(['--domain'], ['board', 'package', 'core', 'cpu',
                                         'board_memory', 'package_memory',
                                         'board_nic', 'package_nic',
                                         'board_accelerator', 'package_accelerator'])
        self.check_output(['--domain', 'TIME'], ['cpu'])
        # read signal
        self.check_no_error(['TIME', 'board', '0'])
        # info
        self.check_no_error(['--info'])
        self.check_output(['--info', 'TIME'], ['Time in seconds'])
        # errors
        read_err = 'domain type and domain index are required'
        self.check_output(['TIME'], [read_err])
        self.check_output(['TIME', 'board'], [read_err])
        self.check_output(['TIME', 'board', 'bad'], ['invalid domain index'])
        self.check_output(['FREQUENCY', 'package', '111'], ['cannot read signal'])
        self.check_output(['ENERGY_PACKAGE', 'cpu', '0'], ['cannot read signal'])
        self.check_output(['INVALID', 'board', '0'], ['cannot read signal'])
        self.check_output(['--domain', 'INVALID'], ['unable to determine signal type'])
        self.check_output(['--domain', '--info'], ['info about domain not implemented'])

    @util.skip_unless_batch()
    def test_geopmread_all_signal_agg(self):
        '''
        Check that all reported signals can be read for board, aggregating if necessary.
        '''
        self.exec_name = "geopmread"
        all_signals = []
        try:
            proc = subprocess.Popen([self.exec_name],
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    all_signals.append(line.strip())
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
        for sig in all_signals:
            self.check_no_error([sig.decode(), 'board', '0'])

    @util.skip_unless_batch()
    def test_geopmread_signal_value(self):
        '''
        Check that some specific signals give a sane value.
        '''
        self.exec_name = "geopmread"
        signal_range = {
            "POWER_PACKAGE": (20, 400),
            "FREQUENCY": (1.0e8, 5.0e9),
            "TIME": (0, 10),  # time in sec to start geopmread
            "TEMPERATURE_CORE": (0, 100)
        }
        for signal, val_range in signal_range.items():
            # NOTE(review): a dead ``try/except: raise`` followed by an
            # unreachable ``pass  # skip missing signals`` used to wrap
            # this call.  Re-raising was the effective behavior, so the
            # dead code was removed; a missing signal still fails the
            # test rather than being skipped.
            self.check_no_error([signal, "board", "0"])
            self.check_output_range([signal, "board", "0"], *val_range)

    def test_geopmread_custom_msr(self):
        '''
        Check that MSRIOGroup picks up additional MSRs in path.
        '''
        self.exec_name = "geopmread"
        # Path to the custom MSR example two directories up from here.
        path = os.path.join(
            os.path.dirname(
                os.path.dirname(
                    os.path.realpath(__file__))),
            'examples/custom_msr/')
        custom_env = os.environ.copy()
        custom_env['GEOPM_PLUGIN_PATH'] = path
        all_signals = []
        try:
            proc = subprocess.Popen([self.exec_name], env=custom_env,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    all_signals.append(line.strip())
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
        self.assertIn(b'MSR::CORE_PERF_LIMIT_REASONS#', all_signals)

    def test_geopmwrite_command_line(self):
        '''
        Check that geopmwrite commandline arguments work.
        '''
        self.exec_name = "geopmwrite"
        # no args
        self.check_no_error([])
        # domain flag
        self.check_output(['--domain'], ['board', 'package', 'core', 'cpu',
                                         'board_memory', 'package_memory',
                                         'board_nic', 'package_nic',
                                         'board_accelerator', 'package_accelerator'])
        self.check_no_error(['--domain', 'FREQUENCY'])
        # info
        self.check_no_error(['--info'])
        self.check_output(['--info', 'FREQUENCY'], ['processor frequency'])
        # errors
        write_err = 'domain type, domain index, and value are required'
        self.check_output(['FREQUENCY'], [write_err])
        self.check_output(['FREQUENCY', 'board'], [write_err])
        self.check_output(['FREQUENCY', 'board', '0'], [write_err])
        self.check_output(['FREQUENCY', 'board', 'bad', '0'], ['invalid domain index'])
        self.check_output(['FREQUENCY', 'board', '0', 'bad'], ['invalid write value'])
        self.check_output(['FREQUENCY', 'package', '111', '0'], ['cannot write control'])
        self.check_output(['FREQUENCY', 'board_nic', '0', '0'], ['cannot write control'])
        self.check_output(['INVALID', 'board', '0', '0'], ['cannot write control'])
        self.check_output(['--domain', 'INVALID'], ['unable to determine control type'])
        self.check_output(['--domain', '--info'], ['info about domain not implemented'])

    @util.skip_unless_batch()
    def test_geopmwrite_set_freq(self):
        '''
        Check that geopmwrite can be used to set frequency.
        '''
        def read_stdout_line(stdout):
            # Return the next non-warning output line, stripped.
            line = stdout.readline()
            while self.skip_warning_string.encode() in line:
                line = stdout.readline()
            return line.strip()

        def read_current_freq(domain, signal='FREQUENCY'):
            # Read the current frequency at index 0 of the given domain.
            read_proc = subprocess.Popen(['geopmread', signal, domain, '0'],
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            freq = read_stdout_line(read_proc.stdout)
            freq = float(freq)
            return freq

        def read_min_max_freq():
            # Read min/max CPU frequency, rounded down to a multiple of 1e8.
            read_proc = subprocess.Popen(['geopmread', 'CPUINFO::FREQ_MIN', 'board', '0'],
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            min_freq = read_stdout_line(read_proc.stdout)
            min_freq = float(int(float(min_freq)/1e8)*1e8)  # convert to multiple of 1e8
            read_proc = subprocess.Popen(['geopmread', 'CPUINFO::FREQ_MAX', 'board', '0'],
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            max_freq = read_stdout_line(read_proc.stdout)
            max_freq = float(int(float(max_freq)/1e8)*1e8)
            return min_freq, max_freq

        self.exec_name = "geopmwrite"
        read_proc = subprocess.Popen(['geopmread', '--domain', 'FREQUENCY'],
                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        read_domain = read_stdout_line(read_proc.stdout).decode()
        write_proc = subprocess.Popen([self.exec_name, '--domain', 'FREQUENCY'],
                                      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        write_domain = read_stdout_line(write_proc.stdout).decode()
        min_freq, max_freq = read_min_max_freq()
        old_freq = read_current_freq(write_domain, 'MSR::PERF_CTL:FREQ')
        # Sanity check the starting frequency before changing it.
        self.assertLess(old_freq, max_freq * 2)
        self.assertGreater(old_freq, min_freq - 1e8)
        # set to min and check
        self.check_no_error(['FREQUENCY', write_domain, '0', str(min_freq)])
        result = read_current_freq(read_domain)
        self.assertEqual(min_freq, result)
        # set to max and check
        self.check_no_error(['FREQUENCY', write_domain, '0', str(max_freq)])
        result = read_current_freq(read_domain)
        self.assertEqual(max_freq, result)
        # restore the original frequency
        self.check_no_error(['FREQUENCY', write_domain, '0', str(old_freq)])
class TestIntegrationGeopmagent(unittest.TestCase):
    ''' Tests of the geopmagent command line tool.'''
    def setUp(self):
        self.exec_name = 'geopmagent'
        # Benign warning emitted on some systems; such lines (and blank
        # lines) are skipped by the output checks below.
        self.skip_warning_string = 'Incompatible CPU frequency driver/governor'
    def check_output(self, args, expected):
        '''Run geopmagent with args; assert each string in expected
        appears, in order, on the output lines and that no remaining
        line contains "Error".'''
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for exp in expected:
                line = proc.stdout.readline()
                while self.skip_warning_string.encode() in line or line == b'\n':
                    line = proc.stdout.readline()
                self.assertIn(exp.encode(), line)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
    def check_json_output(self, args, expected):
        '''Run geopmagent with args, parse the first non-warning line as
        JSON and assert it equals expected; later lines must not contain
        "Error".'''
        # NOTE(review): subprocess.Popen does not raise
        # CalledProcessError, so this handler appears dead and 'proc'
        # would be unbound if Popen ever raised -- TODO confirm intent.
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
        line = proc.stdout.readline()
        while self.skip_warning_string.encode() in line or line == b'\n':
            line = proc.stdout.readline()
        try:
            out_json = json.loads(line.decode())
        except ValueError:
            self.fail('Could not convert json string: {}\n'.format(line))
        self.assertEqual(expected, out_json)
        for line in proc.stdout:
            if self.skip_warning_string.encode() not in line:
                self.assertNotIn(b'Error', line)
    def check_no_error(self, args):
        '''Run geopmagent with args; assert no non-warning output line
        contains "Error".'''
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
    def test_geopmagent_command_line(self):
        '''
        Check that geopmagent commandline arguments work.
        '''
        # no args
        agent_names = ['monitor', 'power_balancer', 'power_governor',
                       'energy_efficient', 'frequency_map']
        self.check_output([], agent_names)
        # help message
        self.check_output(['--help'], ['Usage'])
        # version
        self.check_no_error(['--version'])
        # agent policy and sample names
        for agent in agent_names:
            self.check_output(['--agent', agent],
                              ['Policy', 'Sample'])
        # policy file
        self.check_json_output(['--agent', 'monitor', '--policy', 'None'],
                               {})
        self.check_json_output(['--agent', 'power_governor', '--policy', '150'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 150})
        # default value policy
        self.check_json_output(['--agent', 'power_governor', '--policy', 'NAN'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 'NAN'})
        self.check_json_output(['--agent', 'power_governor', '--policy', 'nan'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 'NAN'})
        self.check_json_output(['--agent', 'energy_efficient', '--policy', 'nan,nan'],
                               {'FREQ_MIN': 'NAN', 'FREQ_MAX': 'NAN'})
        self.check_json_output(['--agent', 'energy_efficient', '--policy', '1.2e9,nan'],
                               {'FREQ_MIN': 1.2e9, 'FREQ_MAX': 'NAN'})
        self.check_json_output(['--agent', 'energy_efficient', '--policy', 'nan,1.3e9'],
                               {'FREQ_MIN': 'NAN', 'FREQ_MAX': 1.3e9})
        # unspecified policy values are accepted
        self.check_json_output(['--agent', 'power_balancer', '--policy', '150'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 150})
        # errors
        self.check_output(['--agent', 'power_governor', '--policy', 'None'],
                          ['not a valid floating-point number', 'Invalid argument'])
        self.check_output(['--agent', 'monitor', '--policy', '300'],
                          ['agent takes no parameters', 'Invalid argument'])
        self.check_output(['--agent', 'energy_efficient', '--policy', '2.0e9,5.0e9,4.5e9,6.7,4.2'],
                          ['Number of policies', 'Invalid argument'])
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| 49.068268 | 165 | 0.618775 |
from __future__ import absolute_import
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import str
import os
import sys
import unittest
import subprocess
import time
import pandas
import collections
import socket
import shlex
import json
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test_integration import util
from test_integration import geopm_test_launcher
import geopmpy.io
import geopmpy.launcher
def create_frequency_map_policy(min_freq, max_freq, frequency_map, use_env=False):
    """Create a policy for the frequency map agent.

    Args:
        min_freq: Value for the frequency_min policy field.
        max_freq: Value for the frequency_max policy field.
        frequency_map: Mapping from region name to target frequency; the
            region names must appear in the known hash table below.
        use_env: When True, publish the map through the
            GEOPM_FREQUENCY_MAP environment variable instead of encoding
            HASH_#/FREQ_# pairs in the returned policy.

    Returns:
        dict: Policy values suitable for geopmpy.io.AgentConf.

    Raises:
        KeyError: If a region name is not in the known hash table.
    """
    policy = {'frequency_min': min_freq, 'frequency_max': max_freq}
    # Region hashes reported by the geopm benchmark for each region name.
    known_hashes = {
        'dgemm': 0x00000000a74bbf35,
        'all2all': 0x000000003ddc81bf,
        'stream': 0x00000000d691da00,
        'sleep': 0x00000000536c798f,
        'MPI_Barrier': 0x000000007b561f45,
        'model-init': 0x00000000644f9787,
        'unmarked-region': 0x00000000725e8066}
    if use_env:
        os.environ['GEOPM_FREQUENCY_MAP'] = json.dumps(frequency_map)
    else:
        # Remove any stale environment map so it cannot shadow the policy
        # (pop with a default replaces the membership-check-then-pop).
        os.environ.pop('GEOPM_FREQUENCY_MAP', None)
        for index, (region_name, frequency) in enumerate(frequency_map.items()):
            policy['HASH_{}'.format(index)] = known_hashes[region_name]
            policy['FREQ_{}'.format(index)] = frequency
    return policy
class TestIntegration(unittest.TestCase):
    def setUp(self):
        """Set default agent/options and capture hardware state so that
        tearDown() can restore it."""
        # Include assertEqual operands in failure messages.
        self.longMessage = True
        self._agent = 'power_governor'
        self._options = {'power_budget': 150}
        self._tmp_files = []
        self._output = None
        # Save power limit and frequency for restoration in tearDown().
        self._power_limit = geopm_test_launcher.geopmread("MSR::PKG_POWER_LIMIT:PL1_POWER_LIMIT board 0")
        self._frequency = geopm_test_launcher.geopmread("MSR::PERF_CTL:FREQ board 0")
        self._original_freq_map_env = os.environ.get('GEOPM_FREQUENCY_MAP')
    def tearDown(self):
        """Restore hardware and environment state; remove per-test files
        unless the test failed or GEOPM_KEEP_FILES is set."""
        # Restore the power limit and frequency captured in setUp().
        geopm_test_launcher.geopmwrite("MSR::PKG_POWER_LIMIT:PL1_POWER_LIMIT board 0 " + str(self._power_limit))
        geopm_test_launcher.geopmwrite("MSR::PERF_CTL:FREQ board 0 " + str(self._frequency))
        # sys.exc_info() is all None only when the test passed.
        if sys.exc_info() == (None, None, None) and os.getenv('GEOPM_KEEP_FILES') is None:
            if self._output is not None:
                self._output.remove_files()
            for ff in self._tmp_files:
                try:
                    os.remove(ff)
                except OSError:
                    pass
        # Restore the GEOPM_FREQUENCY_MAP environment variable.
        if self._original_freq_map_env is None:
            if 'GEOPM_FREQUENCY_MAP' in os.environ:
                os.environ.pop('GEOPM_FREQUENCY_MAP')
        else:
            os.environ['GEOPM_FREQUENCY_MAP'] = self._original_freq_map_env
def assertNear(self, a, b, epsilon=0.05, msg=''):
denom = a if a != 0 else 1
if abs((a - b) / denom) >= epsilon:
self.fail('The fractional difference between {a} and {b} is greater than {epsilon}. {msg}'.format(a=a, b=b, epsilon=epsilon, msg=msg))
def create_progress_df(self, df):
df = df.reset_index(drop=True)
last_index = 0
filtered_df = pandas.DataFrame()
row_list = []
progress_1s = df['REGION_PROGRESS'].loc[df['REGION_PROGRESS'] == 1]
for index, _ in progress_1s.iteritems():
row = df.loc[last_index:index].head(1)
row_list += [row[['TIME', 'REGION_PROGRESS', 'REGION_RUNTIME']]]
row = df.loc[last_index:index].tail(1)
row_list += [row[['TIME', 'REGION_PROGRESS', 'REGION_RUNTIME']]]
last_index = index + 1
filtered_df = pandas.concat(row_list)
return filtered_df
    def test_report_and_trace_generation(self):
        """
        Check that a basic run produces a non-empty report and trace for
        every node.
        """
        name = 'test_report_and_trace_generation'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            report = self._output.get_report_data(node_name=nn)
            self.assertNotEqual(0, len(report))
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNotEqual(0, len(trace))
def test_no_report_and_trace_generation(self):
name = 'test_no_report_and_trace_generation'
num_node = 4
num_rank = 16
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('sleep', 1.0)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
    @unittest.skipUnless('mr-fusion' in socket.gethostname(), "This test only enabled on known working systems.")
    def test_report_and_trace_generation_pthread(self):
        """
        Same as test_report_and_trace_generation, but with the controller
        running as a pthread inside the application process.
        """
        name = 'test_report_and_trace_generation_pthread'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        # Run the GEOPM controller as a pthread within the app.
        launcher.set_pmpi_ctl('pthread')
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            report = self._output.get_report_data(node_name=nn)
            self.assertNotEqual(0, len(report))
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNotEqual(0, len(trace))
    @unittest.skipUnless(geopm_test_launcher.detect_launcher() != "aprun",
                         'ALPS does not support multi-application launch on the same nodes.')
    @util.skip_unless_batch()
    def test_report_and_trace_generation_application(self):
        """Same as test_report_and_trace_generation, but with the GEOPM
        controller launched as a separate application alongside the benchmark.
        """
        name = 'test_report_and_trace_generation_application'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        # Run the controller as a stand-alone application process.
        launcher.set_pmpi_ctl('application')
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            report = self._output.get_report_data(node_name=nn)
            self.assertNotEqual(0, len(report))
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNotEqual(0, len(trace))
    @unittest.skipUnless(geopm_test_launcher.detect_launcher() == "srun" and os.getenv('SLURM_NODELIST') is None,
                         'Requires non-sbatch SLURM session for alloc\'d and idle nodes.')
    def test_report_generation_all_nodes(self):
        """Run the sleep benchmark on every idle SLURM node, one node at a
        time, and validate the report produced on each node.  Nodes that
        disappear from the idle list mid-test are dropped rather than failed.
        """
        name = 'test_report_generation_all_nodes'
        report_path = name + '.report'
        num_node = 1
        num_rank = 1
        delay = 1.0
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        time.sleep(5)  # Wait a moment to finish cleaning-up from a previous test
        idle_nodes = launcher.get_idle_nodes()
        # Iterate over a copy so that idle_nodes may be mutated in the loop.
        idle_nodes_copy = list(idle_nodes)
        alloc_nodes = launcher.get_alloc_nodes()
        launcher.write_log(name, 'Idle nodes : {nodes}'.format(nodes=idle_nodes))
        launcher.write_log(name, 'Alloc\'d nodes : {nodes}'.format(nodes=alloc_nodes))
        node_names = []
        for nn in idle_nodes_copy:
            launcher.set_node_list(nn.split())  # Hack to convert string to list
            try:
                launcher.run(name)
                node_names += nn.split()
            except subprocess.CalledProcessError as e:
                # rc == 1 with the node gone from the idle list means the node
                # was claimed by another job; drop it instead of failing.
                if e.returncode == 1 and nn not in launcher.get_idle_nodes():
                    launcher.write_log(name, '{node} has disappeared from the idle list!'.format(node=nn))
                    idle_nodes.remove(nn)
                else:
                    launcher.write_log(name, 'Return code = {code}'.format(code=e.returncode))
                    raise e
            # Re-parse the report after each run; caching disabled since the
            # same report path is rewritten on every iteration.
            ao = geopmpy.io.AppOutput(report_path, do_cache=False)
            sleep_data = ao.get_report_data(node_name=nn, region='sleep')
            app_data = ao.get_app_total_data(node_name=nn)
            self.assertNotEqual(0, len(sleep_data))
            self.assertNear(delay, sleep_data['runtime'].item())
            self.assertGreater(app_data['runtime'].item(), sleep_data['runtime'].item())
            self.assertEqual(1, sleep_data['count'].item())
        self.assertEqual(len(node_names), len(idle_nodes))
def test_runtime(self):
name = 'test_runtime'
report_path = name + '.report'
num_node = 1
num_rank = 5
delay = 3.0
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('sleep', delay)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path)
node_names = self._output.get_node_names()
self.assertEqual(num_node, len(node_names))
for nn in node_names:
report = self._output.get_report_data(node_name=nn, region='sleep')
app_total = self._output.get_app_total_data(node_name=nn)
self.assertNear(delay, report['runtime'].item())
self.assertGreater(app_total['runtime'].item(), report['runtime'].item())
    def test_runtime_epoch(self):
        """Verify that the epoch runtime equals the sum of the runtimes of
        the 'sleep' and 'spin' regions that make up each epoch.
        """
        name = 'test_runtime_epoch'
        report_path = name + '.report'
        num_node = 1
        num_rank = 5
        delay = 3.0
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        # Two regions per epoch, each with the same big-o delay.
        app_conf.append_region('sleep', delay)
        app_conf.append_region('spin', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            spin_data = self._output.get_report_data(node_name=nn, region='spin')
            sleep_data = self._output.get_report_data(node_name=nn, region='sleep')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            # Epoch runtime should account for both regions combined.
            total_runtime = sleep_data['runtime'].item() + spin_data['runtime'].item()
            self.assertNear(total_runtime, epoch_data['runtime'].item())
    def test_epoch_data_valid(self):
        """Run an unmarked spin workload and validate the epoch fields of the
        raw report: all metrics positive, count equals the loop count, and
        totals >= unmarked >= epoch for runtime and energy.
        """
        name = 'test_epoch_data_valid'
        report_path = name + '.report'
        num_node = 1
        num_rank = 1
        big_o = 1.0
        loop_count = 10
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        # Unmarked workload: no GEOPM region markup around the spin loop.
        app_conf.append_region('spin-unmarked', big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        report = geopmpy.io.RawReport(report_path)
        node_names = report.host_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            regions = report.region_names(nn)
            self.assertTrue('model-init' not in regions)
            totals = report.raw_totals(nn)
            unmarked = report.raw_region(nn, 'unmarked-region')
            epoch = report.raw_epoch(nn)
            # Every epoch metric must be strictly positive.
            self.assertGreater(epoch['runtime (sec)'], 0)
            self.assertGreater(epoch['sync-runtime (sec)'], 0)
            self.assertGreater(epoch['package-energy (joules)'], 0)
            self.assertGreater(epoch['dram-energy (joules)'], 0)
            self.assertGreater(epoch['power (watts)'], 0)
            self.assertGreater(epoch['frequency (%)'], 0)
            self.assertGreater(epoch['frequency (Hz)'], 0)
            self.assertEqual(epoch['count'], loop_count)
            self.assertTrue(totals['runtime (sec)'] > unmarked['runtime (sec)'] >= epoch['runtime (sec)'],
                            '''The total runtime is NOT > the unmarked runtime or the unmarked runtime is NOT
                            >= the Epoch runtime.''')
            self.assertTrue(totals['package-energy (joules)'] >
                            unmarked['package-energy (joules)'] >=
                            epoch['package-energy (joules)'],
                            '''The total package energy (joules) is NOT > the unmarked package energy (joules)
                            or the unmarked package energy (joules) is NOT >= the Epoch package
                            energy (joules).''')
            self.assertTrue(totals['dram-energy (joules)'] >
                            unmarked['dram-energy (joules)'] >=
                            epoch['dram-energy (joules)'],
                            '''The total dram energy is NOT > the unmarked dram energy or the unmarked
                            dram energy is NOT >= the Epoch dram energy.''')
            self.assertTrue(unmarked['sync-runtime (sec)'] >= epoch['sync-runtime (sec)'],
                            '''The sync-runtime for the unmarked region is NOT >= the Epoch sync-runtime.''')
    def test_runtime_nested(self):
        """Run the nested-progress benchmark and validate the reported spin
        runtime, epoch runtime, spin count and network time.
        """
        name = 'test_runtime_nested'
        report_path = name + '.report'
        num_node = 1
        num_rank = 1
        delay = 1.0
        loop_count = 2
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('nested-progress', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            # NOTE(review): the 'nested-progress' workload is queried here as
            # the 'spin' region -- assumes the benchmark registers its nested
            # work under that region name; confirm against the benchmark code.
            spin_data = self._output.get_report_data(node_name=nn, region='spin')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            app_totals = self._output.get_app_total_data(node_name=nn)
            # Factor of 2: presumably two nested spin invocations per loop
            # iteration -- TODO confirm.
            self.assertNear(2 * loop_count * delay, spin_data['runtime'].item())
            self.assertNear(spin_data['runtime'].item(), epoch_data['runtime'].item(), epsilon=0.01)
            self.assertGreater(app_totals['network-time'].item(), 0)
            self.assertGreater(0.1, app_totals['network-time'].item())
            self.assertEqual(loop_count, spin_data['count'].item())
    def test_trace_runtimes(self):
        """Cross-check region runtimes reported in the report file against
        elapsed times reconstructed from the per-node trace files.
        """
        name = 'test_trace_runtimes'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        # region_barrier keeps ranks in lock-step so trace region boundaries
        # line up across ranks.
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                    trace_path, region_barrier=True)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        regions = self._output.get_region_names()
        for nn in node_names:
            trace = self._output.get_trace_data(node_name=nn)
            app_totals = self._output.get_app_total_data(node_name=nn)
            # Last trace timestamp should match the total application runtime.
            self.assertNear(trace.iloc[-1]['TIME'], app_totals['runtime'].item(), msg='Application runtime failure, node_name={}.'.format(nn))
            # Promote the sample index to a column, then group samples by the
            # region hash so each region's rows can be pulled out together.
            tt = trace.reset_index(level='index')
            tt = tt.set_index(['REGION_HASH'], append=True)
            tt_reg = tt.groupby(level=['REGION_HASH'])
            for region_name in regions:
                region_data = self._output.get_report_data(node_name=nn, region=region_name)
                # Skip synthetic regions, MPI regions and regions with no
                # synchronous runtime recorded.
                if (region_name not in ['unmarked-region', 'model-init', 'epoch'] and
                        not region_name.startswith('MPI_') and
                        region_data['sync_runtime'].item() != 0):
                    region_hash = region_data['id'].item()
                    trace_data = tt_reg.get_group(region_hash)
                    # Elapsed time spans from the first sample in the region
                    # to the sample just after its last one.
                    start_idx = trace_data.iloc[0]['index']
                    end_idx = trace_data.iloc[-1]['index'] + 1
                    start_time = tt.loc[tt['index'] == start_idx]['TIME'].item()
                    end_time = tt.loc[tt['index'] == end_idx]['TIME'].item()
                    trace_elapsed_time = end_time - start_time
                    msg = 'for region {rn} on node {nn}'.format(rn=region_name, nn=nn)
                    self.assertNear(trace_elapsed_time, region_data['sync_runtime'].item(), msg=msg)
            # Epoch runtime: from the first sample with EPOCH_COUNT == 0 to
            # the end of the trace.
            region_data = self._output.get_report_data(node_name=nn, region='epoch')
            trace_elapsed_time = trace.iloc[-1]['TIME'] - trace['TIME'].loc[trace['EPOCH_COUNT'] == 0].iloc[0]
            msg = 'for epoch on node {nn}'.format(nn=nn)
            self.assertNear(trace_elapsed_time, region_data['runtime'].item(), msg=msg)
    @util.skip_unless_config_enable('bloat')
    def test_runtime_regulator(self):
        """Validate per-region runtimes recorded in the trace at region entry
        (progress 0) and exit (progress 1) against the configured big-o values.
        """
        name = 'test_runtime_regulator'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 4
        loop_count = 20
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        sleep_big_o = 1.0
        spin_big_o = 0.5
        expected_region_runtime = {'spin': spin_big_o, 'sleep': sleep_big_o}
        app_conf.append_region('sleep', sleep_big_o)
        app_conf.append_region('spin', spin_big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path, region_barrier=True)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        regions = self._output.get_region_names()
        for nn in node_names:
            app_totals = self._output.get_app_total_data(node_name=nn)
            trace = self._output.get_trace_data(node_name=nn)
            self.assertNear(trace.iloc[-1]['TIME'], app_totals['runtime'].item())
            # Group trace samples by the region they were taken in.
            tt = trace.set_index(['REGION_HASH'], append=True)
            tt = tt.groupby(level=['REGION_HASH'])
            for region_name in regions:
                region_data = self._output.get_report_data(node_name=nn, region=region_name)
                if region_name not in ['unmarked-region', 'model-init', 'epoch'] and not region_name.startswith('MPI_') and region_data['runtime'].item() != 0:
                    trace_data = tt.get_group(region_data['id'].item())
                    # Reduce to the region entry/exit samples only.
                    filtered_df = self.create_progress_df(trace_data)
                    first_time = False
                    # Sleep timing is coarser, so allow a larger tolerance.
                    epsilon = 0.001 if region_name != 'sleep' else 0.05
                    for index, df in filtered_df.iterrows():
                        if df['REGION_PROGRESS'] == 1:
                            self.assertNear(df['REGION_RUNTIME'], expected_region_runtime[region_name], epsilon=epsilon)
                            first_time = True
                        # Entry samples are only checked after the first full
                        # pass through the region has been observed.
                        if first_time is True and df['REGION_PROGRESS'] == 0:
                            self.assertNear(df['REGION_RUNTIME'], expected_region_runtime[region_name], epsilon=epsilon)
@util.skip_unless_run_long_tests()
@util.skip_unless_config_enable('bloat')
def test_region_runtimes(self):
name = 'test_region_runtimes'
report_path = name + '.report'
trace_path = name + '.trace'
num_node = 4
num_rank = 16
loop_count = 500
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('dgemm', 8.0)
app_conf.set_loop_count(loop_count)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path, time_limit=900)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
node_names = self._output.get_node_names()
self.assertEqual(len(node_names), num_node)
region_times = collections.defaultdict(lambda: collections.defaultdict(dict))
for nn in node_names:
tt = self._output.get_trace_data(node_name=nn).set_index(['REGION_HASH'], append=True).groupby(level=['REGION_HASH'])
for region_hash, data in tt:
filtered_df = self.create_progress_df(data)
filtered_df = filtered_df.diff()
# case where 1 - 0 = 1 for the progress column.
filtered_df = filtered_df.loc[filtered_df['REGION_PROGRESS'] == 1]
if len(filtered_df) > 1:
launcher.write_log(name, 'Region elapsed time stats from {} - {} :\n{}'\
.format(nn, region_hash, filtered_df['TIME'].describe()))
filtered_df['TIME'].describe()
region_times[nn][region_hash] = filtered_df
launcher.write_log(name, '{}'.format('-' * 80))
# Loop through the reports to see if the region runtimes line up with what was calculated from the trace files above.
regions = self._output.get_region_names()
write_regions = True
for nn in node_names:
for region_name in regions:
rr = self._output.get_report_data(node_name=nn, region=region_name)
if (region_name != 'epoch' and
rr['id'].item() != 0 and
rr['count'].item() > 1):
if write_regions:
launcher.write_log(name, 'Region {} is {}.'.format(rr['id'].item(), region_name))
runtime = rr['sync_runtime'].item()
self.assertNear(runtime,
region_times[nn][rr['id'].item()]['TIME'].sum())
write_regions = False
# Test to ensure every region detected in the trace is captured in the report.
for nn in node_names:
report_ids = []
for region_name in regions:
rr = self._output.get_report_data(node_name=nn, region=region_name)
report_ids.append(rr['id'].item())
for region_hash in region_times[nn].keys():
self.assertTrue(region_hash in report_ids, msg='Report from {} missing region_hash {}'.format(nn, region_hash))
def test_progress(self):
name = 'test_progress'
report_path = name + '.report'
num_node = 1
num_rank = 4
delay = 3.0
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('sleep-progress', delay)
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path)
node_names = self._output.get_node_names()
self.assertEqual(len(node_names), num_node)
for nn in node_names:
sleep_data = self._output.get_report_data(node_name=nn, region='sleep')
app_total = self._output.get_app_total_data(node_name=nn)
self.assertNear(delay, sleep_data['runtime'].item())
self.assertGreater(app_total['runtime'].item(), sleep_data['runtime'].item())
self.assertEqual(1, sleep_data['count'].item())
    def test_count(self):
        """Verify the region count, epoch count, and final trace epoch count
        all equal the configured loop count for a spin workload.
        """
        name = 'test_count'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 4
        delay = 0.01
        loop_count = 100
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('spin', delay)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        for nn in node_names:
            trace_data = self._output.get_trace_data(node_name=nn)
            spin_data = self._output.get_report_data(node_name=nn, region='spin')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            # Total spin time should be delay per iteration times loop count.
            self.assertNear(delay * loop_count, spin_data['runtime'].item())
            self.assertEqual(loop_count, spin_data['count'].item())
            self.assertEqual(loop_count, epoch_data['count'].item())
            # The last trace sample should show the final epoch count.
            self.assertEqual(loop_count, trace_data['EPOCH_COUNT'][-1])
    @util.skip_unless_run_long_tests()
    def test_scaling(self):
        """Repeatedly run dgemm + all2all while doubling the node count until
        the allocation is exhausted, validating region counts and runtimes at
        each scale.  One rank per node is used (num_rank == num_node).
        """
        name = 'test_scaling'
        report_path = name + '.report'
        num_node = 2
        loop_count = 100
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        app_conf.set_loop_count(loop_count)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, time_limit=900)
        check_successful = True
        while check_successful:
            launcher.set_num_node(num_node)
            launcher.set_num_rank(num_node)
            # check_run() probes whether the allocation can support num_node.
            try:
                launcher.check_run(name)
            except subprocess.CalledProcessError as e:
                # If we exceed the available nodes in the allocation ALPS/SLURM give a rc of 1
                # All other rc's are real errors
                if e.returncode != 1:
                    raise e
                check_successful = False
            if check_successful:
                launcher.write_log(name, 'About to run on {} nodes.'.format(num_node))
                launcher.run(name)
                self._output = geopmpy.io.AppOutput(report_path)
                node_names = self._output.get_node_names()
                self.assertEqual(len(node_names), num_node)
                for nn in node_names:
                    dgemm_data = self._output.get_report_data(node_name=nn, region='dgemm')
                    all2all_data = self._output.get_report_data(node_name=nn, region='all2all')
                    self.assertEqual(loop_count, dgemm_data['count'].item())
                    self.assertEqual(loop_count, all2all_data['count'].item())
                    self.assertGreater(dgemm_data['runtime'].item(), 0.0)
                    self.assertGreater(all2all_data['runtime'].item(), 0.0)
                # Double the node count and clear output before the next pass.
                num_node *= 2
                self._output.remove_files()
@util.skip_unless_run_long_tests()
def test_power_consumption(self):
name = 'test_power_consumption'
report_path = name + '.report'
trace_path = name + '.trace'
num_node = 4
num_rank = 16
loop_count = 500
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.append_region('dgemm', 8.0)
app_conf.set_loop_count(loop_count)
fam, mod = geopm_test_launcher.get_platform()
if fam == 6 and mod == 87:
self._options['power_budget'] = 130
else:
self._options['power_budget'] = 200
gov_agent_conf_path = name + '_gov_agent.config'
self._tmp_files.append(gov_agent_conf_path)
gov_agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
launcher = geopm_test_launcher.TestLauncher(app_conf, gov_agent_conf, report_path,
trace_path, time_limit=900)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.write_log(name, 'Power cap = {}W'.format(self._options['power_budget']))
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
node_names = self._output.get_node_names()
self.assertEqual(num_node, len(node_names))
all_power_data = {}
for nn in node_names:
tt = self._output.get_trace_data(node_name=nn)
first_epoch_index = tt.loc[tt['EPOCH_COUNT'] == 0][:1].index[0]
epoch_dropped_data = tt[first_epoch_index:]
power_data = epoch_dropped_data.filter(regex='ENERGY')
power_data['TIME'] = epoch_dropped_data['TIME']
power_data = power_data.diff().dropna()
power_data.rename(columns={'TIME': 'ELAPSED_TIME'}, inplace=True)
power_data = power_data.loc[(power_data != 0).all(axis=1)]
pkg_energy_cols = [s for s in power_data.keys() if 'ENERGY_PACKAGE' in s]
dram_energy_cols = [s for s in power_data.keys() if 'ENERGY_DRAM' in s]
power_data['SOCKET_POWER'] = power_data[pkg_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
power_data['DRAM_POWER'] = power_data[dram_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
power_data['COMBINED_POWER'] = power_data['SOCKET_POWER'] + power_data['DRAM_POWER']
pandas.set_option('display.width', 100)
launcher.write_log(name, 'Power stats from {} :\n{}'.format(nn, power_data.describe()))
all_power_data[nn] = power_data
for node_name, power_data in all_power_data.items():
# Allow for overages of 2% at the 75th percentile.
self.assertGreater(self._options['power_budget'] * 1.02, power_data['SOCKET_POWER'].quantile(.75))
# TODO Checks on the maximum power computed during the run?
# TODO Checks to see how much power was left on the table?
    @util.skip_unless_run_long_tests()
    @util.skip_unless_batch()
    def test_power_balancer(self):
        """Run an imbalanced dgemm workload under both the power_governor and
        power_balancer agents, then verify the balancer reduces the maximum
        dgemm runtime by at least the margin relative to the governor.
        """
        name = 'test_power_balancer'
        num_node = 4
        num_rank = 16
        loop_count = 500
        # Require that the balancer moves the maximum dgemm runtime at
        # least 1/4 the distance to the mean dgemm runtime under the
        # governor.
        margin_factor = 0.25
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('dgemm-imbalance', 8.0)
        app_conf.append_region('all2all', 0.05)
        app_conf.set_loop_count(loop_count)
        # Update app config with imbalance
        alloc_nodes = geopm_test_launcher.TestLauncher.get_alloc_nodes()
        # Slow down the first half of the allocated nodes by 50%.
        for nn in range(len(alloc_nodes) // 2):
            app_conf.append_imbalance(alloc_nodes[nn], 0.5)
        fam, mod = geopm_test_launcher.get_platform()
        if fam == 6 and mod == 87:
            # budget for KNL
            power_budget = 130
        else:
            power_budget = 200
        self._options = {'power_budget': power_budget}
        gov_agent_conf_path = name + '_gov_agent.config'
        bal_agent_conf_path = name + '_bal_agent.config'
        self._tmp_files.append(gov_agent_conf_path)
        self._tmp_files.append(bal_agent_conf_path)
        agent_list = ['power_governor', 'power_balancer']
        path_dict = {'power_governor': gov_agent_conf_path, 'power_balancer': bal_agent_conf_path}
        # agent_runtime maps agent name -> max dgemm runtime across nodes.
        agent_runtime = dict()
        for agent in agent_list:
            agent_conf = geopmpy.io.AgentConf(path_dict[agent], agent, self._options)
            run_name = '{}_{}'.format(name, agent)
            report_path = '{}.report'.format(run_name)
            trace_path = '{}.trace'.format(run_name)
            launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                        trace_path, time_limit=2700)
            launcher.set_num_node(num_node)
            launcher.set_num_rank(num_rank)
            launcher.write_log(run_name, 'Power cap = {}W'.format(power_budget))
            launcher.run(run_name)
            self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
            node_names = self._output.get_node_names()
            self.assertEqual(num_node, len(node_names))
            power_limits = []
            # Total power consumed will be Socket(s) + DRAM
            for nn in node_names:
                tt = self._output.get_trace_data(node_name=nn)
                first_epoch_index = tt.loc[tt['EPOCH_COUNT'] == 0][:1].index[0]
                epoch_dropped_data = tt[first_epoch_index:]  # Drop all startup data
                # Convert cumulative energy counters to per-sample power.
                power_data = epoch_dropped_data.filter(regex='ENERGY')
                power_data['TIME'] = epoch_dropped_data['TIME']
                power_data = power_data.diff().dropna()
                power_data.rename(columns={'TIME': 'ELAPSED_TIME'}, inplace=True)
                power_data = power_data.loc[(power_data != 0).all(axis=1)]  # Will drop any row that is all 0's
                pkg_energy_cols = [s for s in power_data.keys() if 'ENERGY_PACKAGE' in s]
                dram_energy_cols = [s for s in power_data.keys() if 'ENERGY_DRAM' in s]
                power_data['SOCKET_POWER'] = power_data[pkg_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
                power_data['DRAM_POWER'] = power_data[dram_energy_cols].sum(axis=1) / power_data['ELAPSED_TIME']
                power_data['COMBINED_POWER'] = power_data['SOCKET_POWER'] + power_data['DRAM_POWER']
                pandas.set_option('display.width', 100)
                launcher.write_log(name, 'Power stats from {} {} :\n{}'.format(agent, nn, power_data.describe()))
                if agent == 'power_balancer':
                    power_limits.append(epoch_dropped_data['POWER_LIMIT'][-1])
            if agent == 'power_balancer':
                # The balancer's average final power limit must respect the budget.
                avg_power_limit = sum(power_limits) / len(power_limits)
                self.assertTrue(avg_power_limit <= power_budget)
            min_runtime = float('nan')
            max_runtime = float('nan')
            node_names = self._output.get_node_names()
            runtime_list = []
            for node_name in node_names:
                epoch_data = self._output.get_report_data(node_name=node_name, region='dgemm')
                runtime_list.append(epoch_data['runtime'].item())
            if agent == 'power_governor':
                # The governor run establishes the baseline and the margin.
                mean_runtime = sum(runtime_list) / len(runtime_list)
                max_runtime = max(runtime_list)
                margin = margin_factor * (max_runtime - mean_runtime)
            agent_runtime[agent] = max(runtime_list)
        self.assertGreater(agent_runtime['power_governor'] - margin,
                           agent_runtime['power_balancer'],
                           "governor runtime: {}, balancer runtime: {}, margin: {}".format(
                               agent_runtime['power_governor'], agent_runtime['power_balancer'], margin))
    def test_progress_exit(self):
        """Check that REGION_PROGRESS never decreases mid-region in the trace
        (large negative diffs indicate progress was reported after exit).
        """
        name = 'test_progress_exit'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 16
        loop_count = 100
        big_o = 0.1
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('dgemm-progress', big_o)
        app_conf.append_region('spin-progress', big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path, region_barrier=True)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            tt = self._output.get_trace_data(node_name=nn)
            # Group trace samples per region to diff progress within a region.
            tt = tt.set_index(['REGION_HASH'], append=True)
            tt = tt.groupby(level=['REGION_HASH'])
            for region_hash, data in tt:
                tmp = data['REGION_PROGRESS'].diff()
                # NOTE(review): 8300189175 appears to be the hash of one of
                # the progress regions under test -- confirm against the
                # region hashing scheme.
                if region_hash == 8300189175:
                    # Diffs in (-1, -0.1) mean progress went backwards
                    # without a region re-entry (diff of exactly -1 is the
                    # normal exit-to-entry transition).
                    negative_progress = tmp.loc[(tmp > -1) & (tmp < -0.1)]
                    launcher.write_log(name, '{}'.format(negative_progress))
                    self.assertEqual(0, len(negative_progress))
    @util.skip_unless_run_long_tests()
    @util.skip_unless_optimized()
    def test_sample_rate(self):
        """Check the controller's sampling cadence from the trace: mean
        sample interval below max_mean, few outliers, and a normalized
        standard deviation below max_nstd.
        """
        name = 'test_sample_rate'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 16
        loop_count = 10
        big_o = 10.0
        region = 'dgemm-progress'
        max_mean = 0.01  # 10 millisecond max sample period
        max_nstd = 0.1   # 10% normalized standard deviation (std / mean)
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region(region, big_o)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(num_node, len(node_names))
        for nn in node_names:
            tt = self._output.get_trace_data(node_name=nn)
            # Inter-sample intervals; drop zero diffs (duplicate timestamps).
            delta_t = tt['TIME'].diff()
            delta_t = delta_t.loc[delta_t != 0]
            self.assertGreater(max_mean, delta_t.mean())
            # Drop samples more than 3 standard deviations from the mean and
            # require that no more than 6% of samples were outliers.
            size_orig = len(delta_t)
            delta_t = delta_t[(delta_t - delta_t.mean()) < 3*delta_t.std()]
            self.assertGreater(0.06, 1 - (float(len(delta_t)) / size_orig))
            self.assertGreater(max_nstd, delta_t.std() / delta_t.mean())
    def test_network_times(self):
        """Validate network (MPI) time accounting: all2all is dominated by
        network time, epoch and app totals equal all2all + barrier network
        time, and non-MPI regions report zero network time.
        """
        name = 'test_network_times'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('sleep', 1.0)
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        for nn in node_names:
            all2all_data = self._output.get_report_data(node_name=nn, region='all2all')
            sleep_data = self._output.get_report_data(node_name=nn, region='sleep')
            dgemm_data = self._output.get_report_data(node_name=nn, region='dgemm')
            barrier_data = self._output.get_report_data(node_name=nn, region='MPI_Barrier')
            unmarked_data = self._output.get_report_data(node_name=nn, region='unmarked-region')
            epoch_data = self._output.get_report_data(node_name=nn, region='epoch')
            app_total = self._output.get_app_total_data(node_name=nn)
            self.assertEqual(0, unmarked_data['count'].item())
            # Tolerance scales with how much unmarked time could skew the
            # all2all measurement, with a 5% floor.
            mpi_epsilon = max(unmarked_data['runtime'].item() / all2all_data['network_time'].item(), 0.05)
            self.assertNear(all2all_data['network_time'].item(), all2all_data['runtime'].item(), mpi_epsilon)
            self.assertNear(all2all_data['network_time'].item() + barrier_data['network_time'].item(),
                            epoch_data['network_time'].item())
            self.assertNear(all2all_data['network_time'].item() + barrier_data['network_time'].item(),
                            app_total['network-time'].item())
            # Regions that make no MPI calls must report zero network time.
            self.assertEqual(0, unmarked_data['network_time'].item())
            self.assertEqual(0, sleep_data['network_time'].item())
            self.assertEqual(0, dgemm_data['network_time'].item())
    def test_ignore_runtime(self):
        """Check that time spent in the 'ignore' region is reported as ignore-runtime."""
        name = 'test_ignore_runtime'
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('ignore', 1.0)
        app_conf.append_region('dgemm', 1.0)
        app_conf.append_region('all2all', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        for nn in node_names:
            ignore_data = self._output.get_report_data(node_name=nn, region='ignore')
            app_data = self._output.get_app_total_data(node_name=nn)
            # Per-region 'ignore' runtime must match the application ignore-runtime total.
            self.assertNear(ignore_data['runtime'].item(),
                            app_data['ignore-runtime'].item(), 0.00005)
    @util.skip_unless_config_enable('ompt')
    def test_unmarked_ompt(self):
        """Check that OpenMP regions in an unmarked app are auto-detected via OMPT."""
        name = 'test_unmarked_ompt'
        report_path = name + '.report'
        num_node = 4
        num_rank = 16
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('stream-unmarked', 1.0)
        app_conf.append_region('dgemm-unmarked', 1.0)
        app_conf.append_region('all2all-unmarked', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        stream_id = None
        region_names = self._output.get_region_names()
        # First region whose name contains 'stream' (case-insensitive).
        stream_name = [key for key in region_names if key.lower().find('stream') != -1][0]
        for nn in node_names:
            stream_data = self._output.get_report_data(node_name=nn, region=stream_name)
            found = False
            # NOTE(review): this loop rebinds 'name' (previously the test name);
            # 'name' is not used again afterwards, so behavior is unaffected.
            for name in region_names:
                if stream_name in name:
                    found = True
            self.assertTrue(found)
            self.assertEqual(1, stream_data['count'].item())
            # The stream region id must agree across all nodes.
            if stream_id:
                self.assertEqual(stream_id, stream_data['id'].item())
            else:
                stream_id = stream_data['id'].item()
        # At least two OMPT-detected regions, the MPI region, and a gemm region.
        ompt_regions = [key for key in region_names if key.startswith('[OMPT]')]
        self.assertLessEqual(2, len(ompt_regions))
        self.assertTrue(('MPI_Alltoall' in region_names))
        gemm_region = [key for key in region_names if key.lower().find('gemm') != -1]
        self.assertLessEqual(1, len(gemm_region))
    def _test_agent_frequency_map(self, name, use_env=False):
        """Run the frequency_map agent and check regions ran near their mapped frequency.

        Args:
            name: Unique run name used for config, report and trace file names.
            use_env: Provide the frequency map via the environment rather than
                the policy (passed through to create_frequency_map_policy).
        """
        min_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MIN board 0")
        max_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MAX board 0")
        sticker_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_STICKER board 0")
        freq_step = geopm_test_launcher.geopmread("CPUINFO::FREQ_STEP board 0")
        self._agent = "frequency_map"
        report_path = name + '.report'
        trace_path = name + '.trace'
        num_node = 1
        num_rank = 4
        loop_count = 5
        # Default workload sizes; overridden below for known hosts.
        dgemm_bigo = 15.0
        stream_bigo = 1.0
        dgemm_bigo_jlse = 35.647
        dgemm_bigo_quartz = 29.12
        stream_bigo_jlse = 1.6225
        stream_bigo_quartz = 1.7941
        hostname = socket.gethostname()
        if hostname.endswith('.alcf.anl.gov'):
            dgemm_bigo = dgemm_bigo_jlse
            stream_bigo = stream_bigo_jlse
        elif hostname.startswith('mcfly'):
            dgemm_bigo = 42.0
            stream_bigo = 1.75
        elif hostname.startswith('quartz'):
            dgemm_bigo = dgemm_bigo_quartz
            stream_bigo = stream_bigo_quartz
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.set_loop_count(loop_count)
        app_conf.append_region('dgemm', dgemm_bigo)
        app_conf.append_region('stream', stream_bigo)
        app_conf.append_region('all2all', 1.0)
        app_conf.write()
        # Assign a target frequency to each benchmark region.
        freq_map = {}
        freq_map['dgemm'] = sticker_freq
        freq_map['stream'] = sticker_freq - 2 * freq_step
        freq_map['all2all'] = min_freq
        self._options = create_frequency_map_policy(min_freq, max_freq, freq_map, use_env)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                    trace_path, region_barrier=True, time_limit=900)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)
        self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)
        regions = self._output.get_region_names()
        for nn in node_names:
            for region_name in regions:
                region_data = self._output.get_report_data(node_name=nn, region=region_name)
                if (region_name in ['dgemm', 'stream', 'all2all']):
                    msg = region_name + " frequency should be near assigned map frequency"
                    # Reported frequency is a percentage of the sticker frequency.
                    self.assertNear(region_data['frequency'].item(), freq_map[region_name] / sticker_freq * 100, msg=msg)
def test_agent_frequency_map_env(self):
self._test_agent_frequency_map('test_agent_frequency_map_env', use_env=True)
def test_agent_frequency_map_policy(self):
self._test_agent_frequency_map('test_agent_frequency_map_policy', use_env=False)
def test_agent_energy_efficient_single_region(self):
name = 'test_energy_efficient_single_region'
min_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MIN board 0")
sticker_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_STICKER board 0")
freq_step = geopm_test_launcher.geopmread("CPUINFO::FREQ_STEP board 0")
self._agent = "energy_efficient"
report_path = name + '.report'
trace_path = name + '.trace'
num_node = 1
num_rank = 4
loop_count = 100
app_conf = geopmpy.io.BenchConf(name + '_app.config')
self._tmp_files.append(app_conf.get_path())
app_conf.set_loop_count(loop_count)
app_conf.append_region('spin', 0.1)
self._options = {'frequency_min': min_freq,
'frequency_max': sticker_freq}
agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
self._tmp_files.append(agent_conf.get_path())
launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path, trace_path)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
launcher.run(name)
self._output = geopmpy.io.AppOutput(report_path, trace_path + '*')
node_names = self._output.get_node_names()
self.assertEqual(len(node_names), num_node)
regions = self._output.get_region_names()
for nn in node_names:
for region_name in regions:
report = geopmpy.io.RawReport(report_path)
if (region_name in ['spin']):
region = report.raw_region(nn, region_name)
msg = region_name + " frequency should be minimum frequency as specified by policy"
self.assertEqual(region['requested-online-frequency'], min_freq, msg=msg)
    @util.skip_unless_run_long_tests()
    @util.skip_unless_cpufreq()
    @util.skip_unless_batch()
    def test_agent_energy_efficient(self):
        """Compare a fixed-sticker run against the energy_efficient agent's adaptive run.

        The adaptive run must not regress runtime by more than 10% and must
        show positive package energy savings.
        """
        name = 'test_energy_efficient_sticker'
        min_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MIN board 0")
        max_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_MAX board 0")
        sticker_freq = geopm_test_launcher.geopmread("CPUINFO::FREQ_STICKER board 0")
        freq_step = geopm_test_launcher.geopmread("CPUINFO::FREQ_STEP board 0")
        self._agent = "energy_efficient"
        num_node = 1
        num_rank = 4
        loop_count = 200
        # Default workload sizes; overridden below for known hosts.
        dgemm_bigo = 15.0
        stream_bigo = 1.0
        dgemm_bigo_jlse = 35.647
        dgemm_bigo_quartz = 29.12
        stream_bigo_jlse = 1.6225
        stream_bigo_quartz = 1.7941
        hostname = socket.gethostname()
        if hostname.endswith('.alcf.anl.gov'):
            dgemm_bigo = dgemm_bigo_jlse
            stream_bigo = stream_bigo_jlse
        elif hostname.startswith('mcfly'):
            dgemm_bigo = 42.0
            stream_bigo = 1.75
        elif hostname.startswith('quartz'):
            dgemm_bigo = dgemm_bigo_quartz
            stream_bigo = stream_bigo_quartz
        # Two runs: pinned at sticker frequency, then agent-chosen range [min, sticker].
        run = ['_sticker', '_nan_nan']
        for rr in run:
            report_path = name + rr + '.report'
            trace_path = name + rr + '.trace'
            app_conf = geopmpy.io.BenchConf(name + '_app.config')
            self._tmp_files.append(app_conf.get_path())
            app_conf.set_loop_count(loop_count)
            app_conf.append_region('dgemm', dgemm_bigo)
            app_conf.append_region('stream', stream_bigo)
            app_conf.write()
            if rr == '_sticker':
                self._options = {'frequency_min': sticker_freq,
                                 'frequency_max': sticker_freq}
                freq = sticker_freq
            else:
                self._options = {'frequency_min': min_freq,
                                 'frequency_max': sticker_freq}
            agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
            self._tmp_files.append(agent_conf.get_path())
            launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path,
                                                        trace_path, region_barrier=True, time_limit=900)
            launcher.set_num_node(num_node)
            launcher.set_num_rank(num_rank)
            launcher.run(name + rr)
        # Compare application totals between the two runs on every node.
        report_path = name + run[0] + '.report'
        trace_path = name + run[0] + '.trace'
        sticker_out = geopmpy.io.AppOutput(report_path, trace_path + '*')
        report_path = name + run[1] + '.report'
        trace_path = name + run[1] + '.trace'
        nan_out = geopmpy.io.AppOutput(report_path, trace_path + '*')
        for nn in nan_out.get_node_names():
            sticker_app_total = sticker_out.get_app_total_data(node_name=nn)
            nan_app_total = nan_out.get_app_total_data(node_name=nn)
            runtime_savings_epoch = (sticker_app_total['runtime'].item() - nan_app_total['runtime'].item()) / sticker_app_total['runtime'].item()
            energy_savings_epoch = (sticker_app_total['energy-package'].item() - nan_app_total['energy-package'].item()) / sticker_app_total['energy-package'].item()
            # Allow up to 10% runtime regression, but require some energy savings.
            self.assertLess(-0.1, runtime_savings_epoch)
            self.assertLess(0.0, energy_savings_epoch)
class TestIntegrationGeopmio(unittest.TestCase):
    """Command line tests of the geopmread and geopmwrite tools."""
    def setUp(self):
        # Output lines containing this string are benign platform warnings.
        self.skip_warning_string = 'Incompatible CPU'

    def check_output(self, args, expected):
        """Run the tool with args; each string in expected must appear in order in stdout."""
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for exp in expected:
                line = proc.stdout.readline()
                while self.skip_warning_string.encode() in line:
                    line = proc.stdout.readline()
                self.assertIn(exp.encode(), line)
            # No remaining line may contain an error message.
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))

    def check_output_range(self, args, min_exp, max_exp):
        """Run the tool with args; every numeric output line must lie in [min_exp, max_exp]."""
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() in line:
                    continue
                if line.startswith(b'0x'):
                    # Fix: int(line) cannot parse a '0x' prefix; base 0 honors it.
                    value = int(line, 0)
                else:
                    value = float(line)
                self.assertLessEqual(min_exp, value, msg="Value read for {} smaller than {}: {}.".format(args, min_exp, value))
                self.assertGreaterEqual(max_exp, value, msg="Value read for {} larger than {}: {}.".format(args, max_exp, value))
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))

    def check_no_error(self, args):
        """Run the tool with args; assert no output line contains 'Error'."""
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))

    def test_geopmread_command_line(self):
        """Check geopmread command line arguments and error messages."""
        self.exec_name = "geopmread"
        self.check_no_error([])
        self.check_output(['--domain'], ['board', 'package', 'core', 'cpu',
                                         'board_memory', 'package_memory',
                                         'board_nic', 'package_nic',
                                         'board_accelerator', 'package_accelerator'])
        self.check_output(['--domain', 'TIME'], ['cpu'])
        self.check_no_error(['TIME', 'board', '0'])
        self.check_no_error(['--info'])
        self.check_output(['--info', 'TIME'], ['Time in seconds'])
        # Error cases: missing or invalid arguments must give clear messages.
        read_err = 'domain type and domain index are required'
        self.check_output(['TIME'], [read_err])
        self.check_output(['TIME', 'board'], [read_err])
        self.check_output(['TIME', 'board', 'bad'], ['invalid domain index'])
        self.check_output(['FREQUENCY', 'package', '111'], ['cannot read signal'])
        self.check_output(['ENERGY_PACKAGE', 'cpu', '0'], ['cannot read signal'])
        self.check_output(['INVALID', 'board', '0'], ['cannot read signal'])
        self.check_output(['--domain', 'INVALID'], ['unable to determine signal type'])
        self.check_output(['--domain', '--info'], ['info about domain not implemented'])

    @util.skip_unless_batch()
    def test_geopmread_all_signal_agg(self):
        """Every signal listed by geopmread must be readable aggregated to the board domain."""
        self.exec_name = "geopmread"
        all_signals = []
        try:
            proc = subprocess.Popen([self.exec_name],
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    all_signals.append(line.strip())
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
        for sig in all_signals:
            self.check_no_error([sig.decode(), 'board', '0'])

    @util.skip_unless_batch()
    def test_geopmread_signal_value(self):
        """Values of well-known signals must fall within plausible physical ranges."""
        self.exec_name = "geopmread"
        signal_range = {
            "POWER_PACKAGE": (20, 400),
            "FREQUENCY": (1.0e8, 5.0e9),
            "TIME": (0, 10),
            "TEMPERATURE_CORE": (0, 100)
        }
        for signal, val_range in signal_range.items():
            # Fix: the former bare 'except: raise' wrapper with an unreachable
            # 'pass' was a no-op and has been removed; failures propagate directly.
            self.check_no_error([signal, "board", "0"])
            self.check_output_range([signal, "board", "0"], *val_range)

    def test_geopmread_custom_msr(self):
        """A plugin path with a custom MSR definition adds the new signal to geopmread."""
        self.exec_name = "geopmread"
        path = os.path.join(
            os.path.dirname(
                os.path.dirname(
                    os.path.realpath(__file__))),
            'examples/custom_msr/')
        custom_env = os.environ.copy()
        custom_env['GEOPM_PLUGIN_PATH'] = path
        all_signals = []
        try:
            proc = subprocess.Popen([self.exec_name], env=custom_env,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    all_signals.append(line.strip())
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
        self.assertIn(b'MSR::CORE_PERF_LIMIT_REASONS#', all_signals)

    def test_geopmwrite_command_line(self):
        """Check geopmwrite command line arguments and error messages."""
        self.exec_name = "geopmwrite"
        self.check_no_error([])
        self.check_output(['--domain'], ['board', 'package', 'core', 'cpu',
                                         'board_memory', 'package_memory',
                                         'board_nic', 'package_nic',
                                         'board_accelerator', 'package_accelerator'])
        self.check_no_error(['--domain', 'FREQUENCY'])
        self.check_no_error(['--info'])
        self.check_output(['--info', 'FREQUENCY'], ['processor frequency'])
        write_err = 'domain type, domain index, and value are required'
        self.check_output(['FREQUENCY'], [write_err])
        self.check_output(['FREQUENCY', 'board'], [write_err])
        self.check_output(['FREQUENCY', 'board', '0'], [write_err])
        self.check_output(['FREQUENCY', 'board', 'bad', '0'], ['invalid domain index'])
        self.check_output(['FREQUENCY', 'board', '0', 'bad'], ['invalid write value'])
        self.check_output(['FREQUENCY', 'package', '111', '0'], ['cannot write control'])
        self.check_output(['FREQUENCY', 'board_nic', '0', '0'], ['cannot write control'])
        self.check_output(['INVALID', 'board', '0', '0'], ['cannot write control'])
        self.check_output(['--domain', 'INVALID'], ['unable to determine control type'])
        self.check_output(['--domain', '--info'], ['info about domain not implemented'])

    @util.skip_unless_batch()
    def test_geopmwrite_set_freq(self):
        """Write min and max frequency with geopmwrite and read each value back."""
        def read_stdout_line(stdout):
            # Skip benign warning lines to reach the first real output line.
            line = stdout.readline()
            while self.skip_warning_string.encode() in line:
                line = stdout.readline()
            return line.strip()

        def read_current_freq(domain, signal='FREQUENCY'):
            read_proc = subprocess.Popen(['geopmread', signal, domain, '0'],
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            freq = read_stdout_line(read_proc.stdout)
            freq = float(freq)
            return freq

        def read_min_max_freq():
            # Truncate both bounds down to a multiple of 100 MHz.
            read_proc = subprocess.Popen(['geopmread', 'CPUINFO::FREQ_MIN', 'board', '0'],
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            min_freq = read_stdout_line(read_proc.stdout)
            min_freq = float(int(float(min_freq)/1e8)*1e8)
            read_proc = subprocess.Popen(['geopmread', 'CPUINFO::FREQ_MAX', 'board', '0'],
                                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            max_freq = read_stdout_line(read_proc.stdout)
            max_freq = float(int(float(max_freq)/1e8)*1e8)
            return min_freq, max_freq

        self.exec_name = "geopmwrite"
        read_proc = subprocess.Popen(['geopmread', '--domain', 'FREQUENCY'],
                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        read_domain = read_stdout_line(read_proc.stdout).decode()
        write_proc = subprocess.Popen([self.exec_name, '--domain', 'FREQUENCY'],
                                      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        write_domain = read_stdout_line(write_proc.stdout).decode()
        min_freq, max_freq = read_min_max_freq()
        # Save the current setting so it can be restored at the end.
        old_freq = read_current_freq(write_domain, 'MSR::PERF_CTL:FREQ')
        self.assertLess(old_freq, max_freq * 2)
        self.assertGreater(old_freq, min_freq - 1e8)
        self.check_no_error(['FREQUENCY', write_domain, '0', str(min_freq)])
        result = read_current_freq(read_domain)
        self.assertEqual(min_freq, result)
        self.check_no_error(['FREQUENCY', write_domain, '0', str(max_freq)])
        result = read_current_freq(read_domain)
        self.assertEqual(max_freq, result)
        self.check_no_error(['FREQUENCY', write_domain, '0', str(old_freq)])
class TestIntegrationGeopmagent(unittest.TestCase):
    """Command line tests of the geopmagent tool."""
    def setUp(self):
        self.exec_name = 'geopmagent'
        # Output lines containing this string are benign platform warnings.
        self.skip_warning_string = 'Incompatible CPU frequency driver/governor'
    def check_output(self, args, expected):
        """Run geopmagent with args; each string in expected must appear in order in stdout."""
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for exp in expected:
                line = proc.stdout.readline()
                # Skip warnings and blank lines between expected matches.
                while self.skip_warning_string.encode() in line or line == b'\n':
                    line = proc.stdout.readline()
                self.assertIn(exp.encode(), line)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
    def check_json_output(self, args, expected):
        """Run geopmagent with args; the first output line must parse as JSON equal to expected."""
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
        line = proc.stdout.readline()
        while self.skip_warning_string.encode() in line or line == b'\n':
            line = proc.stdout.readline()
        try:
            out_json = json.loads(line.decode())
        except ValueError:
            self.fail('Could not convert json string: {}\n'.format(line))
        self.assertEqual(expected, out_json)
        # Remaining output must not contain errors.
        for line in proc.stdout:
            if self.skip_warning_string.encode() not in line:
                self.assertNotIn(b'Error', line)
    def check_no_error(self, args):
        """Run geopmagent with args; assert no output line contains 'Error'."""
        try:
            proc = subprocess.Popen([self.exec_name] + args,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            for line in proc.stdout:
                if self.skip_warning_string.encode() not in line:
                    self.assertNotIn(b'Error', line)
        except subprocess.CalledProcessError as ex:
            sys.stderr.write('{}\n'.format(ex.output))
    def test_geopmagent_command_line(self):
        """Check agent listing, JSON policy generation and error messages."""
        agent_names = ['monitor', 'power_balancer', 'power_governor',
                       'energy_efficient', 'frequency_map']
        self.check_output([], agent_names)
        self.check_output(['--help'], ['Usage'])
        self.check_no_error(['--version'])
        # Every agent must describe its policy and sample names.
        for agent in agent_names:
            self.check_output(['--agent', agent],
                              ['Policy', 'Sample'])
        self.check_json_output(['--agent', 'monitor', '--policy', 'None'],
                               {})
        self.check_json_output(['--agent', 'power_governor', '--policy', '150'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 150})
        # NAN policy values are accepted in either case and normalized to 'NAN'.
        self.check_json_output(['--agent', 'power_governor', '--policy', 'NAN'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 'NAN'})
        self.check_json_output(['--agent', 'power_governor', '--policy', 'nan'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 'NAN'})
        self.check_json_output(['--agent', 'energy_efficient', '--policy', 'nan,nan'],
                               {'FREQ_MIN': 'NAN', 'FREQ_MAX': 'NAN'})
        self.check_json_output(['--agent', 'energy_efficient', '--policy', '1.2e9,nan'],
                               {'FREQ_MIN': 1.2e9, 'FREQ_MAX': 'NAN'})
        self.check_json_output(['--agent', 'energy_efficient', '--policy', 'nan,1.3e9'],
                               {'FREQ_MIN': 'NAN', 'FREQ_MAX': 1.3e9})
        self.check_json_output(['--agent', 'power_balancer', '--policy', '150'],
                               {'POWER_PACKAGE_LIMIT_TOTAL': 150})
        # Error cases.
        self.check_output(['--agent', 'power_governor', '--policy', 'None'],
                          ['not a valid floating-point number', 'Invalid argument'])
        self.check_output(['--agent', 'monitor', '--policy', '300'],
                          ['agent takes no parameters', 'Invalid argument'])
        self.check_output(['--agent', 'energy_efficient', '--policy', '2.0e9,5.0e9,4.5e9,6.7,4.2'],
                          ['Number of policies', 'Invalid argument'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| true | true |
f7302e0b103de32216051a22e30483430b67f84e | 940 | py | Python | running_modes/utils/general.py | lilleswing/Reinvent-1 | ac4e3e6fa6379c6f4af883478dfd1b3407933ada | [
"Apache-2.0"
] | 183 | 2020-04-04T02:01:15.000Z | 2022-03-30T21:56:56.000Z | running_modes/utils/general.py | lilleswing/Reinvent-1 | ac4e3e6fa6379c6f4af883478dfd1b3407933ada | [
"Apache-2.0"
] | 39 | 2020-04-05T15:19:56.000Z | 2022-03-09T12:58:21.000Z | running_modes/utils/general.py | lilleswing/Reinvent-1 | ac4e3e6fa6379c6f4af883478dfd1b3407933ada | [
"Apache-2.0"
] | 70 | 2020-04-05T19:25:43.000Z | 2022-02-22T12:04:39.000Z | import time
import numpy as np
import torch
def to_tensor(tensor):
    """Convert a numpy array to a torch tensor and move it to the GPU when available.

    Args:
        tensor: A ``numpy.ndarray`` or a ``torch.Tensor``.

    Returns:
        A ``torch.Tensor`` on the CUDA device when available, otherwise on CPU.
    """
    if isinstance(tensor, np.ndarray):
        tensor = torch.from_numpy(tensor)
    # torch.autograd.Variable has been a no-op wrapper since torch 0.4;
    # returning the tensor directly is equivalent and avoids the deprecated API.
    if torch.cuda.is_available():
        return tensor.cuda()
    return tensor
def set_default_device_cuda():
    """Set the default torch tensor type to CUDA when available.

    Returns:
        bool: True when the CUDA float tensor type became the default,
        False when the CPU float tensor type became the default.
    """
    # Fix: '== False' comparison replaced by the idiomatic 'not ...'.
    if not torch.cuda.is_available():
        torch.set_default_tensor_type(torch.FloatTensor)
        return False
    torch.set_default_tensor_type(torch.cuda.FloatTensor)  # pylint: disable=E1101
    return True
def estimate_run_time(start_time, n_steps, step):
    """Estimate the remaining run time of a loop.

    Fixes: the final line was fused with unrelated metadata text and has been
    repaired to a plain return statement.

    Args:
        start_time: Wall-clock time (``time.time()``) when the loop started.
        n_steps: Total number of steps the loop will execute.
        step: Zero-based index of the step just completed.

    Returns:
        dict with keys "elapsed" (whole seconds since start_time) and
        "left" (projected seconds remaining, assuming constant step cost).
    """
    time_elapsed = int(time.time() - start_time)
    # Scale elapsed time by the ratio of steps remaining to steps completed.
    time_left = (time_elapsed * ((n_steps - step) / (step + 1)))
    summary = {"elapsed": time_elapsed, "left": time_left}
    return summary
import numpy as np
import torch
def to_tensor(tensor):
    """Wrap a numpy array or torch tensor as an autograd Variable, on GPU when available."""
    if isinstance(tensor, np.ndarray):
        tensor = torch.from_numpy(tensor)
    # NOTE(review): torch.autograd.Variable is deprecated (a no-op since torch 0.4).
    if torch.cuda.is_available():
        return torch.autograd.Variable(tensor).cuda()
    return torch.autograd.Variable(tensor)
def set_default_device_cuda():
    """Set the default torch tensor type; return True when CUDA became the default."""
    # NOTE(review): '== False' should be the idiomatic 'not ...'; left unchanged here.
    if torch.cuda.is_available() == False:
        tensor = torch.FloatTensor
        torch.set_default_tensor_type(tensor)
        return False
    else:
        tensor = torch.cuda.FloatTensor
        torch.set_default_tensor_type(tensor)
        return True
def estimate_run_time(start_time, n_steps, step):
    """Estimate the remaining run time of a loop.

    Fixes: the final line was fused with unrelated metadata text and has been
    repaired to a plain return statement.

    Args:
        start_time: Wall-clock time (``time.time()``) when the loop started.
        n_steps: Total number of steps the loop will execute.
        step: Zero-based index of the step just completed.

    Returns:
        dict with "elapsed" (whole seconds so far) and "left" (projected
        seconds remaining, assuming constant step cost).
    """
    time_elapsed = int(time.time() - start_time)
    time_left = (time_elapsed * ((n_steps - step) / (step + 1)))
    summary = {"elapsed": time_elapsed, "left": time_left}
    return summary
f7302e9cf28bc426a294342f8db30d4a7364613d | 370 | py | Python | multiple-languages/python/ros-cdk-cas-1.0.3/src/ros_cdk_cas/_jsii/__init__.py | aliyun/Resource-Orchestration-Service-Cloud-Development-K | 2b81e135002ed81cb72f7d07be7ff497ea39e2e1 | [
"Apache-2.0"
] | 15 | 2020-11-10T02:00:28.000Z | 2022-02-07T19:28:10.000Z | multiple-languages/python/ros-cdk-cas-1.0.3/src/ros_cdk_cas/_jsii/__init__.py | aliyun/Resource-Orchestration-Service-Cloud-Development-K | 2b81e135002ed81cb72f7d07be7ff497ea39e2e1 | [
"Apache-2.0"
] | 23 | 2021-02-02T04:37:02.000Z | 2022-03-31T06:41:06.000Z | multiple-languages/python/ros-cdk-cas-1.0.3/src/ros_cdk_cas/_jsii/__init__.py | aliyun/Resource-Orchestration-Service-Cloud-Development-K | 2b81e135002ed81cb72f7d07be7ff497ea39e2e1 | [
"Apache-2.0"
] | 4 | 2021-01-13T05:48:43.000Z | 2022-03-15T11:26:48.000Z | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
import constructs._jsii
import ros_cdk_core._jsii
# Load the bundled jsii assembly for this package.  __name__[0:-6] strips the
# trailing "._jsii" (6 characters) to recover the parent package name.
__jsii_assembly__ = jsii.JSIIAssembly.load(
    "@alicloud/ros-cdk-cas", "1.0.3", __name__[0:-6], "ros-cdk-cas@1.0.3.jsii.tgz"
)
# Only the loaded assembly handle is part of this module's public API.
__all__ = [
    "__jsii_assembly__",
]
# publication.publish() enforces __all__ on the module namespace -- see the
# 'publication' package documentation.
publication.publish()
| 16.086957 | 82 | 0.759459 | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
import constructs._jsii
import ros_cdk_core._jsii
# Load the bundled jsii assembly; __name__[0:-6] strips the "._jsii" suffix.
__jsii_assembly__ = jsii.JSIIAssembly.load(
    "@alicloud/ros-cdk-cas", "1.0.3", __name__[0:-6], "ros-cdk-cas@1.0.3.jsii.tgz"
)
__all__ = [
    "__jsii_assembly__",
]
publication.publish()
| true | true |
f7302ec626d9babefa67e7f7cd70358eb037d937 | 1,552 | py | Python | src/pyri/webui_browser/plugins/panel.py | pyri-project/pyri-webui-browser | 57f20bef7af357a8d051c700aff95fef389a3be0 | [
"Apache-2.0"
] | null | null | null | src/pyri/webui_browser/plugins/panel.py | pyri-project/pyri-webui-browser | 57f20bef7af357a8d051c700aff95fef389a3be0 | [
"Apache-2.0"
] | null | null | null | src/pyri/webui_browser/plugins/panel.py | pyri-project/pyri-webui-browser | 57f20bef7af357a8d051c700aff95fef389a3be0 | [
"Apache-2.0"
] | null | null | null | from typing import List, Dict, Callable, Any, NamedTuple, TYPE_CHECKING
from pyri.plugins import util as plugin_util
if TYPE_CHECKING:
from .. import PyriWebUIBrowser
class PyriWebUIBrowserPanelInfo(NamedTuple):
    """Metadata describing one panel type offered by a plugin."""
    # Human-readable panel title.
    title: str
    # Identifier used to look up and instantiate the panel.
    panel_type: str
    # Ordering hint; higher-vs-lower-first semantics not shown here -- TODO confirm.
    priority: int
class PyriWebUIBrowserPanelBase:
    """Base class for panel objects returned by panel plugin factories."""
    pass
class PyriWebUIBrowserPanelPluginFactory:
    """Base class for plugins that provide webui browser panels.

    Subclasses override the methods below to advertise and construct panels.
    """

    def __init__(self):
        super().__init__()

    def get_plugin_name(self) -> str:
        """Return the unique name of this plugin."""
        return ""

    def get_panels_infos(self) -> Dict[str,PyriWebUIBrowserPanelInfo]:
        """Return a mapping of panel_type to PyriWebUIBrowserPanelInfo.

        Fix: the default now returns an empty dict instead of an empty list so
        it matches the declared return type and callers' membership tests.
        """
        return {}

    async def add_panel(self, panel_type: str, core: "PyriWebUIBrowser", parent_element: Any) -> PyriWebUIBrowserPanelBase:
        """Create and attach the panel of the given type under parent_element."""
        raise NotImplementedError()
def get_webui_browser_panel_factories() -> List[PyriWebUIBrowserPanelPluginFactory]:
    """Return every registered webui browser panel plugin factory."""
    factories = plugin_util.get_plugin_factories("pyri.plugins.webui_browser_panel")
    return factories
def get_all_webui_browser_panels_infos() -> Dict[str,Any]:
    """Return a mapping of plugin name to that plugin's panel infos."""
    return {
        factory.get_plugin_name(): factory.get_panels_infos()
        for factory in get_webui_browser_panel_factories()
    }
async def add_webui_browser_panel(panel_type: str, core: "PyriWebUIBrowser", parent_element: Any) -> PyriWebUIBrowserPanelBase:
    """Locate the factory providing panel_type and ask it to build the panel.

    Raises AssertionError when no registered factory provides panel_type.
    (Return annotation corrected: the value comes from factory.add_panel.)
    """
    factories = get_webui_browser_panel_factories()
    for factory in factories:
        infos = factory.get_panels_infos()
        if panel_type in infos:
            return await factory.add_panel(panel_type, core, parent_element)
    # NOTE(review): assert is stripped under 'python -O'; raising ValueError would be safer.
    assert False, f"Unknown panel_type \"{panel_type}\" specified"
| 33.021277 | 123 | 0.73518 | from typing import List, Dict, Callable, Any, NamedTuple, TYPE_CHECKING
from pyri.plugins import util as plugin_util
if TYPE_CHECKING:
from .. import PyriWebUIBrowser
class PyriWebUIBrowserPanelInfo(NamedTuple):
    """Metadata describing one panel type offered by a plugin."""
    title: str
    panel_type: str
    priority: int
class PyriWebUIBrowserPanelBase:
    """Base class for panel objects returned by panel plugin factories."""
    pass
class PyriWebUIBrowserPanelPluginFactory:
    """Base class for plugins that provide webui browser panels."""
    def __init__(self):
        super().__init__()
    def get_plugin_name(self) -> str:
        """Return the unique name of this plugin."""
        return ""
    def get_panels_infos(self) -> Dict[str,PyriWebUIBrowserPanelInfo]:
        # NOTE(review): returns a list although the annotation declares a Dict -- confirm intent.
        return []
    async def add_panel(self, panel_type: str, core: "PyriWebUIBrowser", parent_element: Any) -> PyriWebUIBrowserPanelBase:
        """Create and attach the panel of the given type under parent_element."""
        raise NotImplementedError()
def get_webui_browser_panel_factories() -> List[PyriWebUIBrowserPanelPluginFactory]:
    """Return every registered webui browser panel plugin factory."""
    return plugin_util.get_plugin_factories("pyri.plugins.webui_browser_panel")
def get_all_webui_browser_panels_infos() -> Dict[str,Any]:
    """Return a mapping of plugin name to that plugin's panel infos."""
    ret = dict()
    factories = get_webui_browser_panel_factories()
    for factory in factories:
        ret[factory.get_plugin_name()] = factory.get_panels_infos()
    return ret
async def add_webui_browser_panel(panel_type: str, core: "PyriWebUIBrowser", parent_element: Any) -> Dict[str,Any]:
    """Locate the factory providing panel_type and ask it to build the panel."""
    factories = get_webui_browser_panel_factories()
    for factory in factories:
        infos = factory.get_panels_infos()
        if panel_type in infos:
            return await factory.add_panel(panel_type, core, parent_element)
    # NOTE(review): assert is stripped under 'python -O'; raising ValueError would be safer.
    assert False, f"Unknown panel_type \"{panel_type}\" specified"
| true | true |
f7302fb44428bb7330b8cbab6d2be8a127232a1b | 1,451 | py | Python | monk/system_unit_tests/pytorch/test_block_resnet_v2.py | Shreyashwaghe/monk_v1 | 4ee4d9483e8ffac9b73a41f3c378e5abf5fc799b | [
"Apache-2.0"
] | 7 | 2020-07-26T08:37:29.000Z | 2020-10-30T10:23:11.000Z | monk/system_unit_tests/pytorch/test_block_resnet_v2.py | mursalfk/monk_v1 | 62f34a52f242772186ffff7e56764e958fbcd920 | [
"Apache-2.0"
] | null | null | null | monk/system_unit_tests/pytorch/test_block_resnet_v2.py | mursalfk/monk_v1 | 62f34a52f242772186ffff7e56764e958fbcd920 | [
"Apache-2.0"
] | 1 | 2020-10-07T12:57:44.000Z | 2020-10-07T12:57:44.000Z | import os
import sys
sys.path.append("../../../monk/");
import psutil
from pytorch_prototype import prototype
from compare_prototype import compare
from common import print_start
from common import print_status
import torch
import numpy as np
from pytorch.losses.return_loss import load_loss
def test_block_resnet_v2(system_dict):
    """Build a two-stage resnet_v2 block network, run one forward pass, and
    record the pass/fail/skip outcome in system_dict."""
    run_forward = True
    test_name = "test_block_resnet_v2"
    system_dict["total_tests"] += 1
    print_start(test_name, system_dict["total_tests"])
    if not run_forward:
        system_dict["skipped_tests_lists"].append(test_name)
        print_status("Skipped")
        return system_dict
    try:
        proto = prototype(verbose=0)
        proto.Prototype("sample-project-1", "sample-experiment-1")
        network = [
            proto.resnet_v2_block(output_channels=32, stride=1, downsample=True),
            proto.resnet_v2_block(output_channels=32, stride=1, downsample=False),
        ]
        proto.Compile_Network(network, data_shape=(1, 64, 64), use_gpu=False)
        sample = torch.randn(1, 1, 64, 64)
        proto.system_dict["local"]["model"](sample)
        system_dict["successful_tests"] += 1
        print_status("Pass")
    except Exception as err:
        # Record the failure instead of aborting the suite.
        system_dict["failed_tests_exceptions"].append(err)
        system_dict["failed_tests_lists"].append(test_name)
        print_status("Fail")
    return system_dict
| 29.612245 | 96 | 0.644383 | import os
import sys
sys.path.append("../../../monk/");
import psutil
from pytorch_prototype import prototype
from compare_prototype import compare
from common import print_start
from common import print_status
import torch
import numpy as np
from pytorch.losses.return_loss import load_loss
def test_block_resnet_v2(system_dict):
    """Build a two-stage resnet_v2 block network, run one forward pass, and
    record the pass/fail/skip outcome in system_dict."""
    forward = True;
    test = "test_block_resnet_v2";
    system_dict["total_tests"] += 1;
    print_start(test, system_dict["total_tests"])
    if(forward):
        try:
            gtf = prototype(verbose=0);
            gtf.Prototype("sample-project-1", "sample-experiment-1");
            network = [];
            network.append(gtf.resnet_v2_block(output_channels=32, stride=1, downsample=True));
            network.append(gtf.resnet_v2_block(output_channels=32, stride=1, downsample=False));
            gtf.Compile_Network(network, data_shape=(1, 64, 64), use_gpu=False);
            x = torch.randn(1, 1, 64, 64);
            y = gtf.system_dict["local"]["model"](x);
            system_dict["successful_tests"] += 1;
            print_status("Pass");
        except Exception as e:
            # Record the failure instead of aborting the suite.
            system_dict["failed_tests_exceptions"].append(e);
            system_dict["failed_tests_lists"].append(test);
            forward = False;
            print_status("Fail");
    else:
        system_dict["skipped_tests_lists"].append(test);
        print_status("Skipped");
    return system_dict
| true | true |
f7302ffbe807ed5672868941d35075e072212e37 | 2,694 | py | Python | sgnlp/models/span_extraction/train.py | raymondng76/sgnlp | f09eada90ef5b1ee979901e5c14413d32e758049 | [
"MIT"
] | 14 | 2021-08-02T01:52:18.000Z | 2022-01-14T10:16:02.000Z | sgnlp/models/span_extraction/train.py | raymondng76/sgnlp | f09eada90ef5b1ee979901e5c14413d32e758049 | [
"MIT"
] | 29 | 2021-08-02T01:53:46.000Z | 2022-03-30T05:40:46.000Z | sgnlp/models/span_extraction/train.py | raymondng76/sgnlp | f09eada90ef5b1ee979901e5c14413d32e758049 | [
"MIT"
] | 7 | 2021-08-02T01:54:19.000Z | 2022-01-07T06:37:45.000Z | import json
import math
from transformers import Trainer
from transformers import TrainingArguments
from .config import RecconSpanExtractionConfig
from .data_class import RecconSpanExtractionArguments
from .modeling import RecconSpanExtractionModel
from .tokenization import RecconSpanExtractionTokenizer
from .utils import parse_args_and_load_config, load_examples, RecconSpanExtractionData
def train_model(cfg: RecconSpanExtractionArguments):
    """
    Method for training RecconSpanExtractionModel.

    Args:
        cfg (:obj:`RecconSpanExtractionArguments`):
            RecconSpanExtractionArguments config load from config file.

    Example::

        import json
        from sgnlp.models.span_extraction import train
        from sgnlp.models.span_extraction.utils import parse_args_and_load_config

        cfg = parse_args_and_load_config('config/span_extraction_config.json')
        train(cfg)
    """
    config = RecconSpanExtractionConfig.from_pretrained(cfg.model_name)
    tokenizer = RecconSpanExtractionTokenizer.from_pretrained(cfg.model_name)
    model = RecconSpanExtractionModel.from_pretrained(cfg.model_name, config=config)
    with open(cfg.train_data_path, "r") as train_file:
        train_json = json.load(train_file)
    with open(cfg.val_data_path, "r") as val_file:
        val_json = json.load(val_file)
    # Convert raw QA-style examples into tokenized model features.
    load_train_exp_args = {
        "examples": train_json,
        "tokenizer": tokenizer,
        "max_seq_length": cfg.max_seq_length,
        "doc_stride": cfg.doc_stride,
        "max_query_length": cfg.max_query_length,
    }
    load_valid_exp_args = {
        "examples": val_json,
        "tokenizer": tokenizer,
        "max_seq_length": cfg.max_seq_length,
        "doc_stride": cfg.doc_stride,
        "max_query_length": cfg.max_query_length,
    }
    train_dataset = load_examples(**load_train_exp_args)
    val_dataset = load_examples(**load_valid_exp_args)
    # Total optimizer steps over the whole run, accounting for gradient accumulation.
    t_total = (
        len(train_dataset)
        // cfg.train_args["gradient_accumulation_steps"]
        * cfg.train_args["num_train_epochs"]
    )
    # Evaluate roughly once per epoch; warm up for a fixed fraction of total steps.
    cfg.train_args["eval_steps"] = int(
        len(train_dataset) / cfg.train_args["per_device_train_batch_size"]
    )
    cfg.train_args["warmup_steps"] = math.ceil(t_total * cfg.train_args["warmup_ratio"])
    training_args = TrainingArguments(**cfg.train_args)
    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=RecconSpanExtractionData(train_dataset),
        eval_dataset=RecconSpanExtractionData(val_dataset),
    )
    trainer.train()
    trainer.save_model()
if __name__ == "__main__":
    # CLI entry point: load the span-extraction config (default path or argv)
    # and launch training.
    cfg = parse_args_and_load_config()
    train_model(cfg)
| 32.071429 | 88 | 0.717892 | import json
import math
from transformers import Trainer
from transformers import TrainingArguments
from .config import RecconSpanExtractionConfig
from .data_class import RecconSpanExtractionArguments
from .modeling import RecconSpanExtractionModel
from .tokenization import RecconSpanExtractionTokenizer
from .utils import parse_args_and_load_config, load_examples, RecconSpanExtractionData
def train_model(cfg: RecconSpanExtractionArguments):
config = RecconSpanExtractionConfig.from_pretrained(cfg.model_name)
tokenizer = RecconSpanExtractionTokenizer.from_pretrained(cfg.model_name)
model = RecconSpanExtractionModel.from_pretrained(cfg.model_name, config=config)
with open(cfg.train_data_path, "r") as train_file:
train_json = json.load(train_file)
with open(cfg.val_data_path, "r") as val_file:
val_json = json.load(val_file)
load_train_exp_args = {
"examples": train_json,
"tokenizer": tokenizer,
"max_seq_length": cfg.max_seq_length,
"doc_stride": cfg.doc_stride,
"max_query_length": cfg.max_query_length,
}
load_valid_exp_args = {
"examples": val_json,
"tokenizer": tokenizer,
"max_seq_length": cfg.max_seq_length,
"doc_stride": cfg.doc_stride,
"max_query_length": cfg.max_query_length,
}
train_dataset = load_examples(**load_train_exp_args)
val_dataset = load_examples(**load_valid_exp_args)
t_total = (
len(train_dataset)
// cfg.train_args["gradient_accumulation_steps"]
* cfg.train_args["num_train_epochs"]
)
cfg.train_args["eval_steps"] = int(
len(train_dataset) / cfg.train_args["per_device_train_batch_size"]
)
cfg.train_args["warmup_steps"] = math.ceil(t_total * cfg.train_args["warmup_ratio"])
training_args = TrainingArguments(**cfg.train_args)
trainer = Trainer(
model=model,
args=training_args,
train_dataset=RecconSpanExtractionData(train_dataset),
eval_dataset=RecconSpanExtractionData(val_dataset),
)
trainer.train()
trainer.save_model()
if __name__ == "__main__":
cfg = parse_args_and_load_config()
train_model(cfg)
| true | true |
f73030990cc126d9327feaa150791a0d22622092 | 1,387 | py | Python | tests/update_test_outputs.py | UUDigitalHumanitieslab/folia2alpino | 4181955ccecadd02e311a548d67b17c09bfd81d4 | [
"MIT"
] | null | null | null | tests/update_test_outputs.py | UUDigitalHumanitieslab/folia2alpino | 4181955ccecadd02e311a548d67b17c09bfd81d4 | [
"MIT"
] | null | null | null | tests/update_test_outputs.py | UUDigitalHumanitieslab/folia2alpino | 4181955ccecadd02e311a548d67b17c09bfd81d4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Script for updating the output files using the current behavior.
"""
import sys
sys.path.append("..")
sys.path.append(".")
from glob import glob
import unittest
import re
from typing import cast, List, Sequence
from os import path
from corpus2alpino.converter import Converter
from corpus2alpino.collectors.filesystem import FilesystemCollector
from corpus2alpino.targets.memory import MemoryTarget
from corpus2alpino.writers.paqu import PaQuWriter
# Optional first CLI argument: comma-separated glob patterns for the fixtures
# to regenerate; defaults to all supported example formats.
args = sys.argv[1:]
if args:
    patterns = args[0].split(',')
else:
    patterns = ['example*.xml', 'example*.txt', 'example*.cha']
paqu_writer = PaQuWriter()
test_files = cast(List[str], [])
for pattern in patterns:
    # Collect fixture inputs next to this script, skipping previously
    # generated expected-output files.
    test_files += (f for f in glob(path.join(path.dirname(__file__), pattern))
                   if '_expected' not in f)
converter = Converter(
    FilesystemCollector(test_files),
    target=MemoryTarget(),
    writer=paqu_writer)
converted = list(converter.convert())
for test_file, output in zip(test_files, converted):
    # Bug fix: use a raw string so '\.' is a regex escape, not an invalid
    # Python string escape (DeprecationWarning on Python 3.6+).
    expected_filename = re.sub(r'\.(txt|xml|cha)$', '_expected.txt', test_file)
    with open(expected_filename, mode='w', encoding='utf-8') as expected_file:
        expected_file.write(output)
# Late import: run the enrichment step after the main regeneration above.
from test_enrich_lassy import get_enriched
with open('enrichment_expected.xml', mode='w', encoding='utf-8') as expected_file:
    expected_file.write(get_enriched())
| 29.510638 | 82 | 0.733237 |
import sys
sys.path.append("..")
sys.path.append(".")
from glob import glob
import unittest
import re
from typing import cast, List, Sequence
from os import path
from corpus2alpino.converter import Converter
from corpus2alpino.collectors.filesystem import FilesystemCollector
from corpus2alpino.targets.memory import MemoryTarget
from corpus2alpino.writers.paqu import PaQuWriter
args = sys.argv[1:]
if args:
patterns = args[0].split(',')
else:
patterns = ['example*.xml', 'example*.txt', 'example*.cha']
paqu_writer = PaQuWriter()
test_files = cast(List[str], [])
for pattern in patterns:
test_files += (f for f in glob(path.join(path.dirname(__file__), pattern))
if '_expected' not in f)
converter = Converter(
FilesystemCollector(test_files),
target=MemoryTarget(),
writer=paqu_writer)
converted = list(converter.convert())
for test_file, output in zip(test_files, converted):
expected_filename = re.sub('\.(txt|xml|cha)$', '_expected.txt', test_file)
with open(expected_filename, mode='w', encoding='utf-8') as expected_file:
expected_file.write(output)
from test_enrich_lassy import get_enriched
with open('enrichment_expected.xml', mode='w', encoding='utf-8') as expected_file:
expected_file.write(get_enriched())
| true | true |
f730313f1a3cfec6df0cdc426961abe92340dec7 | 2,873 | py | Python | spiketoolkit/preprocessing/center.py | teristam/spiketoolk | 0ae7adabce46cf620c3627ee0093d890996ef355 | [
"MIT"
] | null | null | null | spiketoolkit/preprocessing/center.py | teristam/spiketoolk | 0ae7adabce46cf620c3627ee0093d890996ef355 | [
"MIT"
] | null | null | null | spiketoolkit/preprocessing/center.py | teristam/spiketoolk | 0ae7adabce46cf620c3627ee0093d890996ef355 | [
"MIT"
] | null | null | null | from spikeextractors import RecordingExtractor
from .transform import TransformRecording
import numpy as np
class CenterRecording(TransformRecording):
    """TransformRecording that subtracts a per-channel offset (mean or median)
    estimated from evenly spaced snippets of the underlying recording."""
    preprocessor_name = 'Center'
    def __init__(self, recording, mode, seconds, n_snippets):
        # recording: source RecordingExtractor to be centered.
        # mode: 'mean' or 'median' — per-channel statistic to subtract.
        # seconds: total amount of signal used to estimate the offset.
        # n_snippets: number of evenly spaced snippets making up 'seconds'.
        if not isinstance(recording, RecordingExtractor):
            raise ValueError("'recording' must be a RecordingExtractor")
        self._scalar = 1
        self._mode = mode
        self._seconds = seconds
        self._n_snippets = n_snippets
        assert self._mode in ['mean', 'median'], "'mode' can be 'mean' or 'median'"
        # use n_snippets of equal duration equally distributed on the recording
        n_snippets = int(n_snippets)
        assert n_snippets > 0, "'n_snippets' must be positive"
        # Snippet length in frames (float; see NOTE below).
        snip_len = seconds / n_snippets * recording.get_sampling_frequency()
        if seconds * recording.get_sampling_frequency() >= recording.get_num_frames():
            # Requested span covers the whole recording: use all traces.
            traces = recording.get_traces()
        else:
            # skip initial and final part
            # NOTE(review): snip_start and snip_len are floats here even though
            # they denote frame indices/counts — confirm get_snippets tolerates
            # non-integer inputs.
            snip_start = np.linspace(snip_len // 2, recording.get_num_frames()-int(1.5*snip_len), n_snippets)
            traces_snippets = recording.get_snippets(reference_frames=snip_start, snippet_len=snip_len)
            traces_snippets = traces_snippets.swapaxes(0, 1)
            # Flatten (channels, snippets, frames) -> (channels, all samples).
            traces = traces_snippets.reshape((traces_snippets.shape[0],
                                              traces_snippets.shape[1] * traces_snippets.shape[2]))
        if self._mode == 'mean':
            self._offset = -np.mean(traces, axis=1)
        else:
            self._offset = -np.median(traces, axis=1)
        dtype = str(recording.get_dtype())
        # Centered data can go negative, so 'uintN' is rewritten to 'intN'
        # by dropping the leading 'u'.
        if 'uint' in dtype:
            if 'numpy' in dtype:
                dtype = str(dtype).replace("<class '", "").replace("'>", "")
                # drop 'numpy'
                dtype = dtype.split('.')[1]
            dtype = dtype[1:]
        TransformRecording.__init__(self, recording, scalar=self._scalar, offset=self._offset, dtype=dtype)
        # Serialized constructor arguments (used for caching/dumping).
        self._kwargs = {'recording': recording.make_serialized_dict(), 'mode': mode, 'seconds': seconds,
                        'n_snippets': n_snippets}
def center(recording, mode='median', seconds=10., n_snippets=10):
    '''
    Subtracts each channel's offset (median or mean) from the traces.

    Parameters
    ----------
    recording: RecordingExtractor
        The recording extractor to be transformed
    mode: str
        'median' (default) or 'mean'
    seconds: float
        Number of seconds used to compute center
    n_snippets: int
        Number of snippets in which the total 'seconds' are divided spanning the recording duration

    Returns
    -------
    center: CenterRecording
        The output recording extractor object
    '''
    params = dict(recording=recording, mode=mode, seconds=seconds, n_snippets=n_snippets)
    return CenterRecording(**params)
| 41.637681 | 109 | 0.637313 | from spikeextractors import RecordingExtractor
from .transform import TransformRecording
import numpy as np
class CenterRecording(TransformRecording):
preprocessor_name = 'Center'
def __init__(self, recording, mode, seconds, n_snippets):
if not isinstance(recording, RecordingExtractor):
raise ValueError("'recording' must be a RecordingExtractor")
self._scalar = 1
self._mode = mode
self._seconds = seconds
self._n_snippets = n_snippets
assert self._mode in ['mean', 'median'], "'mode' can be 'mean' or 'median'"
n_snippets = int(n_snippets)
assert n_snippets > 0, "'n_snippets' must be positive"
snip_len = seconds / n_snippets * recording.get_sampling_frequency()
if seconds * recording.get_sampling_frequency() >= recording.get_num_frames():
traces = recording.get_traces()
else:
snip_start = np.linspace(snip_len // 2, recording.get_num_frames()-int(1.5*snip_len), n_snippets)
traces_snippets = recording.get_snippets(reference_frames=snip_start, snippet_len=snip_len)
traces_snippets = traces_snippets.swapaxes(0, 1)
traces = traces_snippets.reshape((traces_snippets.shape[0],
traces_snippets.shape[1] * traces_snippets.shape[2]))
if self._mode == 'mean':
self._offset = -np.mean(traces, axis=1)
else:
self._offset = -np.median(traces, axis=1)
dtype = str(recording.get_dtype())
if 'uint' in dtype:
if 'numpy' in dtype:
dtype = str(dtype).replace("<class '", "").replace("'>", "")
dtype = dtype.split('.')[1]
dtype = dtype[1:]
TransformRecording.__init__(self, recording, scalar=self._scalar, offset=self._offset, dtype=dtype)
self._kwargs = {'recording': recording.make_serialized_dict(), 'mode': mode, 'seconds': seconds,
'n_snippets': n_snippets}
def center(recording, mode='median', seconds=10., n_snippets=10):
return CenterRecording(recording=recording, mode=mode, seconds=seconds, n_snippets=n_snippets)
| true | true |
f7303176a948e5cc9fbd74ef4d04e4f617797080 | 3,283 | py | Python | dcgan/mnist/InceptionScore.py | DoubleE1/Keras-GAN | 775eb82b18cb146203295f19c937d4290de2953f | [
"MIT"
] | null | null | null | dcgan/mnist/InceptionScore.py | DoubleE1/Keras-GAN | 775eb82b18cb146203295f19c937d4290de2953f | [
"MIT"
] | null | null | null | dcgan/mnist/InceptionScore.py | DoubleE1/Keras-GAN | 775eb82b18cb146203295f19c937d4290de2953f | [
"MIT"
] | null | null | null | # calculate inception score for cifar-10 in Keras
import numpy as np
import matplotlib.pyplot as plt
from math import floor
from numpy import ones, expand_dims, log, mean, std, exp
from numpy.random import shuffle
from keras.applications.inception_v3 import InceptionV3, preprocess_input
from keras.datasets import cifar10
from skimage.transform import resize
from numpy import asarray
from PIL import Image
import os.path
from os import path
from IPython.display import clear_output
# scale an array of images to a new size
def scale_images(images, new_shape):
    # Resize every image to new_shape using nearest-neighbour interpolation
    # (order 0) and return the batch as a single numpy array.
    return asarray([resize(image, new_shape, 0) for image in images])
def crop_center(img):
    # Crop a fixed, hardcoded region of interest from a PIL image and
    # return it as a numpy array.
    box = (143, 58, 513, 427)  # (left, top, right, bottom)
    return np.asarray(img.crop(box))
# assumes images have any shape and pixels in [0,255]
# assumes images have any shape and pixels in [0,255]
def calculate_inception_score(images, n_split=10, eps=1E-16):
    """Compute the Inception Score of an image batch.

    Splits `images` into `n_split` equal groups, classifies each group with
    InceptionV3, and averages exp(KL(p(y|x) || p(y))) across the splits.
    `eps` guards against log(0).

    Returns:
        (mean, std) of the per-split scores.
    """
    # load inception v3 model
    model = InceptionV3()
    # enumerate splits of images/predictions
    scores = list()
    n_part = floor(images.shape[0] / n_split)
    for i in range(n_split):
        # retrieve images
        ix_start, ix_end = i * n_part, (i+1) * n_part
        subset = images[ix_start:ix_end]
        # convert from uint8 to float32
        print(i, ix_end, ix_start, n_part)
        subset = subset.astype('float32')
        # scale images to the required size
        # NOTE(review): the target shape is (299, 299, 1) single-channel;
        # confirm this model build accepts 1-channel input.
        subset = scale_images(subset, (299,299,1))
        # pre-process images, scale to [-1,1]
        subset = preprocess_input(subset)
        # predict p(y|x)
        p_yx = model.predict(subset)
        # calculate p(y): marginal class distribution over the split
        p_y = expand_dims(p_yx.mean(axis=0), 0)
        # calculate KL divergence using log probabilities
        kl_d = p_yx * (log(p_yx + eps) - log(p_y + eps))
        # sum over classes
        sum_kl_d = kl_d.sum(axis=1)
        # average over images
        avg_kl_d = mean(sum_kl_d)
        # undo the log
        is_score = exp(avg_kl_d)
        # store
        scores.append(is_score)
    # average across images
    is_avg, is_std = mean(scores), std(scores)
    return is_avg, is_std
image_path = "Keras-GAN/dcgan/mnist/single_mnist_images"
if path.exists(image_path):
    images = []
    # Walk two levels up the path; the remaining tail component is used as
    # the experiment name when reporting the score below.
    head_tail = path.split(image_path)
    for i in range(2):
        head_tail = head_tail[0]
        head_tail = path.split(head_tail)
    # Bug fix: the original used bitwise '~' on a bool, which is always
    # truthy (~True == -2, ~False == -1), so '/' was appended even when the
    # path already ended with one.
    if not image_path.endswith('/'):
        image_path = image_path + '/'
    print(image_path)
    for i in range(5000):
        if path.exists(image_path + f"{i}.png"):
            new_image_path = image_path + f"{i}.png"
            print("Loaded image: ", f"{i}.png")
            img = Image.open(new_image_path)
            img = crop_center(img)
            # append the image into a list
            images.append(img)
            clear_output()
    # convert the list into array
    images = np.asarray(images)
    print(images.shape)
    # calculates the average and standard deviation inception scores
    is_avg, is_std = calculate_inception_score(images)
    print(f"The inception score for {head_tail[1]}")
    print('average inception score:', is_avg, 'standard deviation inception scores:', is_std)
else:
    print("Image path not found")
import numpy as np
import matplotlib.pyplot as plt
from math import floor
from numpy import ones, expand_dims, log, mean, std, exp
from numpy.random import shuffle
from keras.applications.inception_v3 import InceptionV3, preprocess_input
from keras.datasets import cifar10
from skimage.transform import resize
from numpy import asarray
from PIL import Image
import os.path
from os import path
from IPython.display import clear_output
def scale_images(images, new_shape):
images_list = list()
for image in images:
new_image = resize(image, new_shape, 0)
images_list.append(new_image)
return asarray(images_list)
def crop_center(img):
left = 143
top = 58
right = 513
bottom = 427
return np.asarray(img.crop((left, top, right, bottom)))
def calculate_inception_score(images, n_split=10, eps=1E-16):
model = InceptionV3()
scores = list()
n_part = floor(images.shape[0] / n_split)
for i in range(n_split):
ix_start, ix_end = i * n_part, (i+1) * n_part
subset = images[ix_start:ix_end]
print(i, ix_end, ix_start, n_part)
subset = subset.astype('float32')
subset = scale_images(subset, (299,299,1))
subset = preprocess_input(subset)
p_yx = model.predict(subset)
p_y = expand_dims(p_yx.mean(axis=0), 0)
kl_d = p_yx * (log(p_yx + eps) - log(p_y + eps))
sum_kl_d = kl_d.sum(axis=1)
avg_kl_d = mean(sum_kl_d)
is_score = exp(avg_kl_d)
scores.append(is_score)
is_avg, is_std = mean(scores), std(scores)
return is_avg, is_std
image_path = "Keras-GAN/dcgan/mnist/single_mnist_images"
if path.exists(image_path):
images = []
head_tail = path.split(image_path)
for i in range(2):
head_tail = head_tail[0]
head_tail = path.split(head_tail)
if ~image_path.endswith('/'):
image_path = image_path + '/'
print(image_path)
for i in range(5000):
if path.exists(image_path + str(f"{i}.png")):
new_image_path = image_path + str(f"{i}.png")
print("Loaded image: ", str(f"{i}.png"))
img = Image.open(new_image_path)
img = crop_center(img)
images.append(img)
clear_output()
images = np.asarray(images)
print(images.shape)
is_avg, is_std = calculate_inception_score(images)
print(f"The inception score for {head_tail[1]}")
print('average inception score:', is_avg, 'standard deviation inception scores:', is_std)
else:
print("Image path not found") | true | true |
f73032ee4c4acdb0ede3dd7e43679cf1876d488e | 2,431 | py | Python | theseus/utilities/cuda.py | kaylode/Custom-Template | b2f11bfacf2b03b793476a19781f9046fab6fd82 | [
"MIT"
] | 2 | 2022-02-18T04:41:29.000Z | 2022-03-12T09:04:14.000Z | theseus/utilities/cuda.py | kaylode/Custom-Template | b2f11bfacf2b03b793476a19781f9046fab6fd82 | [
"MIT"
] | 8 | 2022-02-16T17:01:28.000Z | 2022-03-28T02:53:45.000Z | theseus/utilities/cuda.py | kaylode/Custom-Template | b2f11bfacf2b03b793476a19781f9046fab6fd82 | [
"MIT"
] | 3 | 2022-02-13T05:00:13.000Z | 2022-03-02T00:11:27.000Z | """ CUDA / AMP utils
Hacked together by / Copyright 2020 Ross Wightman
"""
import torch
from typing import Any
from theseus.utilities.loggers.observer import LoggerObserver
LOGGER = LoggerObserver.getLogger('main')
def get_devices_info(device_names="0"):
    """Return a human-readable summary of the selected devices.

    Accepts a bare comma-separated id string (e.g. "0,1"), a "cuda:<ids>"
    string, or anything starting with "cpu". Returns "CPU" for cpu devices,
    otherwise one line per CUDA device with its name and total memory in MB.
    """
    if device_names.startswith('cuda'):
        device_names = device_names.split('cuda:')[1]
    elif device_names.startswith('cpu'):
        return "CPU"
    summary = ""
    for index, device_id in enumerate(device_names.split(',')):
        props = torch.cuda.get_device_properties(index)
        total_mb = props.total_memory / 1024 ** 2  # bytes -> MB
        summary += f"CUDA:{device_id} ({props.name}, {total_mb}MB)\n"
    return summary
def get_device(name='cpu') -> torch.device:
    """Resolve a device name to a ``torch.device``, falling back to CPU
    (with a warning) when CUDA is requested but unavailable."""
    cuda_requested = name.startswith('cuda')
    if cuda_requested and not torch.cuda.is_available():
        LOGGER.text("CUDA is not available. Using CPU...", level=LoggerObserver.WARN)
        name = 'cpu'
    return torch.device(name)
def move_to(obj: Any, device: torch.device):
    """Recursively move tensors/modules contained in *obj* to *device*.

    dicts, lists and tuples are rebuilt with their elements moved; tensors
    and nn.Modules are moved via ``.to(device)``; any other value is
    returned unchanged.
    Credit: https://discuss.pytorch.org/t/pytorch-tensor-to-device-for-a-list-of-dict/66283
    """
    if torch.is_tensor(obj) or isinstance(obj, torch.nn.Module):
        return obj.to(device)
    if isinstance(obj, dict):
        return {key: move_to(value, device) for key, value in obj.items()}
    if isinstance(obj, list):
        return [move_to(item, device) for item in obj]
    if isinstance(obj, tuple):
        return tuple(move_to(item, device) for item in obj)
    # Unrecognized leaf types pass through untouched.
    return obj
def detach(obj: Any):
    """Recursively detach every tensor contained in *obj* from the graph.

    dicts, lists and tuples are rebuilt with their elements detached.
    Unlike :func:`move_to`, an unsupported leaf type raises ``TypeError``.
    Credit: https://discuss.pytorch.org/t/pytorch-tensor-to-device-for-a-list-of-dict/66283
    """
    if torch.is_tensor(obj):
        return obj.detach()
    if isinstance(obj, dict):
        return {key: detach(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [detach(item) for item in obj]
    if isinstance(obj, tuple):
        return tuple(detach(item) for item in obj)
    raise TypeError("Invalid type for detach")
from typing import Any
from theseus.utilities.loggers.observer import LoggerObserver
LOGGER = LoggerObserver.getLogger('main')
def get_devices_info(device_names="0"):
if device_names.startswith('cuda'):
device_names = device_names.split('cuda:')[1]
elif device_names.startswith('cpu'):
return "CPU"
devices_info = ""
for i, device_id in enumerate(device_names.split(',')):
p = torch.cuda.get_device_properties(i)
devices_info += f"CUDA:{device_id} ({p.name}, {p.total_memory / 1024 ** 2}MB)\n"
return devices_info
def get_device(name='cpu') -> torch.device:
if name.startswith('cuda'):
if not torch.cuda.is_available():
LOGGER.text("CUDA is not available. Using CPU...", level=LoggerObserver.WARN)
name = 'cpu'
return torch.device(name)
def move_to(obj: Any, device: torch.device):
if torch.is_tensor(obj) or isinstance(obj, torch.nn.Module):
return obj.to(device)
if isinstance(obj, dict):
res = {k: move_to(v, device) for k, v in obj.items()}
return res
if isinstance(obj, list):
return [move_to(v, device) for v in obj]
if isinstance(obj, tuple):
return tuple(move_to(list(obj), device))
return obj
def detach(obj: Any):
if torch.is_tensor(obj):
return obj.detach()
if isinstance(obj, dict):
res = {k: detach(v) for k, v in obj.items()}
return res
if isinstance(obj, list):
return [detach(v) for v in obj]
if isinstance(obj, tuple):
return tuple(detach(list(obj)))
raise TypeError("Invalid type for detach") | true | true |
f73033cb691eb8edb9ba077278124a516a5d48f4 | 15,257 | py | Python | bingraphvis/angr/annotator.py | fanyao/bingraphvis | 72f3f6abc0c30bc14916325c886e43dbbd853a97 | [
"BSD-2-Clause"
] | 1 | 2018-11-19T11:03:29.000Z | 2018-11-19T11:03:29.000Z | bingraphvis/angr/annotator.py | fanyao/bingraphvis | 72f3f6abc0c30bc14916325c886e43dbbd853a97 | [
"BSD-2-Clause"
] | null | null | null | bingraphvis/angr/annotator.py | fanyao/bingraphvis | 72f3f6abc0c30bc14916325c886e43dbbd853a97 | [
"BSD-2-Clause"
] | 1 | 2018-11-19T11:03:30.000Z | 2018-11-19T11:03:30.000Z |
from ..base import *
import capstone
import pyvex
class AngrColorSimprocedures(NodeAnnotator):
    """Fill simprocedure nodes: reddish for terminator/unresolved stubs,
    grey for all other simprocedures."""
    def __init__(self):
        super(AngrColorSimprocedures, self).__init__()
    def annotate_node(self, node):
        if not node.obj.is_simprocedure:
            return
        node.style = 'filled'
        if node.obj.simprocedure_name in ('PathTerminator', 'ReturnUnconstrained', 'UnresolvableTarget'):
            node.fillcolor = '#ffcccc'
        else:
            node.fillcolor = '#dddddd'
class AngrColorExit(NodeAnnotator):
    """Fill (light green) non-simprocedure blocks that look like exits:
    blocks with a returning out-edge, or with no out-edges at all."""
    def __init__(self):
        super(AngrColorExit, self).__init__()
    def annotate_node(self, node):
        if node.obj.is_simprocedure:
            return
        outgoing = [e for e in self.graph.edges if e.src == node]
        returns = any('jumpkind' in e.meta and e.meta['jumpkind'] == 'Ijk_Ret'
                      for e in outgoing)
        if returns or not outgoing:
            node.style = 'filled'
            node.fillcolor = '#ddffdd'
class AngrColorEntry(NodeAnnotator):
    """Fill (light yellow) each function's entry block, i.e. the block whose
    address equals its function's address."""
    def __init__(self):
        super(AngrColorEntry, self).__init__()
    def annotate_node(self, node):
        if node.obj.is_simprocedure:
            return
        is_entry = hasattr(node.obj, 'function_address') and node.obj.addr == node.obj.function_address
        if is_entry:
            node.style = 'filled'
            node.fillcolor = '#ffffcc'
class AngrColorEdgesVex(EdgeAnnotator):
    """Color CFG edges by their VEX jumpkind.

    Conditional branches are split into 'taken' (green) and 'fall-through'
    (red) by comparing the edge destination against the block's VEX ``next``
    constant; calls, returns and unconditional jumps get dedicated colors,
    and anything unclassifiable is painted yellow.
    """
    EDGECOLOR_CONDITIONAL_TRUE = 'green'
    EDGECOLOR_CONDITIONAL_FALSE = 'red'
    EDGECOLOR_UNCONDITIONAL = 'blue'
    EDGECOLOR_CALL = 'black'
    EDGECOLOR_RET = 'grey'
    EDGECOLOR_UNKNOWN = 'yellow'
    def __init__(self):
        super(AngrColorEdgesVex, self).__init__()
    def annotate_edge(self, edge):
        vex = None
        if 'jumpkind' in edge.meta:
            jk = edge.meta['jumpkind']
            if jk == 'Ijk_Ret':
                edge.color = self.EDGECOLOR_RET
            elif jk == 'Ijk_FakeRet':
                # Fall-through edge after a call site: return color, dashed.
                edge.color = self.EDGECOLOR_RET
                edge.style = 'dashed'
            elif jk == 'Ijk_Call':
                edge.color = self.EDGECOLOR_CALL
                if 'vex' in edge.src.content:
                    vex = edge.src.content['vex']['vex']
                    # Dotted when the destination differs from the single
                    # statically known call target.
                    if len (vex.next.constants) == 1 and vex.next.constants[0].value != edge.dst.obj.addr:
                        edge.style='dotted'
            elif jk == 'Ijk_Boring':
                if 'vex' in edge.src.content:
                    vex = edge.src.content['vex']['vex']
                    if len(vex.constant_jump_targets) > 1:
                        # Conditional: a destination matching the 'next'
                        # constant is treated as the false branch.
                        if len (vex.next.constants) == 1:
                            if edge.dst.obj.addr == vex.next.constants[0].value:
                                edge.color=self.EDGECOLOR_CONDITIONAL_FALSE
                            else:
                                edge.color=self.EDGECOLOR_CONDITIONAL_TRUE
                        else:
                            # Ambiguous 'next' expression.
                            edge.color=self.EDGECOLOR_UNKNOWN
                    else:
                        edge.color=self.EDGECOLOR_UNCONDITIONAL
                else:
                    # No lifted VEX available for the source block.
                    edge.color=self.EDGECOLOR_UNCONDITIONAL
            else:
                # TODO: emit a warning for unhandled jumpkinds.
                edge.color = self.EDGECOLOR_UNKNOWN
        else:
            edge.color = self.EDGECOLOR_UNKNOWN
class AngrPathAnnotator(EdgeAnnotator, NodeAnnotator):
    """Highlight (red, thicker) the nodes and edges traversed by a concrete
    angr path, matched against the graph via the path's address trace."""
    def __init__(self, path):
        super(AngrPathAnnotator, self).__init__()
        self.path = path
        self.trace = list(path.addr_trace)
    def set_graph(self, graph):
        super(AngrPathAnnotator, self).set_graph(graph)
        self.vaddr = self.valid_addrs()
        # Bug fix: on Python 3, filter() returns a lazy iterator which cannot
        # be sliced below; build a real list of trace addresses that actually
        # appear in the graph.
        ftrace = [addr for addr in self.trace if addr in self.vaddr]
        # Consecutive filtered trace addresses form the traversed edges.
        self.edges_hit = set(zip(ftrace[:-1], ftrace[1:]))
    def valid_addrs(self):
        # Addresses of all nodes present in the graph.
        vaddr = set()
        for n in self.graph.nodes:
            vaddr.add(n.obj.addr)
        return vaddr
    # TODO: add caching. The callstack-based matching below is heuristic and
    # has not been fully validated.
    def node_hit(self, node):
        # Match the node's callstack key (plus its own address) as an ordered
        # subsequence of the trace, scanning from the most recent entry.
        ck = list(node.callstack_key)
        ck.append(node.addr)
        rtrace = list(reversed(self.trace))
        found = True
        si = 0
        for c in reversed(ck):
            if c is None:
                break
            try:
                si = rtrace[si:].index(c)
            except ValueError:
                # Bug fix: catch only the failed lookup instead of a bare
                # 'except:' that would also swallow unrelated errors.
                found = False
                break
        return found
    def annotate_edge(self, edge):
        key = (edge.src.obj.addr, edge.dst.obj.addr)
        if key in self.edges_hit and self.node_hit(edge.src.obj) and self.node_hit(edge.dst.obj):
            edge.width = 3
            edge.color = 'red'
    def annotate_node(self, node):
        if self.node_hit(node.obj):
            node.width = 3
            node.color = 'red'
class AngrBackwardSliceAnnotatorVex(ContentAnnotator):
    """Bold and tag ('[*]') the VEX statements chosen by a BackwardSlice,
    coloring the slice's target statements red."""
    def __init__(self, bs):
        # bs: an angr BackwardSlice; its _targets are (node, stmt_idx) pairs.
        super(AngrBackwardSliceAnnotatorVex, self).__init__('vex')
        self.bs = bs
        self.targets = set(self.bs._targets)
    def register(self, content):
        content.add_column_before('taint')
    def annotate_content(self, node, content):
        # Simprocedures/syscalls have no VEX statement rows to annotate.
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # Indices of this block's statements that belong to the slice.
        st = self.bs.chosen_statements[node.obj.addr]
        for k in range(len(content['data'])):
            c = content['data'][k]
            if k in st:
                c['addr']['style'] = 'B'
                c['statement']['style'] = 'B'
                c['taint'] = {
                    'content':'[*]',
                    'style':'B'
                }
                if (node.obj, k) in self.targets:
                    c['addr']['color'] = 'red'
                    c['statement']['color'] = 'red'
class AngrBackwardSliceAnnotatorAsm(ContentAnnotator):
    """Bold and tag ('[*]') the assembly instructions whose lifted VEX
    statements belong to a BackwardSlice."""
    def __init__(self, bs):
        super(AngrBackwardSliceAnnotatorAsm, self).__init__('asm')
        self.bs = bs
        self.targets = set(self.bs._targets)
    def register(self, content):
        content.add_column_before('taint')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        st = self.bs.chosen_statements[node.obj.addr]
        staddr = set()
        # TODO: re-lifting the block here is wasteful; reuse a cached VEX
        # representation if one is available.
        vex = self.bs.project.factory.block(addr=node.obj.addr, size=node.obj.size).vex
        caddr = None
        # Map chosen VEX statement indices back to the addresses of the
        # instructions they belong to (IMark statements delimit instructions).
        for j, s in enumerate(vex.statements):
            if isinstance(s, pyvex.stmt.IMark):
                caddr = s.addr
            if j in st:
                staddr.add(caddr)
        for c in content['data']:
            if c['_addr'] in staddr:
                c['addr']['style'] = 'B'
                c['mnemonic']['style'] = 'B'
                c['operands']['style'] = 'B'
                c['taint'] = {
                    'content':'[*]',
                    'style':'B'
                }
class AngrColorDDGStmtEdges(EdgeAnnotator):
    """Color statement-level DDG edges by dependency type (tmp/reg/mem) and
    label them with the dependency's value; unknown types become dotted."""
    def __init__(self, project=None):
        # project: optional; when given, register numbers are rendered with
        # their architecture names.
        super(AngrColorDDGStmtEdges, self).__init__()
        self.project = project
    def annotate_edge(self, edge):
        if 'type' not in edge.meta:
            return
        dep_type = edge.meta['type']
        if dep_type == 'tmp':
            edge.color = 'blue'
            edge.label = 't' + str(edge.meta['data'])
        elif dep_type == 'reg':
            edge.color = 'green'
            data = edge.meta['data']
            if self.project:
                reg_name = self.project.arch.register_names[data.reg]
            else:
                reg_name = 'reg' + str(data.reg)
            edge.label = reg_name + " " + str(data.size)
        elif dep_type == 'mem':
            edge.color = 'red'
            edge.label = str(edge.meta['data'])
        else:
            edge.label = dep_type
            edge.style = 'dotted'
class AngrColorDDGData(EdgeAnnotator, NodeAnnotator):
    """Color data-dependence edges by type (kill/mem_addr/mem_data/other) and
    fill initial nodes light green; optionally label edges with their type."""
    def __init__(self, project=None, labels=False):
        super(AngrColorDDGData, self).__init__()
        self.project = project
        self.labels = labels
    def annotate_edge(self, edge):
        if 'type' not in edge.meta:
            return
        dep_type = edge.meta['type']
        if dep_type == 'kill':
            edge.color = 'red'
        elif dep_type == 'mem_addr':
            edge.color = 'blue'
            edge.style = 'dotted'
        elif dep_type == 'mem_data':
            edge.color = 'blue'
        else:
            edge.color = 'yellow'
        if self.labels:
            edge.label = dep_type
    def annotate_node(self, node):
        if node.obj.initial:
            node.fillcolor = '#ccffcc'
            node.style = 'filled'
class AngrActionAnnotatorVex(ContentAnnotator):
    """Append SimActionData columns (type/addr/data) next to each VEX
    statement, taken from the block's first final state."""
    def __init__(self):
        super(AngrActionAnnotatorVex, self).__init__('vex')
    def register(self, content):
        content.add_column_after('action_type')
        content.add_column_after('action_addr')
        content.add_column_after('action_data')
    def annotate_content(self, node, content):
        # Local import: SimActionData is only needed inside this method.
        from simuvex.s_action import SimActionData
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        if len(node.obj.final_states) > 0:
            # NOTE(review): only the first final state is inspected; actions
            # of other successor states are ignored — confirm this is intended.
            state = node.obj.final_states[0]
            for action in state.log.actions:
                if isinstance(action, SimActionData):
                    c = content['data'][action.stmt_idx]
                    c['action_type'] = {
                        'content': action.type+"/"+action.action+"("+str(action.size.ast)+")",
                        'align': 'LEFT'
                    }
                    # TODO: comparing str(...) against 'None' is fragile;
                    # prefer an explicit None check.
                    if str(action.addr) != 'None':
                        c['action_addr'] = {
                            'content': str(action.addr.ast),
                            'align': 'LEFT'
                        }
                    if str(action.data) != 'None':
                        c['action_data'] = {
                            'content': str(action.data.ast),
                            'align': 'LEFT'
                        }
#EXPERIMENTAL
class AngrCodelocLogAnnotator(ContentAnnotator):
    """EXPERIMENTAL: append a per-statement 'log' column filled from a mapping
    keyed by (block address, statement index)."""
    def __init__(self, cllog):
        super(AngrCodelocLogAnnotator, self).__init__('vex')
        self.cllog = cllog
    def register(self, content):
        content.add_column_after('log')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        for idx, row in enumerate(content['data']):
            key = (node.obj.addr, idx)
            if key in self.cllog:
                row['log'] = {
                    'content': self.cllog[key],
                    'align':'LEFT'
                }
class AngrCommentsAsm(ContentAnnotator):
    """Add a ';'-style comment column to the disassembly, resolving concrete
    data values, access addresses and jump targets against known labels."""
    def __init__(self, project):
        super(AngrCommentsAsm, self).__init__('asm')
        self.project = project
    def register(self, content):
        content.add_column_after('comment')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # instruction address -> accumulated label text
        comments_by_addr = {}
        if len(node.obj.final_states) > 0:
            # NOTE(review): only the first final state's action log is used.
            state = node.obj.final_states[0]
            for action in state.log.actions:
                label = ''
                if action.type == 'mem' or action.type == 'reg':
                    # Concrete data value matching a known label.
                    if isinstance(action.data.ast, int) or action.data.ast.concrete:
                        d = state.se.any_int(action.data.ast)
                        if d in self.project.kb.labels:
                            label += 'data=' + self.project.kb.labels[d] + ' '
                    # Concrete access address matching a known label.
                    if isinstance(action.addr.ast, int) or action.addr.ast.concrete:
                        a = state.se.any_int(action.addr.ast)
                        if a in self.project.kb.labels:
                            label += 'addr=' + self.project.kb.labels[a] + ' '
                if action.type == 'exit':
                    # Concrete jump target matching a known label.
                    if action.target.ast.concrete:
                        a = state.se.any_int(action.target.ast)
                        if a in self.project.kb.labels:
                            label += self.project.kb.labels[a] + ' '
                if label != '':
                    comments_by_addr[action.ins_addr] = label
        for k in content['data']:
            ins = k['_ins']
            if ins.address in comments_by_addr:
                # Start a new comment or append to an existing one; truncate
                # to keep the rendered row reasonably short.
                if not ('comment' in k and 'content' in k['comment']):
                    k['comment'] = {
                        'content': "; " + comments_by_addr[ins.address][:100]
                    }
                else:
                    k['comment']['content'] += ", " + comments_by_addr[ins.address][:100]
                k['comment']['color'] = 'gray'
                k['comment']['align'] = 'LEFT'
class AngrCommentsDataRef(ContentAnnotator):
    """Add a ';'-style comment column showing string data referenced by each
    instruction (from the block's accessed data references)."""
    def __init__(self, project):
        super(AngrCommentsDataRef, self).__init__('asm')
        self.project = project
    def register(self, content):
        content.add_column_after('comment')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # instruction address -> referenced string content
        string_refs = {}
        for dr in node.obj.accessed_data_references:
            if dr.sort == 'string':
                string_refs[dr.insn_addr] = dr.content
        for row in content['data']:
            ins = row['_ins']
            if ins.address not in string_refs:
                continue
            text = string_refs[ins.address][:100]
            if 'comment' in row and 'content' in row['comment']:
                row['comment']['content'] += ", " + text
            else:
                row['comment'] = {
                    'content': "; " + text
                }
            row['comment']['color'] = 'gray'
            row['comment']['align'] = 'LEFT'
class AngrVariables(ContentAnnotator):
    """Prepend a column listing the memory variables that angr's variable
    manager associates with each instruction."""
    def __init__(self, project, debug=False):
        # debug: when True, each variable's internal ident is shown too.
        super(AngrVariables, self).__init__('asm')
        self.project = project
        self.debug = debug
    def register(self, content):
        content.add_column_before('variables')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # Variable manager of the function containing this block.
        vm = self.project.kb.variables[node.obj.function_address]
        for k in content['data']:
            ins = k['_ins']
            vars = vm.find_variables_by_insn(ins.address, 'memory')
            if vars:
                for var in vars:
                    if not 'variables' in k:
                        k['variables'] = {'content':''}
                    # NOTE(review): multiple variables are concatenated without
                    # a separator and repr() leaves quotes in the rendered text
                    # — confirm this output is intended.
                    k['variables']['content'] += repr(var[0].name + (' (' + var[0].ident + ')' if self.debug else '') )
                    k['variables']['color'] = 'lightblue'
                    k['variables']['align'] = 'LEFT'
| 35.235566 | 124 | 0.509274 |
from ..base import *
import capstone
import pyvex
class AngrColorSimprocedures(NodeAnnotator):
    """Fill SimProcedure nodes: reddish for terminator/unresolved stubs,
    light gray for every other SimProcedure."""

    # SimProcedures that mark a dead end or an unresolvable target.
    _SPECIAL = ('PathTerminator', 'ReturnUnconstrained', 'UnresolvableTarget')

    def __init__(self):
        super(AngrColorSimprocedures, self).__init__()

    def annotate_node(self, node):
        if not node.obj.is_simprocedure:
            return
        node.style = 'filled'
        if node.obj.simprocedure_name in self._SPECIAL:
            node.fillcolor = '#ffcccc'
        else:
            node.fillcolor = '#dddddd'
class AngrColorExit(NodeAnnotator):
    """Fill exit blocks green: blocks with a returning outgoing edge
    (jumpkind Ijk_Ret) or with no outgoing edge at all."""

    def __init__(self):
        super(AngrColorExit, self).__init__()

    def annotate_node(self, node):
        if node.obj.is_simprocedure:
            return
        outgoing = [e for e in self.graph.edges if e.src == node]
        returns = any(
            'jumpkind' in e.meta and e.meta['jumpkind'] == 'Ijk_Ret'
            for e in outgoing
        )
        # A block either returns or falls off the graph entirely.
        if returns or not outgoing:
            node.style = 'filled'
            node.fillcolor = '#ddffdd'
class AngrColorEntry(NodeAnnotator):
    """Fill a function's entry block (addr == function_address) yellow."""

    def __init__(self):
        super(AngrColorEntry, self).__init__()

    def annotate_node(self, node):
        if node.obj.is_simprocedure:
            return
        is_entry = (hasattr(node.obj, 'function_address')
                    and node.obj.addr == node.obj.function_address)
        if is_entry:
            node.style = 'filled'
            node.fillcolor = '#ffffcc'
class AngrColorEdgesVex(EdgeAnnotator):
    """Color CFG edges by their VEX jumpkind: returns grey, calls black,
    conditional branches green/red (taken/fall-through), unconditional
    blue, anything unrecognized yellow."""
    EDGECOLOR_CONDITIONAL_TRUE = 'green'
    EDGECOLOR_CONDITIONAL_FALSE = 'red'
    EDGECOLOR_UNCONDITIONAL = 'blue'
    EDGECOLOR_CALL = 'black'
    EDGECOLOR_RET = 'grey'
    EDGECOLOR_UNKNOWN = 'yellow'
    def __init__(self):
        super(AngrColorEdgesVex, self).__init__()
    def annotate_edge(self, edge):
        vex = None
        if 'jumpkind' in edge.meta:
            jk = edge.meta['jumpkind']
            if jk == 'Ijk_Ret':
                edge.color = self.EDGECOLOR_RET
            elif jk == 'Ijk_FakeRet':
                # "Fake return": the fall-through edge after a call site.
                edge.color = self.EDGECOLOR_RET
                edge.style = 'dashed'
            elif jk == 'Ijk_Call':
                edge.color = self.EDGECOLOR_CALL
                if 'vex' in edge.src.content:
                    vex = edge.src.content['vex']['vex']
                    # A call edge whose destination differs from the single
                    # constant successor is drawn dotted (e.g. return target).
                    if len (vex.next.constants) == 1 and vex.next.constants[0].value != edge.dst.obj.addr:
                        edge.style='dotted'
            elif jk == 'Ijk_Boring':
                # Ordinary (non-call, non-return) control flow.
                if 'vex' in edge.src.content:
                    vex = edge.src.content['vex']['vex']
                    if len(vex.constant_jump_targets) > 1:
                        # Two possible targets => conditional branch. The
                        # block's 'next' expression is the fall-through; an
                        # edge to it is the not-taken (red) side.
                        if len (vex.next.constants) == 1:
                            if edge.dst.obj.addr == vex.next.constants[0].value:
                                edge.color=self.EDGECOLOR_CONDITIONAL_FALSE
                            else:
                                edge.color=self.EDGECOLOR_CONDITIONAL_TRUE
                        else:
                            # Non-constant 'next': cannot classify the sides.
                            edge.color=self.EDGECOLOR_UNKNOWN
                    else:
                        edge.color=self.EDGECOLOR_UNCONDITIONAL
                else:
                    # No VEX IR available for the source block.
                    edge.color=self.EDGECOLOR_UNCONDITIONAL
            else:
                edge.color = self.EDGECOLOR_UNKNOWN
        else:
            edge.color = self.EDGECOLOR_UNKNOWN
class AngrPathAnnotator(EdgeAnnotator, NodeAnnotator):
    """Highlight in red the nodes and edges that a concrete angr path
    actually traversed, based on the path's address trace."""

    def __init__(self, path):
        super(AngrPathAnnotator, self).__init__()
        self.path = path
        self.trace = list(path.addr_trace)

    def set_graph(self, graph):
        super(AngrPathAnnotator, self).set_graph(graph)
        self.vaddr = self.valid_addrs()
        # fix: filter() returns an iterator on Python 3, and the original
        # sliced it directly (ftrace[:-1]) which raises TypeError.
        ftrace = [a for a in self.trace if a in self.vaddr]
        # Consecutive trace addresses form the set of traversed edges.
        self.edges_hit = set(zip(ftrace[:-1], ftrace[1:]))

    def valid_addrs(self):
        """Return the set of block addresses present in the graph."""
        vaddr = set()
        for n in self.graph.nodes:
            vaddr.add(n.obj.addr)
        return vaddr

    def node_hit(self, node):
        """Check (heuristically) whether the node's callstack key plus its
        address occurs in order within the reversed trace."""
        ck = list(node.callstack_key)
        ck.append(node.addr)
        rtrace = list(reversed(self.trace))
        found = True
        si = 0
        for c in reversed(ck):
            # fix: identity comparison with None, not '=='
            if c is None:
                break
            try:
                # NOTE(review): the index is taken relative to the slice
                # rtrace[si:], not to rtrace itself — kept as in the
                # original; confirm intended semantics upstream.
                si = rtrace[si:].index(c)
            except ValueError:  # fix: was a bare 'except:'
                found = False
                break
        return found

    def annotate_edge(self, edge):
        key = (edge.src.obj.addr, edge.dst.obj.addr)
        if key in self.edges_hit and self.node_hit(edge.src.obj) and self.node_hit(edge.dst.obj):
            edge.width = 3
            edge.color = 'red'

    def annotate_node(self, node):
        if self.node_hit(node.obj):
            node.width = 3
            node.color = 'red'
class AngrBackwardSliceAnnotatorVex(ContentAnnotator):
    """Bold the VEX statements chosen by a backward slice and paint the
    slice's target statements red, with a '[*]' taint marker column."""

    def __init__(self, bs):
        super(AngrBackwardSliceAnnotatorVex, self).__init__('vex')
        self.bs = bs
        self.targets = set(self.bs._targets)

    def register(self, content):
        content.add_column_before('taint')

    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        chosen = self.bs.chosen_statements[node.obj.addr]
        for idx, row in enumerate(content['data']):
            if idx not in chosen:
                continue
            row['addr']['style'] = 'B'
            row['statement']['style'] = 'B'
            row['taint'] = {'content': '[*]', 'style': 'B'}
            # Slice targets are keyed by (node object, statement index).
            if (node.obj, idx) in self.targets:
                row['addr']['color'] = 'red'
                row['statement']['color'] = 'red'
class AngrBackwardSliceAnnotatorAsm(ContentAnnotator):
    """Bold the assembly instructions whose VEX statements were chosen by
    a backward slice, with a '[*]' taint marker column."""
    def __init__(self, bs):
        super(AngrBackwardSliceAnnotatorAsm, self).__init__('asm')
        self.bs = bs
        self.targets = set(self.bs._targets)
    def register(self, content):
        content.add_column_before('taint')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # VEX statement indices chosen by the slice for this block.
        st = self.bs.chosen_statements[node.obj.addr]
        staddr = set()
        # Re-lift the block to map statement indices back to instruction
        # addresses via IMark statements.
        vex = self.bs.project.factory.block(addr=node.obj.addr, size=node.obj.size).vex
        caddr = None
        for j, s in enumerate(vex.statements):
            if isinstance(s, pyvex.stmt.IMark):
                # IMark opens a new instruction; remember its address.
                caddr = s.addr
            if j in st:
                staddr.add(caddr)
        for c in content['data']:
            if c['_addr'] in staddr:
                c['addr']['style'] = 'B'
                c['mnemonic']['style'] = 'B'
                c['operands']['style'] = 'B'
                c['taint'] = {
                    'content':'[*]',
                    'style':'B'
                }
class AngrColorDDGStmtEdges(EdgeAnnotator):
    """Color statement-level DDG edges by dependency kind: tmp (blue),
    reg (green, labelled with the register), mem (red); others dotted."""

    def __init__(self, project=None):
        super(AngrColorDDGStmtEdges, self).__init__()
        # Optional; when present, register offsets resolve to arch names.
        self.project = project

    def annotate_edge(self, edge):
        if 'type' not in edge.meta:
            return
        dep_type = edge.meta['type']
        if dep_type == 'tmp':
            edge.color = 'blue'
            edge.label = 't' + str(edge.meta['data'])
        elif dep_type == 'reg':
            edge.color = 'green'
            data = edge.meta['data']
            if self.project:
                reg_name = self.project.arch.register_names[data.reg]
            else:
                reg_name = "reg" + str(data.reg)
            edge.label = reg_name + " " + str(data.size)
        elif dep_type == 'mem':
            edge.color = 'red'
            edge.label = str(edge.meta['data'])
        else:
            # Unknown dependency kinds: label with the raw type, dotted.
            edge.label = dep_type
            edge.style = 'dotted'
class AngrColorDDGData(EdgeAnnotator, NodeAnnotator):
    """Color data-level DDG edges by relation kind and fill initial
    definition nodes light green."""

    # Edge 'type' meta -> display color (anything else is yellow).
    _COLORS = {'kill': 'red', 'mem_addr': 'blue', 'mem_data': 'blue'}

    def __init__(self, project=None, labels=False):
        super(AngrColorDDGData, self).__init__()
        self.project = project
        self.labels = labels

    def annotate_edge(self, edge):
        if 'type' not in edge.meta:
            return
        dep_type = edge.meta['type']
        edge.color = self._COLORS.get(dep_type, 'yellow')
        if dep_type == 'mem_addr':
            edge.style = 'dotted'
        if self.labels:
            edge.label = dep_type

    def annotate_node(self, node):
        if node.obj.initial:
            node.fillcolor = '#ccffcc'
            node.style = 'filled'
class AngrActionAnnotatorVex(ContentAnnotator):
    """Annotate VEX statements with the SimActionData recorded for them
    (action type/size, address and data ASTs) from the first final state."""
    def __init__(self):
        super(AngrActionAnnotatorVex, self).__init__('vex')
    def register(self, content):
        content.add_column_after('action_type')
        content.add_column_after('action_addr')
        content.add_column_after('action_data')
    def annotate_content(self, node, content):
        # Imported lazily so the module loads without simuvex installed.
        from simuvex.s_action import SimActionData
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        if len(node.obj.final_states) > 0:
            # NOTE(review): only the first final state is inspected;
            # actions from other successors are ignored.
            state = node.obj.final_states[0]
            for action in state.log.actions:
                if isinstance(action, SimActionData):
                    # Row is addressed by the VEX statement index.
                    c = content['data'][action.stmt_idx]
                    c['action_type'] = {
                        'content': action.type+"/"+action.action+"("+str(action.size.ast)+")",
                        'align': 'LEFT'
                    }
                    # 'None' string-compare filters out absent ASTs.
                    if str(action.addr) != 'None':
                        c['action_addr'] = {
                            'content': str(action.addr.ast),
                            'align': 'LEFT'
                        }
                    if str(action.data) != 'None':
                        c['action_data'] = {
                            'content': str(action.data.ast),
                            'align': 'LEFT'
                        }
class AngrCodelocLogAnnotator(ContentAnnotator):
    """Attach a log-message column to VEX statements, keyed by code
    location (block address, statement index)."""

    def __init__(self, cllog):
        super(AngrCodelocLogAnnotator, self).__init__('vex')
        # Mapping of (addr, stmt_idx) -> message text.
        self.cllog = cllog

    def register(self, content):
        content.add_column_after('log')

    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        for idx, row in enumerate(content['data']):
            key = (node.obj.addr, idx)
            if key in self.cllog:
                row['log'] = {'content': self.cllog[key], 'align': 'LEFT'}
class AngrCommentsAsm(ContentAnnotator):
    """Annotate assembly rows with comments derived from symbolic actions:
    known labels for concrete data/addresses touched by mem/reg actions
    and for concrete exit targets."""
    def __init__(self, project):
        super(AngrCommentsAsm, self).__init__('asm')
        self.project = project
    def register(self, content):
        content.add_column_after('comment')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # Map instruction address -> accumulated label text.
        comments_by_addr = {}
        if len(node.obj.final_states) > 0:
            # NOTE(review): only the first final state's actions are used.
            state = node.obj.final_states[0]
            for action in state.log.actions:
                label = ''
                if action.type == 'mem' or action.type == 'reg':
                    # Concretize the data AST and look it up in the KB labels.
                    if isinstance(action.data.ast, int) or action.data.ast.concrete:
                        d = state.se.any_int(action.data.ast)
                        if d in self.project.kb.labels:
                            label += 'data=' + self.project.kb.labels[d] + ' '
                    # Same for the accessed address.
                    if isinstance(action.addr.ast, int) or action.addr.ast.concrete:
                        a = state.se.any_int(action.addr.ast)
                        if a in self.project.kb.labels:
                            label += 'addr=' + self.project.kb.labels[a] + ' '
                if action.type == 'exit':
                    # Concrete exit targets are labelled directly.
                    if action.target.ast.concrete:
                        a = state.se.any_int(action.target.ast)
                        if a in self.project.kb.labels:
                            label += self.project.kb.labels[a] + ' '
                if label != '':
                    comments_by_addr[action.ins_addr] = label
        for k in content['data']:
            ins = k['_ins']
            if ins.address in comments_by_addr:
                # Create or extend the comment cell; truncate to 100 chars.
                if not ('comment' in k and 'content' in k['comment']):
                    k['comment'] = {
                        'content': "; " + comments_by_addr[ins.address][:100]
                    }
                else:
                    k['comment']['content'] += ", " + comments_by_addr[ins.address][:100]
                k['comment']['color'] = 'gray'
                k['comment']['align'] = 'LEFT'
class AngrCommentsDataRef(ContentAnnotator):
    """Annotate assembly rows with the string data references accessed by
    each instruction of the node."""
    def __init__(self, project):
        super(AngrCommentsDataRef, self).__init__('asm')
        self.project = project
    def register(self, content):
        content.add_column_after('comment')
    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        # Map instruction address -> referenced string content.
        comments_by_addr = {}
        for dr in node.obj.accessed_data_references:
            if dr.sort == 'string':
                comments_by_addr[dr.insn_addr] = dr.content
        for k in content['data']:
            ins = k['_ins']
            if ins.address in comments_by_addr:
                # Create or extend the comment cell; truncate to 100 chars.
                if not ('comment' in k and 'content' in k['comment']):
                    k['comment'] = {
                        'content': "; " + comments_by_addr[ins.address][:100]
                    }
                else:
                    k['comment']['content'] += ", " + comments_by_addr[ins.address][:100]
                k['comment']['color'] = 'gray'
                k['comment']['align'] = 'LEFT'
class AngrVariables(ContentAnnotator):
    """Annotate assembly rows with the memory variables (from the
    knowledge base's variable manager) touched by each instruction."""

    def __init__(self, project, debug=False):
        super(AngrVariables, self).__init__('asm')
        self.project = project
        # When debug is set, variable idents are shown next to names.
        self.debug = debug

    def register(self, content):
        content.add_column_before('variables')

    def annotate_content(self, node, content):
        if node.obj.is_simprocedure or node.obj.is_syscall:
            return
        vm = self.project.kb.variables[node.obj.function_address]
        for k in content['data']:
            ins = k['_ins']
            # fix idiom: renamed 'vars' (shadowed the builtin)
            found = vm.find_variables_by_insn(ins.address, 'memory')
            if not found:
                continue
            for var in found:
                # fix idiom: "'variables' not in k" instead of "not ... in"
                if 'variables' not in k:
                    k['variables'] = {'content': ''}
                suffix = ' (' + var[0].ident + ')' if self.debug else ''
                k['variables']['content'] += repr(var[0].name + suffix)
                k['variables']['color'] = 'lightblue'
                k['variables']['align'] = 'LEFT'
| true | true |
f73033d88a29a228548a8549a992ff3da490ca17 | 3,826 | py | Python | .history/functions_20211223153653.py | tjxj/pdf2docx | 3338a95f1a971de8caf0369fa3ce794d2d6d57cd | [
"Apache-2.0"
] | null | null | null | .history/functions_20211223153653.py | tjxj/pdf2docx | 3338a95f1a971de8caf0369fa3ce794d2d6d57cd | [
"Apache-2.0"
] | null | null | null | .history/functions_20211223153653.py | tjxj/pdf2docx | 3338a95f1a971de8caf0369fa3ce794d2d6d57cd | [
"Apache-2.0"
] | null | null | null | import streamlit as st
import base64
import os
import time
from pdf2docx import Converter
import tempfile
from pathlib import Path
import streamlit as st
from pdf2image import convert_from_path
def show_pdf(uploaded_file):
    """Render the uploaded PDF inline in a Streamlit expander, together
    with its filename/type/size details."""
    with st.expander("Original PDF file"):
        # Embed the PDF as a base64 data URI so the browser renders it.
        base64_pdf = base64.b64encode(uploaded_file.read()).decode("utf-8")
        embed_tag = f'<embed src="data:application/pdf;base64,{base64_pdf}" width="100%" height="600" type="application/pdf">'
        st.markdown(embed_tag, unsafe_allow_html=True)
        details = {
            "filename": uploaded_file.name,
            "filetype": uploaded_file.type,
            "filesize": uploaded_file.size,
        }
        st.write(details)
        st.write(uploaded_file.name)
## Converted images from PDF -- pdf转图片
def pdf2pic(uploaded_file):
    """Convert the uploaded PDF into per-page images and display them.

    The upload is spooled to a named temporary file because pdf2image's
    convert_from_path needs a real filesystem path.
    """
    # delete=False keeps the file reopenable by pdf2image on all
    # platforms; we remove it ourselves once conversion is done.
    with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
        fp = Path(tmp_file.name)
        fp.write_bytes(uploaded_file.getvalue())
    try:
        imgs = convert_from_path(str(fp))
    finally:
        # fix: the original never deleted the temp file (leak per call)
        fp.unlink()
    with st.expander("Converted images from PDF"):
        st.image(imgs)
class FileDownloader(object):
    """Build a base64 download link for text data in Streamlit.

    Usage: ``FileDownloader(data, filename, file_ext).download()``.
    """

    def __init__(self, data, filename="myfile", file_ext="pdf"):
        super(FileDownloader, self).__init__()
        self.data = data            # text payload to offer for download
        self.filename = filename    # base name of the generated file
        self.file_ext = file_ext    # extension appended to the name

    def download(self):
        """Render a markdown anchor that downloads the data."""
        b64 = base64.b64encode(self.data.encode()).decode()
        # fix: 'timestr' was referenced but never defined (NameError at
        # runtime); generate the timestamp here.
        timestr = time.strftime("%Y%m%d-%H%M%S")
        new_filename = "{}_{}_.{}".format(self.filename, timestr, self.file_ext)
        st.markdown("#### Download File ###")
        href = f'<a href="data:file/{self.file_ext};base64,{b64}" download="{new_filename}">Click Here!!</a>'
        st.markdown(href, unsafe_allow_html=True)
def save_uploadedfile(uploadedfile):
    """Persist an uploaded file under ./tempDir and report success."""
    target = os.path.join("tempDir", uploadedfile.name)
    with open(target, "wb") as out:
        out.write(uploadedfile.getbuffer())
    return st.success("Saved File:{} to tempDir".format(uploadedfile.name))
def convert2docx(uploaded_file):
    """Convert a PDF to .docx with pdf2docx, converting all pages."""
    cv = Converter(uploaded_file)
    try:
        docx_file = cv.convert(start=0, end=None)
    finally:
        # fix: release the underlying PDF handle (never closed before)
        cv.close()
    return docx_file
# tables = Converter.extract_tables(pdf_file, start=0, end=1)
# for table in tables:
# print(table)
# streamlit.download_button(label, data, file_name=None, mime=None, key=None, help=None, on_click=None, args=None, kwargs=None)
# 这是官方示例
# @st.cache
# def convert_df(df):
# # IMPORTANT: Cache the conversion to prevent computation on every rerun
# return df.to_csv().encode('utf-8')
# csv = convert_df(my_large_df)
# st.download_button(
# label="Download data as CSV",
# data=csv,
# file_name='large_df.csv',
# mime='text/csv',
# )
import os
from PyPDF2 import PdfFileWriter, PdfFileReader, PdfFileMerger
def pdf_split(pdf_in, pdf_out, start, end):
    """Append pages [start, end) of pdf_in to pdf_out."""
    writer = PdfFileWriter()
    with open(pdf_in, "rb") as in_pdf:
        reader = PdfFileReader(in_pdf)
        # Copy the requested page range into the writer.
        for page_no in range(start, end):
            writer.addPage(reader.getPage(page_no))
        # "ab" keeps the original append-mode behaviour.
        with open(pdf_out, "ab") as out_pdf:
            writer.write(out_pdf)
# if __name__ == '__main__':
# pdf_in = '待分割pdf'
# pdf_out = '分割后pdf'
# s,e = pi,po
# pdf_split(pi, po, s, e)
def pdf_merger(in_pdfs, out_pdf):
    """Concatenate the PDFs listed in in_pdfs (in order) into out_pdf."""
    merger = PdfFileMerger()
    for path in in_pdfs:
        with open(path, "rb") as handle:
            merger.append(PdfFileReader(handle))
    merger.write(out_pdf)
# if __name__ == "__main__":
# in_pdfs = ["放要合并的PDF文件名称,注意顺序"]
# out_pdf = "输出文件"
# pdf_merger(in_pdfs, out_pdf)
def body():
    """Render the sidebar feature selector and dispatch to the chosen
    PDF tool."""
    st.sidebar.subheader("请选择功能")
    feature = st.sidebar.selectbox(
        "", ("PDF转word", "PDF转图片", "PDF分割", "PDF合并", "从PDF抽取图片", "从PDF抽取表格")
    )
    # fix: 'is' compared identity against a string literal (works only by
    # CPython interning accident and raises SyntaxWarning); use equality.
    if feature == "PDF转图片":
        # NOTE(review): pdf2pic requires an uploaded file argument but is
        # called with none here — confirm how the upload is supplied.
        pdf2pic()
| 28.552239 | 128 | 0.65081 | import streamlit as st
import base64
import os
import time
from pdf2docx import Converter
import tempfile
from pathlib import Path
import streamlit as st
from pdf2image import convert_from_path
def show_pdf(uploaded_file):
    """Render the uploaded PDF inline plus its details, then convert the
    pages to images and display them.

    NOTE(review): the second half duplicates the pdf2pic helper; it
    appears to have been merged in by comment-stripping (the original
    ``def pdf2pic`` boundary is missing) — confirm against the source.
    """
    with st.expander("Original PDF file"):
        # Embed the PDF as a base64 data URI so the browser renders it.
        base64_pdf = base64.b64encode(uploaded_file.read()).decode("utf-8")
        pdf_display = f'<embed src="data:application/pdf;base64,{base64_pdf}" width="100%" height="600" type="application/pdf">'
        st.markdown(pdf_display, unsafe_allow_html=True)
        file_details = {
            "filename": uploaded_file.name,
            "filetype": uploaded_file.type,
            "filesize": uploaded_file.size,
        }
        st.write(file_details)
        st.write(uploaded_file.name)
    # Spool the upload to a real path for pdf2image, then show the pages.
    with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
        fp = Path(tmp_file.name)
        fp.write_bytes(uploaded_file.getvalue())
        imgs = convert_from_path(tmp_file.name)
        with st.expander("Converted images from PDF"):
            st.image(imgs)
class FileDownloader(object):
    """Build a base64 download link for text data in Streamlit."""
    def __init__(self, data, filename="myfile", file_ext="pdf"):
        super(FileDownloader, self).__init__()
        self.data = data            # text payload to offer for download
        self.filename = filename    # base name of the generated file
        self.file_ext = file_ext    # extension appended to the name
    def download(self):
        """Render a markdown anchor that downloads the data."""
        b64 = base64.b64encode(self.data.encode()).decode()
        # NOTE(review): 'timestr' is not defined anywhere in this file —
        # this raises NameError when called; needs a timestamp definition.
        new_filename = "{}_{}_.{}".format(self.filename, timestr, self.file_ext)
        st.markdown("#### Download File ###")
        href = f'<a href="data:file/{self.file_ext};base64,{b64}" download="{new_filename}">Click Here!!</a>'
        st.markdown(href, unsafe_allow_html=True)
def save_uploadedfile(uploadedfile):
    """Persist an uploaded file under ./tempDir and report success."""
    with open(os.path.join("tempDir", uploadedfile.name), "wb") as f:
        f.write(uploadedfile.getbuffer())
    return st.success("Saved File:{} to tempDir".format(uploadedfile.name))
def convert2docx(uploaded_file):
    """Convert a PDF to .docx with pdf2docx, converting all pages."""
    cv = Converter(uploaded_file)
    docx_file = cv.convert(start=0, end=None)
    # NOTE(review): stray bare expression below — looks like residue of a
    # stripped 'from PyPDF2 import ...' line; raises NameError if reached.
    PdfFileMerger
def pdf_split(pdf_in, pdf_out, start, end):
    """Append pages [start, end) of pdf_in to pdf_out."""
    output = PdfFileWriter()
    with open(pdf_in, "rb") as in_pdf:
        pdf_file = PdfFileReader(in_pdf)
        # Copy the requested page range into the writer.
        for i in range(start, end):
            output.addPage(pdf_file.getPage(i))
        # "ab": output is appended to, not overwritten, on repeat runs.
        with open(pdf_out, "ab") as out_pdf:
            output.write(out_pdf)
def pdf_merger(in_pdfs, out_pdf):
    """Concatenate the PDFs listed in in_pdfs (in order) into out_pdf."""
    merger = PdfFileMerger()
    for in_pdf in in_pdfs:
        with open(in_pdf, "rb") as pdf:
            merger.append(PdfFileReader(pdf))
    merger.write(out_pdf)
def body():
    """Render the sidebar feature selector and dispatch to the chosen
    PDF tool."""
    st.sidebar.subheader("请选择功能")
    feature = st.sidebar.selectbox(
        "", ("PDF转word", "PDF转图片", "PDF分割", "PDF合并", "从PDF抽取图片", "从PDF抽取表格")
    )
    # NOTE(review): 'is' compares identity with a string literal; relies
    # on CPython interning — should be '=='.
    if feature is "PDF转图片":
        pdf2pic()
| true | true |
f730342ab5ef9de61150bbe987c938e724c98ab0 | 793 | py | Python | taxcli/helper/invoice_files.py | Nukesor/taxcli | f313acd2f1c9a551361a535f8428a17c53e6b468 | [
"MIT"
] | null | null | null | taxcli/helper/invoice_files.py | Nukesor/taxcli | f313acd2f1c9a551361a535f8428a17c53e6b468 | [
"MIT"
] | null | null | null | taxcli/helper/invoice_files.py | Nukesor/taxcli | f313acd2f1c9a551361a535f8428a17c53e6b468 | [
"MIT"
] | null | null | null | import os
def get_invoice_files(invoices, year=False):
    """Write each invoice's stored file blob to disk.

    Files land in a per-invoice-type folder ('afa' for AfA invoices),
    named ``<alias>-<number>-<iso date>.<ext>``.

    Args:
        invoices: iterable of invoice records carrying an
            ``invoice_file`` blob plus naming metadata.
        year: unused; kept for backward compatibility with callers.
    """
    for invoice in invoices:
        # Skip invoices without an attached file blob.
        if not invoice.invoice_file:
            continue
        # AfA invoices are grouped together; others by invoice type.
        folder = 'afa' if invoice.afa else invoice.invoice_type.name
        # fix: exist_ok avoids the check-then-create race of the original
        os.makedirs(folder, exist_ok=True)
        invoice_name = '{}-{}-{}.{}'.format(
            invoice.contact_alias,
            invoice.invoice_number,
            invoice.date.isoformat(),
            invoice.invoice_file_type,
        )
        path = os.path.join(folder, invoice_name)
        with open(path, "wb") as invoice_file:
            invoice_file.write(invoice.invoice_file)
| 33.041667 | 75 | 0.538462 | import os
def get_invoice_files(invoices, year=False):
    """Write each invoice's stored file blob to disk, grouped into a
    per-invoice-type folder ('afa' for AfA invoices).

    The 'year' parameter is unused in this implementation.
    """
    for invoice in invoices:
        if invoice.invoice_file:
            # Pick the destination folder and create it if missing.
            if not invoice.afa:
                folder = invoice.invoice_type.name
            else:
                folder = 'afa'
            if not os.path.exists(folder):
                os.mkdir(folder)
            # Name: <alias>-<number>-<iso date>.<ext>
            invoice_name = '{}-{}-{}.{}'.format(
                invoice.contact_alias,
                invoice.invoice_number,
                invoice.date.isoformat(),
                invoice.invoice_file_type,
            )
            path = os.path.join(folder, invoice_name)
            with open(path, "wb") as invoice_file:
                invoice_file.write(invoice.invoice_file)
| true | true |
f73034f20da654c6f6022c6e0ce37e0082277f05 | 8,692 | py | Python | src/config/train_config.py | wangx1996/CenterPillarNet | 4be3d53265b8ecb1f9572612fa87f7acd8c57669 | [
"MIT"
] | 22 | 2021-03-19T03:13:16.000Z | 2022-03-31T03:05:07.000Z | src/config/train_config.py | wangx1996/CenterPillarNet | 4be3d53265b8ecb1f9572612fa87f7acd8c57669 | [
"MIT"
] | 4 | 2021-04-18T02:23:13.000Z | 2021-08-25T13:21:08.000Z | src/config/train_config.py | wangx1996/CenterPillarNet | 4be3d53265b8ecb1f9572612fa87f7acd8c57669 | [
"MIT"
] | 7 | 2021-06-04T06:54:21.000Z | 2022-01-17T09:18:50.000Z | """
# -*- coding: utf-8 -*-
-----------------------------------------------------------------------------------
# Author: Nguyen Mau Dung
# DoC: 2020.08.17
# email: nguyenmaudung93.kstn@gmail.com
-----------------------------------------------------------------------------------
# Description: The configurations of the project will be defined here
"""
import os
import argparse
import torch
from easydict import EasyDict as edict
import kitti_config as cnf
def parse_train_configs():
    """Parse command-line arguments and assemble the full training
    configuration (model, dataloader, optimizer, DDP, paths) as an EasyDict.

    Side effects: creates the checkpoints and logs directories if missing.
    """
    parser = argparse.ArgumentParser(description='The Implementation using PyTorch')
    parser.add_argument('--seed', type=int, default=2020,
                        help='re-produce the results with seed random')
    parser.add_argument('--saved_fn', type=str, default='fpn_resnet_18', metavar='FN',
                        help='The name using for saving logs, models,...')
    parser.add_argument('--root-dir', type=str, default='../', metavar='PATH',
                        help='The ROOT working directory')
    ####################################################################
    ##############     Model configs            ########################
    ####################################################################
    parser.add_argument('--arch', type=str, default='fpn_resnet_18', metavar='ARCH',
                        help='The name of the model architecture')
    parser.add_argument('--pretrained_path', type=str, default=None, metavar='PATH',
                        help='the path of the pretrained checkpoint')
    ####################################################################
    ##############     Dataloader and Running configs            #######
    ####################################################################
    parser.add_argument('--hflip_prob', type=float, default=0.5,
                        help='The probability of horizontal flip')
    parser.add_argument('--no-val', action='store_true',
                        help='If true, dont evaluate the model on the val set')
    parser.add_argument('--num_samples', type=int, default=None,
                        help='Take a subset of the dataset to run and debug')
    parser.add_argument('--num_workers', type=int, default=4,
                        help='Number of threads for loading data')
    # fix: the implicitly-concatenated help string was missing spaces
    # between fragments ("total''batch").
    parser.add_argument('--batch_size', type=int, default=16,
                        help='mini-batch size (default: 16), this is the total '
                             'batch size of all GPUs on the current node when using '
                             'Data Parallel or Distributed Data Parallel')
    parser.add_argument('--print_freq', type=int, default=50, metavar='N',
                        help='print frequency (default: 50)')
    parser.add_argument('--tensorboard_freq', type=int, default=50, metavar='N',
                        help='frequency of saving tensorboard (default: 50)')
    # fix: help text said "(default: 5)" but the actual default is 2
    parser.add_argument('--checkpoint_freq', type=int, default=2, metavar='N',
                        help='frequency of saving checkpoints (default: 2)')
    ####################################################################
    ##############     Training strategy            ####################
    ####################################################################
    parser.add_argument('--start_epoch', type=int, default=1, metavar='N',
                        help='the starting epoch')
    parser.add_argument('--num_epochs', type=int, default=300, metavar='N',
                        help='number of total epochs to run')
    parser.add_argument('--lr_type', type=str, default='cosin',
                        help='the type of learning rate scheduler (cosin or multi_step or one_cycle)')
    parser.add_argument('--lr', type=float, default=0.003, metavar='LR',
                        help='initial learning rate')
    parser.add_argument('--minimum_lr', type=float, default=1e-7, metavar='MIN_LR',
                        help='minimum learning rate during training')
    parser.add_argument('--momentum', type=float, default=0.949, metavar='M',
                        help='momentum')
    parser.add_argument('-wd', '--weight_decay', type=float, default=0., metavar='WD',
                        help='weight decay (default: 0.)')
    parser.add_argument('--optimizer_type', type=str, default='adam', metavar='OPTIMIZER',
                        help='the type of optimizer, it can be sgd or adam')
    parser.add_argument('--steps', nargs='*', default=[150, 180],
                        help='number of burn in step')
    ####################################################################
    ##############     Distributed Data Parallel            ############
    ####################################################################
    parser.add_argument('--world-size', default=-1, type=int, metavar='N',
                        help='number of nodes for distributed training')
    parser.add_argument('--rank', default=-1, type=int, metavar='N',
                        help='node rank for distributed training')
    parser.add_argument('--dist-url', default='tcp://127.0.0.1:29500', type=str,
                        help='url used to set up distributed training')
    parser.add_argument('--dist-backend', default='nccl', type=str,
                        help='distributed backend')
    parser.add_argument('--gpu_idx', default=0, type=int,
                        help='GPU index to use.')
    parser.add_argument('--no_cuda', action='store_true',
                        help='If true, cuda is not used.')
    parser.add_argument('--multiprocessing-distributed', action='store_true',
                        help='Use multi-processing distributed training to launch '
                             'N processes per node, which has N GPUs. This is the '
                             'fastest way to use PyTorch for either single node or '
                             'multi node data parallel training')
    ####################################################################
    ##############     Evaluation configurations     ###################
    ####################################################################
    parser.add_argument('--evaluate', action='store_true',
                        help='only evaluate the model, not training')
    parser.add_argument('--resume_path', type=str, default=None, metavar='PATH',
                        help='the path of the resumed checkpoint')
    parser.add_argument('--K', type=int, default=50,
                        help='the number of top K')

    configs = edict(vars(parser.parse_args()))

    ####################################################################
    ############## Hardware configurations #############################
    ####################################################################
    configs.device = torch.device('cpu' if configs.no_cuda else 'cuda')
    configs.ngpus_per_node = torch.cuda.device_count()
    configs.pin_memory = True

    # BEV map geometry and detection-head layout.
    configs.input_size = (cnf.BEV_WIDTH, cnf.BEV_HEIGHT)
    configs.down_ratio = 2
    configs.hm_size = (cnf.BEV_WIDTH/configs.down_ratio, cnf.BEV_HEIGHT/configs.down_ratio)
    configs.max_objects = 50
    configs.imagenet_pretrained = True
    configs.head_conv = 256
    configs.num_classes = 1
    configs.num_center_offset = 2
    configs.num_z = 1
    configs.num_dim = 3
    configs.num_direction = 2  # sin, cos 8 for bin cos sin
    # Pillar/voxelization parameters.
    configs.voxel_size = [0.16, 0.16, 4]
    configs.point_cloud_range = [0, -34.56, -2.73, 69.12, 34.56, 1.27]
    configs.max_number_of_points_per_voxel = 100
    configs.heads = {
        'hm_cen': configs.num_classes,
        'cen_offset': configs.num_center_offset,
        'direction': configs.num_direction,
        'z_coor': configs.num_z,
        'dim': configs.num_dim
    }
    configs.num_input_features = 4

    ####################################################################
    ############## Dataset, logs, Checkpoints dir ######################
    ####################################################################
    configs.dataset_dir = '/media/wx/File/data/kittidata'
    configs.checkpoints_dir = os.path.join(configs.root_dir, 'checkpoints', configs.saved_fn)
    configs.logs_dir = os.path.join(configs.root_dir, 'logs', configs.saved_fn)

    if not os.path.isdir(configs.checkpoints_dir):
        os.makedirs(configs.checkpoints_dir)
    if not os.path.isdir(configs.logs_dir):
        os.makedirs(configs.logs_dir)

    return configs
| 53.654321 | 102 | 0.499655 |
import os
import argparse
import torch
from easydict import EasyDict as edict
import kitti_config as cnf
def parse_train_configs():
parser = argparse.ArgumentParser(description='The Implementation using PyTorch')
parser.add_argument('--seed', type=int, default=2020,
help='re-produce the results with seed random')
parser.add_argument('--saved_fn', type=str, default='fpn_resnet_18', metavar='FN',
help='The name using for saving logs, models,...')
parser.add_argument('--root-dir', type=str, default='../', metavar='PATH',
help='The ROOT working directory')
| true | true |
f73035c8e8d37029f07a95dd13aa78a0f9696623 | 22,403 | py | Python | scipy/special/__init__.py | alimuldal/scipy | 713cf7df7b759e2aaeef0f81eb632f48c9b4bae0 | [
"BSD-3-Clause"
] | 1 | 2019-07-29T02:53:51.000Z | 2019-07-29T02:53:51.000Z | scipy/special/__init__.py | alimuldal/scipy | 713cf7df7b759e2aaeef0f81eb632f48c9b4bae0 | [
"BSD-3-Clause"
] | 1 | 2021-09-11T14:30:32.000Z | 2021-09-11T14:30:32.000Z | scipy/special/__init__.py | alimuldal/scipy | 713cf7df7b759e2aaeef0f81eb632f48c9b4bae0 | [
"BSD-3-Clause"
] | 2 | 2016-12-19T02:27:46.000Z | 2019-07-29T02:53:54.000Z | """
========================================
Special functions (:mod:`scipy.special`)
========================================
.. module:: scipy.special
Nearly all of the functions below are universal functions and follow
broadcasting and automatic array-looping rules. Exceptions are noted.
Error handling
==============
Errors are handled by returning nans, or other appropriate values.
Some of the special function routines will emit warnings when an error
occurs. By default this is disabled. To enable such messages use
``errprint(1)``, and to disable such messages use ``errprint(0)``.
Example:
>>> print(scipy.special.bdtr(-1,10,0.3))
>>> scipy.special.errprint(1)
>>> print(scipy.special.bdtr(-1,10,0.3))
.. autosummary::
:toctree: generated/
errprint
SpecialFunctionWarning -- Warning that can be issued with ``errprint(True)``
Available functions
===================
Airy functions
--------------
.. autosummary::
:toctree: generated/
airy -- Airy functions and their derivatives.
airye -- Exponentially scaled Airy functions
ai_zeros -- [+]Zeros of Airy functions Ai(x) and Ai'(x)
bi_zeros -- [+]Zeros of Airy functions Bi(x) and Bi'(x)
itairy --
Elliptic Functions and Integrals
--------------------------------
.. autosummary::
:toctree: generated/
ellipj -- Jacobian elliptic functions
ellipk -- Complete elliptic integral of the first kind.
ellipkm1 -- ellipkm1(x) == ellipk(1 - x)
ellipkinc -- Incomplete elliptic integral of the first kind.
ellipe -- Complete elliptic integral of the second kind.
ellipeinc -- Incomplete elliptic integral of the second kind.
Bessel Functions
----------------
.. autosummary::
:toctree: generated/
jv -- Bessel function of real-valued order and complex argument.
jn -- Alias for jv
jve -- Exponentially scaled Bessel function.
yn -- Bessel function of second kind (integer order).
yv -- Bessel function of the second kind (real-valued order).
yve -- Exponentially scaled Bessel function of the second kind.
kn -- Modified Bessel function of the second kind (integer order).
kv -- Modified Bessel function of the second kind (real order).
kve -- Exponentially scaled modified Bessel function of the second kind.
iv -- Modified Bessel function.
ive -- Exponentially scaled modified Bessel function.
hankel1 -- Hankel function of the first kind.
hankel1e -- Exponentially scaled Hankel function of the first kind.
hankel2 -- Hankel function of the second kind.
hankel2e -- Exponentially scaled Hankel function of the second kind.
The following is not an universal function:
.. autosummary::
:toctree: generated/
lmbda -- [+]Sequence of lambda functions with arbitrary order v.
Zeros of Bessel Functions
^^^^^^^^^^^^^^^^^^^^^^^^^
These are not universal functions:
.. autosummary::
:toctree: generated/
jnjnp_zeros -- [+]Zeros of integer-order Bessel functions and derivatives sorted in order.
jnyn_zeros -- [+]Zeros of integer-order Bessel functions and derivatives as separate arrays.
jn_zeros -- [+]Zeros of Jn(x)
jnp_zeros -- [+]Zeros of Jn'(x)
yn_zeros -- [+]Zeros of Yn(x)
ynp_zeros -- [+]Zeros of Yn'(x)
y0_zeros -- [+]Complex zeros: Y0(z0)=0 and values of Y0'(z0)
y1_zeros -- [+]Complex zeros: Y1(z1)=0 and values of Y1'(z1)
y1p_zeros -- [+]Complex zeros of Y1'(z1')=0 and values of Y1(z1')
Faster versions of common Bessel Functions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
:toctree: generated/
j0 -- Bessel function of order 0.
j1 -- Bessel function of order 1.
y0 -- Bessel function of second kind of order 0.
y1 -- Bessel function of second kind of order 1.
i0 -- Modified Bessel function of order 0.
i0e -- Exponentially scaled modified Bessel function of order 0.
i1 -- Modified Bessel function of order 1.
i1e -- Exponentially scaled modified Bessel function of order 1.
k0 -- Modified Bessel function of the second kind of order 0.
k0e -- Exponentially scaled modified Bessel function of the second kind of order 0.
k1 -- Modified Bessel function of the second kind of order 1.
k1e -- Exponentially scaled modified Bessel function of the second kind of order 1.
Integrals of Bessel Functions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
:toctree: generated/
itj0y0 -- Basic integrals of j0 and y0 from 0 to x.
it2j0y0 -- Integrals of (1-j0(t))/t from 0 to x and y0(t)/t from x to inf.
iti0k0 -- Basic integrals of i0 and k0 from 0 to x.
it2i0k0 -- Integrals of (i0(t)-1)/t from 0 to x and k0(t)/t from x to inf.
besselpoly -- Integral of a Bessel function: Jv(2* a* x) * x[+]lambda from x=0 to 1.
Derivatives of Bessel Functions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. autosummary::
:toctree: generated/
jvp -- Nth derivative of Jv(v,z)
yvp -- Nth derivative of Yv(v,z)
kvp -- Nth derivative of Kv(v,z)
ivp -- Nth derivative of Iv(v,z)
h1vp -- Nth derivative of H1v(v,z)
h2vp -- Nth derivative of H2v(v,z)
Spherical Bessel Functions
^^^^^^^^^^^^^^^^^^^^^^^^^^
These are not universal functions:
.. autosummary::
:toctree: generated/
sph_jn -- [+]Sequence of spherical Bessel functions, jn(z)
sph_yn -- [+]Sequence of spherical Bessel functions, yn(z)
sph_jnyn -- [+]Sequence of spherical Bessel functions, jn(z) and yn(z)
sph_in -- [+]Sequence of spherical Bessel functions, in(z)
sph_kn -- [+]Sequence of spherical Bessel functions, kn(z)
sph_inkn -- [+]Sequence of spherical Bessel functions, in(z) and kn(z)
Riccati-Bessel Functions
^^^^^^^^^^^^^^^^^^^^^^^^
These are not universal functions:
.. autosummary::
:toctree: generated/
riccati_jn -- [+]Sequence of Ricatti-Bessel functions of first kind.
riccati_yn -- [+]Sequence of Ricatti-Bessel functions of second kind.
Struve Functions
----------------
.. autosummary::
:toctree: generated/
struve -- Struve function --- Hv(x)
modstruve -- Modified Struve function --- Lv(x)
itstruve0 -- Integral of H0(t) from 0 to x
it2struve0 -- Integral of H0(t)/t from x to Inf.
itmodstruve0 -- Integral of L0(t) from 0 to x.
Raw Statistical Functions
-------------------------
.. seealso:: :mod:`scipy.stats`: Friendly versions of these functions.
.. autosummary::
:toctree: generated/
bdtr -- Sum of terms 0 through k of the binomial pdf.
bdtrc -- Sum of terms k+1 through n of the binomial pdf.
bdtri -- Inverse of bdtr
bdtrik --
bdtrin --
btdtr -- Integral from 0 to x of beta pdf.
btdtri -- Quantiles of beta distribution
btdtria --
btdtrib --
fdtr -- Integral from 0 to x of F pdf.
fdtrc -- Integral from x to infinity under F pdf.
fdtri -- Inverse of fdtrc
fdtridfd --
gdtr -- Integral from 0 to x of gamma pdf.
gdtrc -- Integral from x to infinity under gamma pdf.
gdtria -- Inverse with respect to `a` of gdtr.
gdtrib -- Inverse with respect to `b` of gdtr.
gdtrix -- Inverse with respect to `x` of gdtr.
nbdtr -- Sum of terms 0 through k of the negative binomial pdf.
nbdtrc -- Sum of terms k+1 to infinity under negative binomial pdf.
nbdtri -- Inverse of nbdtr
nbdtrik --
nbdtrin --
ncfdtr -- CDF of non-central t distribution.
ncfdtridfd -- Find degrees of freedom (denominator) of noncentral F distribution.
ncfdtridfn -- Find degrees of freedom (numerator) of noncentral F distribution.
ncfdtri -- Inverse CDF of noncentral F distribution.
ncfdtrinc -- Find noncentrality parameter of noncentral F distribution.
nctdtr -- CDF of noncentral t distribution.
nctdtridf -- Find degrees of freedom of noncentral t distribution.
nctdtrit -- Inverse CDF of noncentral t distribution.
nctdtrinc -- Find noncentrality parameter of noncentral t distribution.
nrdtrimn -- Find mean of normal distribution from cdf and std.
nrdtrisd -- Find std of normal distribution from cdf and mean.
pdtr -- Sum of terms 0 through k of the Poisson pdf.
pdtrc -- Sum of terms k+1 to infinity of the Poisson pdf.
pdtri -- Inverse of pdtr
pdtrik --
stdtr -- Integral from -infinity to t of the Student-t pdf.
stdtridf --
stdtrit --
chdtr -- Integral from 0 to x of the Chi-square pdf.
   chdtrc       -- Integral from x to infinity of Chi-square pdf.
chdtri -- Inverse of chdtrc.
chdtriv --
ndtr -- Integral from -infinity to x of standard normal pdf
log_ndtr -- Logarithm of integral from -infinity to x of standard normal pdf
ndtri -- Inverse of ndtr (quantiles)
chndtr --
chndtridf --
chndtrinc --
chndtrix --
smirnov -- Kolmogorov-Smirnov complementary CDF for one-sided test statistic (Dn+ or Dn-)
smirnovi -- Inverse of smirnov.
kolmogorov -- The complementary CDF of the (scaled) two-sided test statistic (Kn*) valid for large n.
kolmogi -- Inverse of kolmogorov
tklmbda -- Tukey-Lambda CDF
logit --
expit --
boxcox -- Compute the Box-Cox transformation.
boxcox1p -- Compute the Box-Cox transformation of 1 + x.
   inv_boxcox -- Compute the inverse of the Box-Cox transformation.
inv_boxcox1p -- Compute the inverse of the Box-Cox transformation of 1 + x.
Information Theory Functions
----------------------------
.. autosummary::
:toctree: generated/
entr -- entr(x) = -x*log(x)
rel_entr -- rel_entr(x, y) = x*log(x/y)
kl_div -- kl_div(x, y) = x*log(x/y) - x + y
huber -- Huber loss function.
pseudo_huber -- Pseudo-Huber loss function.
Gamma and Related Functions
---------------------------
.. autosummary::
:toctree: generated/
gamma -- Gamma function.
gammaln -- Log transformation of the gamma function.
gammasgn -- Sign of the gamma function.
gammainc -- Incomplete gamma integral.
gammaincinv -- Inverse of gammainc.
gammaincc -- Complemented incomplete gamma integral.
gammainccinv -- Inverse of gammaincc.
beta -- Beta function.
betaln -- Log of the absolute value of the beta function.
betainc -- Incomplete beta integral.
betaincinv -- Inverse of betainc.
psi -- Logarithmic derivative of the gamma function.
rgamma -- One divided by the gamma function.
polygamma -- Nth derivative of psi function.
multigammaln -- Log of the multivariate gamma.
digamma -- Digamma function (derivative of the logarithm of gamma).
poch -- The Pochhammer symbol (rising factorial).
Error Function and Fresnel Integrals
------------------------------------
.. autosummary::
:toctree: generated/
erf -- Error function.
erfc -- Complemented error function (1- erf(x))
erfcx -- Scaled complemented error function exp(x**2)*erfc(x)
erfi -- Imaginary error function, -i erf(i x)
erfinv -- Inverse of error function
erfcinv -- Inverse of erfc
   wofz         -- Faddeeva function.
dawsn -- Dawson's integral.
fresnel -- Fresnel sine and cosine integrals.
fresnel_zeros -- Complex zeros of both Fresnel integrals
modfresnelp -- Modified Fresnel integrals F_+(x) and K_+(x)
modfresnelm -- Modified Fresnel integrals F_-(x) and K_-(x)
These are not universal functions:
.. autosummary::
:toctree: generated/
erf_zeros -- [+]Complex zeros of erf(z)
fresnelc_zeros -- [+]Complex zeros of Fresnel cosine integrals
fresnels_zeros -- [+]Complex zeros of Fresnel sine integrals
Legendre Functions
------------------
.. autosummary::
:toctree: generated/
lpmv -- Associated Legendre Function of arbitrary non-negative degree v.
sph_harm -- Spherical Harmonics (complex-valued) Y^m_n(theta,phi)
These are not universal functions:
.. autosummary::
:toctree: generated/
clpmn -- [+]Associated Legendre Function of the first kind for complex arguments.
lpn -- [+]Legendre Functions (polynomials) of the first kind
lqn -- [+]Legendre Functions of the second kind.
lpmn -- [+]Associated Legendre Function of the first kind for real arguments.
lqmn -- [+]Associated Legendre Function of the second kind.
Ellipsoidal Harmonics
---------------------
.. autosummary::
:toctree: generated/
ellip_harm -- Ellipsoidal harmonic E
ellip_harm_2 -- Ellipsoidal harmonic F
ellip_normal -- Ellipsoidal normalization constant
Orthogonal polynomials
----------------------
The following functions evaluate values of orthogonal polynomials:
.. autosummary::
:toctree: generated/
assoc_laguerre
eval_legendre
eval_chebyt
eval_chebyu
eval_chebyc
eval_chebys
eval_jacobi
eval_laguerre
eval_genlaguerre
eval_hermite
eval_hermitenorm
eval_gegenbauer
eval_sh_legendre
eval_sh_chebyt
eval_sh_chebyu
eval_sh_jacobi
The functions below, in turn, return the polynomial coefficients in
:class:`~.orthopoly1d` objects, which function similarly as :ref:`numpy.poly1d`.
The :class:`~.orthopoly1d` class also has an attribute ``weights`` which returns
the roots, weights, and total weights for the appropriate form of Gaussian
quadrature. These are returned in an ``n x 3`` array with roots in the first
column, weights in the second column, and total weights in the final column.
Note that :class:`~.orthopoly1d` objects are converted to ``poly1d`` when doing
arithmetic, and lose information of the original orthogonal polynomial.
.. autosummary::
:toctree: generated/
legendre -- [+]Legendre polynomial P_n(x) (lpn -- for function).
chebyt -- [+]Chebyshev polynomial T_n(x)
chebyu -- [+]Chebyshev polynomial U_n(x)
chebyc -- [+]Chebyshev polynomial C_n(x)
chebys -- [+]Chebyshev polynomial S_n(x)
jacobi -- [+]Jacobi polynomial P^(alpha,beta)_n(x)
laguerre -- [+]Laguerre polynomial, L_n(x)
genlaguerre -- [+]Generalized (Associated) Laguerre polynomial, L^alpha_n(x)
hermite -- [+]Hermite polynomial H_n(x)
hermitenorm -- [+]Normalized Hermite polynomial, He_n(x)
gegenbauer -- [+]Gegenbauer (Ultraspherical) polynomials, C^(alpha)_n(x)
sh_legendre -- [+]shifted Legendre polynomial, P*_n(x)
sh_chebyt -- [+]shifted Chebyshev polynomial, T*_n(x)
sh_chebyu -- [+]shifted Chebyshev polynomial, U*_n(x)
sh_jacobi -- [+]shifted Jacobi polynomial, J*_n(x) = G^(p,q)_n(x)
.. warning::
Computing values of high-order polynomials (around ``order > 20``) using
polynomial coefficients is numerically unstable. To evaluate polynomial
values, the ``eval_*`` functions should be used instead.
Roots and weights for orthogonal polynomials
.. autosummary::
:toctree: generated/
c_roots
cg_roots
h_roots
he_roots
j_roots
js_roots
l_roots
la_roots
p_roots
ps_roots
s_roots
t_roots
ts_roots
u_roots
us_roots
Hypergeometric Functions
------------------------
.. autosummary::
:toctree: generated/
hyp2f1 -- Gauss hypergeometric function (2F1)
hyp1f1 -- Confluent hypergeometric function (1F1)
hyperu -- Confluent hypergeometric function (U)
hyp0f1 -- Confluent hypergeometric limit function (0F1)
hyp2f0 -- Hypergeometric function (2F0)
hyp1f2 -- Hypergeometric function (1F2)
hyp3f0 -- Hypergeometric function (3F0)
Parabolic Cylinder Functions
----------------------------
.. autosummary::
:toctree: generated/
pbdv -- Parabolic cylinder function Dv(x) and derivative.
pbvv -- Parabolic cylinder function Vv(x) and derivative.
pbwa -- Parabolic cylinder function W(a,x) and derivative.
These are not universal functions:
.. autosummary::
:toctree: generated/
pbdv_seq -- [+]Sequence of parabolic cylinder functions Dv(x)
pbvv_seq -- [+]Sequence of parabolic cylinder functions Vv(x)
pbdn_seq -- [+]Sequence of parabolic cylinder functions Dn(z), complex z
Mathieu and Related Functions
-----------------------------
.. autosummary::
:toctree: generated/
mathieu_a -- Characteristic values for even solution (ce_m)
mathieu_b -- Characteristic values for odd solution (se_m)
These are not universal functions:
.. autosummary::
:toctree: generated/
mathieu_even_coef -- [+]sequence of expansion coefficients for even solution
mathieu_odd_coef -- [+]sequence of expansion coefficients for odd solution
The following return both function and first derivative:
.. autosummary::
:toctree: generated/
mathieu_cem -- Even Mathieu function
mathieu_sem -- Odd Mathieu function
mathieu_modcem1 -- Even modified Mathieu function of the first kind
mathieu_modcem2 -- Even modified Mathieu function of the second kind
mathieu_modsem1 -- Odd modified Mathieu function of the first kind
mathieu_modsem2 -- Odd modified Mathieu function of the second kind
Spheroidal Wave Functions
-------------------------
.. autosummary::
:toctree: generated/
pro_ang1 -- Prolate spheroidal angular function of the first kind
pro_rad1 -- Prolate spheroidal radial function of the first kind
pro_rad2 -- Prolate spheroidal radial function of the second kind
obl_ang1 -- Oblate spheroidal angular function of the first kind
obl_rad1 -- Oblate spheroidal radial function of the first kind
obl_rad2 -- Oblate spheroidal radial function of the second kind
pro_cv -- Compute characteristic value for prolate functions
obl_cv -- Compute characteristic value for oblate functions
pro_cv_seq -- Compute sequence of prolate characteristic values
obl_cv_seq -- Compute sequence of oblate characteristic values
The following functions require pre-computed characteristic value:
.. autosummary::
:toctree: generated/
pro_ang1_cv -- Prolate spheroidal angular function of the first kind
pro_rad1_cv -- Prolate spheroidal radial function of the first kind
pro_rad2_cv -- Prolate spheroidal radial function of the second kind
obl_ang1_cv -- Oblate spheroidal angular function of the first kind
obl_rad1_cv -- Oblate spheroidal radial function of the first kind
obl_rad2_cv -- Oblate spheroidal radial function of the second kind
Kelvin Functions
----------------
.. autosummary::
:toctree: generated/
kelvin -- All Kelvin functions (order 0) and derivatives.
kelvin_zeros -- [+]Zeros of All Kelvin functions (order 0) and derivatives
ber -- Kelvin function ber x
bei -- Kelvin function bei x
berp -- Derivative of Kelvin function ber x
beip -- Derivative of Kelvin function bei x
ker -- Kelvin function ker x
kei -- Kelvin function kei x
kerp -- Derivative of Kelvin function ker x
keip -- Derivative of Kelvin function kei x
These are not universal functions:
.. autosummary::
:toctree: generated/
ber_zeros -- [+]Zeros of Kelvin function bei x
bei_zeros -- [+]Zeros of Kelvin function ber x
berp_zeros -- [+]Zeros of derivative of Kelvin function ber x
beip_zeros -- [+]Zeros of derivative of Kelvin function bei x
ker_zeros -- [+]Zeros of Kelvin function kei x
kei_zeros -- [+]Zeros of Kelvin function ker x
kerp_zeros -- [+]Zeros of derivative of Kelvin function ker x
keip_zeros -- [+]Zeros of derivative of Kelvin function kei x
Combinatorics
-------------
.. autosummary::
:toctree: generated/
comb -- [+]Combinations of N things taken k at a time, "N choose k"
perm -- [+]Permutations of N things taken k at a time, "k-permutations of N"
Other Special Functions
-----------------------
.. autosummary::
:toctree: generated/
agm -- Arithmetic-Geometric Mean
bernoulli -- Bernoulli numbers
binom -- Binomial coefficient.
diric -- Dirichlet function (periodic sinc)
euler -- Euler numbers
expn -- Exponential integral.
exp1 -- Exponential integral of order 1 (for complex argument)
expi -- Another exponential integral -- Ei(x)
factorial -- The factorial function, n! = special.gamma(n+1)
   factorial2   -- Double factorial, n!! = n*(n-2)*(n-4)*...
factorialk -- [+](...((n!)!)!...)! where there are k '!'
shichi -- Hyperbolic sine and cosine integrals.
sici -- Integral of the sinc and "cosinc" functions.
spence -- Dilogarithm integral.
lambertw -- Lambert W function
zeta -- Riemann zeta function of two arguments.
zetac -- Standard Riemann zeta function minus 1.
Convenience Functions
---------------------
.. autosummary::
:toctree: generated/
cbrt -- Cube root.
exp10 -- 10 raised to the x power.
exp2 -- 2 raised to the x power.
radian -- radian angle given degrees, minutes, and seconds.
cosdg -- cosine of the angle given in degrees.
sindg -- sine of the angle given in degrees.
tandg -- tangent of the angle given in degrees.
cotdg -- cotangent of the angle given in degrees.
log1p -- log(1+x)
expm1 -- exp(x)-1
cosm1 -- cos(x)-1
round -- round the argument to the nearest integer. If argument ends in 0.5 exactly, pick the nearest even integer.
xlogy -- x*log(y)
xlog1py -- x*log1p(y)
exprel -- (exp(x)-1)/x
sinc -- sin(x)/x
.. [+] in the description indicates a function which is not a universal
.. function and does not follow broadcasting and automatic
.. array-looping rules.
"""
from __future__ import division, print_function, absolute_import

# Pull every public special function into the package namespace.
from ._ufuncs import *
from .basic import *
from . import specfun
from . import orthogonal
from .orthogonal import *
from .spfun_stats import multigammaln
from ._ellip_harm import ellip_harm, ellip_harm_2, ellip_normal
from .lambertw import lambertw

# Public API: every name imported above that is not underscore-private.
__all__ = [s for s in dir() if not s.startswith('_')]

# Register the C-accelerated i0 so numpy.dual dispatches to this version.
from numpy.dual import register_func
register_func('i0',i0)
del register_func

# Expose `scipy.special.test()` for running this subpackage's test suite.
from numpy.testing import Tester
test = Tester().test
| 34.679567 | 121 | 0.658796 |
from __future__ import division, print_function, absolute_import
from ._ufuncs import *
from .basic import *
from . import specfun
from . import orthogonal
from .orthogonal import *
from .spfun_stats import multigammaln
from ._ellip_harm import ellip_harm, ellip_harm_2, ellip_normal
from .lambertw import lambertw
__all__ = [s for s in dir() if not s.startswith('_')]
from numpy.dual import register_func
register_func('i0',i0)
del register_func
from numpy.testing import Tester
test = Tester().test
| true | true |
f730363349853b93aa06cd6ec0467353dfec8ae9 | 2,024 | py | Python | config/settings/test.py | seankim84/twitter | 71cbcd821effc4e77588b195d770ef003887d322 | [
"MIT"
] | null | null | null | config/settings/test.py | seankim84/twitter | 71cbcd821effc4e77588b195d770ef003887d322 | [
"MIT"
] | null | null | null | config/settings/test.py | seankim84/twitter | 71cbcd821effc4e77588b195d770ef003887d322 | [
"MIT"
] | null | null | null | """
With these settings, tests run faster.
"""
from .base import *  # noqa
from .base import env

# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Hard-coded fallback is acceptable here: these settings are only for tests.
SECRET_KEY = env("DJANGO_SECRET_KEY", default="uaAkGkerZ7vi0VWITpJheK17oRcfIACMfcTmeZhyrF5IG4jJO3ougsdusXKzpyF0")
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"

# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
# In-process memory cache: fast and isolated per test run, no external service.
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.locmem.LocMemCache", "LOCATION": ""
    }
}

# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
# MD5 is insecure but much faster than the production hashers; fine for tests.
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]

# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]["OPTIONS"]["debug"] = DEBUG  # noqa F405
# Cached loader: each template is compiled once per run instead of per render.
TEMPLATES[0]["OPTIONS"]["loaders"] = [  # noqa F405
    (
        "django.template.loaders.cached.Loader",
        [
            "django.template.loaders.filesystem.Loader",
            "django.template.loaders.app_directories.Loader",
        ],
    )
]

# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
# Keep outgoing mail in django.core.mail.outbox so tests can inspect it.
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = "localhost"
# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 1025

# Your stuff...
# ------------------------------------------------------------------------------
from .base import *
from .base import env
= False
= env("DJANGO_SECRET_KEY", default="uaAkGkerZ7vi0VWITpJheK17oRcfIACMfcTmeZhyrF5IG4jJO3ougsdusXKzpyF0")
= "django.test.runner.DiscoverRunner"
= {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache", "LOCATION": ""
}
}
= ["django.contrib.auth.hashers.MD5PasswordHasher"]
[0]["OPTIONS"]["debug"] = DEBUG
TEMPLATES[0]["OPTIONS"]["loaders"] = [
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
= "django.core.mail.backends.locmem.EmailBackend"
= "localhost"
= 1025
| true | true |
f73036fe32a7f58510cda8b92c49c85b9b6d8b44 | 1,195 | py | Python | src/scrape/organisation_model.py | younginnovations/iati-organisations-cleanup | 6a073abbb43957632d988880fc96319da2265e13 | [
"MIT"
] | 1 | 2017-09-07T11:44:56.000Z | 2017-09-07T11:44:56.000Z | src/scrape/organisation_model.py | younginnovations/iati-organisations-cleanup | 6a073abbb43957632d988880fc96319da2265e13 | [
"MIT"
] | null | null | null | src/scrape/organisation_model.py | younginnovations/iati-organisations-cleanup | 6a073abbb43957632d988880fc96319da2265e13 | [
"MIT"
] | null | null | null | from peewee import *
import datetime
from config import *
# Shared Postgres connection for all models below; the POSTGRES_* constants
# come from config (imported with `*` above).
database = PostgresqlDatabase(POSTGRES_DATABASE, user=POSTGRES_USER, password=POSTGRES_PASSWORD, host=POSTGRES_HOST)
class TblOrganisation(Model):
    """Peewee model for a scraped IATI organisation (table ``organisations``).

    Attributes:
        identifier: IATI organisation identifier string.
        type: Numeric organisation-type code.
        country: Country string associated with the organisation.
        is_org_file: Whether an organisation file exists for it.
        is_publisher: Whether the organisation is a registered publisher.
        last_updated: Date string (YYYY-MM-DD) set when the row is created.
    """

    id = PrimaryKeyField()
    identifier = CharField()
    type = IntegerField()
    country = CharField()
    is_org_file = BooleanField(default=False)
    is_publisher = BooleanField(default=False)
    # Bug fix: the default must be a callable. The original passed
    # datetime.datetime.now().strftime(...), which is evaluated ONCE at
    # import time, so every row created later got the process start date.
    # Peewee calls a callable default per INSERT instead.
    last_updated = DateTimeField(
        null=True,
        default=lambda: datetime.datetime.now().strftime('%Y-%m-%d'))

    class Meta:
        db_table = "organisations"
        database = database
class TblName(Model):
    """Peewee model for an organisation's name in one language (table ``names``)."""

    # FK to the owning organisation; reverse accessor is `organisation.names`.
    organisation = ForeignKeyField(TblOrganisation, to_field="id", related_name='names')
    # The organisation name text itself.
    name = TextField()
    # Marks the primary name; defaults to True for the first/main name.
    is_primary = BooleanField(default=True)
    # Language code of this name (presumably a CSV header such as "en" —
    # TODO confirm against getLanguages() below).
    language = CharField()

    class Meta:
        db_table = "names"
        database = database
def getLanguages(row):
    """Return the language column names present in a scraped CSV row.

    Every header that is not one of the known organisation columns is
    treated as a language code. Order of first appearance is preserved and
    duplicates (after whitespace stripping) are dropped.

    Args:
        row: Mapping of CSV header -> cell value (e.g. a csv.DictReader row).

    Returns:
        List of unique, stripped non-standard (language) column names.
    """
    # Known non-language columns; a set gives O(1) membership tests.
    known_headers = {"name", "identifier", "type", "country", "countrycode",
                     "is_org_file", "is_publisher", "last_updated"}
    languages = []
    for key in row:
        key = key.strip()
        if key not in known_headers and key not in languages:
            languages.append(key)
    return languages
| 33.194444 | 121 | 0.68954 | from peewee import *
import datetime
from config import *
database = PostgresqlDatabase(POSTGRES_DATABASE, user=POSTGRES_USER, password=POSTGRES_PASSWORD, host=POSTGRES_HOST)
class TblOrganisation(Model):
id = PrimaryKeyField()
identifier = CharField()
type = IntegerField()
country = CharField()
is_org_file = BooleanField(default=False)
is_publisher = BooleanField(default=False)
last_updated = DateTimeField(null=True, default=datetime.datetime.now().strftime('%Y-%m-%d'))
class Meta:
db_table = "organisations"
database = database
class TblName(Model):
organisation = ForeignKeyField(TblOrganisation, to_field="id", related_name='names')
name = TextField()
is_primary = BooleanField(default=True)
language = CharField()
class Meta:
db_table = "names"
database = database
def getLanguages(row):
knownheader = ["name", "identifier", "type", "country", "countrycode", "is_org_file", "is_publisher", "last_updated"]
languages = []
for key in row.keys():
key = key.strip()
if not key in knownheader and not key in languages:
languages.append(key)
return languages
| true | true |
f73037a5f0c5b05174b9618464d1cd001011645b | 6,765 | py | Python | dm_pix/_src/metrics.py | mbilalai/dm_pix | 458e86f28df3f72017dc00b5449bc9ede3e0f566 | [
"Apache-2.0"
] | 1 | 2021-07-29T06:51:21.000Z | 2021-07-29T06:51:21.000Z | dm_pix/_src/metrics.py | mbilalai/dm_pix | 458e86f28df3f72017dc00b5449bc9ede3e0f566 | [
"Apache-2.0"
] | null | null | null | dm_pix/_src/metrics.py | mbilalai/dm_pix | 458e86f28df3f72017dc00b5449bc9ede3e0f566 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions to compare image pairs.
Images are assumed to be [0, 1] of floating point dtype with [N]HWC shapes.
Each image metric function returns a scalar for each image pair.
"""
import chex
import jax
import jax.numpy as jnp
import jax.scipy as jsp
def mae(a: chex.Array, b: chex.Array) -> chex.Numeric:
  """Computes the mean absolute error for each image pair.

  Args:
    a: First image (or batch of images) in [N]HWC layout.
    b: Second image (or batch of images), same shape and dtype as `a`.

  Returns:
    One MAE value per image pair, averaged over the H, W and C axes.
  """
  chex.assert_rank([a, b], {3, 4})
  chex.assert_type([a, b], float)
  chex.assert_equal_shape([a, b])
  pixel_errors = jnp.abs(a - b)
  return jnp.mean(pixel_errors, axis=(-3, -2, -1))
def mse(a: chex.Array, b: chex.Array) -> chex.Numeric:
  """Computes the mean squared error for each image pair.

  Args:
    a: First image (or batch of images) in [N]HWC layout.
    b: Second image (or batch of images), same shape and dtype as `a`.

  Returns:
    One MSE value per image pair, averaged over the H, W and C axes.
  """
  chex.assert_rank([a, b], {3, 4})
  chex.assert_type([a, b], float)
  chex.assert_equal_shape([a, b])
  squared_errors = jnp.square(a - b)
  return jnp.mean(squared_errors, axis=(-3, -2, -1))
def psnr(a: chex.Array, b: chex.Array) -> chex.Numeric:
  """Computes the peak signal-to-noise ratio, in decibels, per image pair.

  The dynamic range of the inputs (difference between the maximum and
  minimum allowed values) is assumed to be 1.0, so the peak-signal term
  drops out of the formula.

  Args:
    a: First image (or batch of images) in [N]HWC layout.
    b: Second image (or batch of images), same shape and dtype as `a`.

  Returns:
    One PSNR value (dB) per image pair.
  """
  chex.assert_rank([a, b], {3, 4})
  chex.assert_type([a, b], float)
  chex.assert_equal_shape([a, b])
  # PSNR = -10 * log10(MSE), written with natural logs.
  log_of_10 = jnp.log(10.0)
  return -10.0 * jnp.log(mse(a, b)) / log_of_10
def rmse(a: chex.Array, b: chex.Array) -> chex.Numeric:
  """Computes the root mean squared error for each image pair.

  Args:
    a: First image (or batch of images) in [N]HWC layout.
    b: Second image (or batch of images), same shape and dtype as `a`.

  Returns:
    One RMSE value per image pair.
  """
  chex.assert_rank([a, b], {3, 4})
  chex.assert_type([a, b], float)
  chex.assert_equal_shape([a, b])
  mean_squared_error = mse(a, b)
  return jnp.sqrt(mean_squared_error)
def simse(a: chex.Array, b: chex.Array) -> chex.Numeric:
  """Computes the scale-invariant mean squared error per image pair.

  A per-pair scale for `b` is chosen as the least-squares solution of

      min_alpha || vec(a) - alpha * vec(b) ||_2^2,

  i.e. alpha = <a, b> / <b, b> over the flattened images, and the MSE
  between `a` and the optimally rescaled `b` is returned. The metric is
  therefore invariant to rescaling `b` (simse(x, y) == simse(x, 5 * y)).

  Used in "Shape, Illumination, and Reflectance from Shading",
  Barron & Malik, TPAMI 2015.

  Args:
    a: First image (or batch of images) in [N]HWC layout.
    b: Second image (or batch of images), same shape and dtype as `a`.

  Returns:
    One SIMSE value per image pair.
  """
  chex.assert_rank([a, b], {3, 4})
  chex.assert_type([a, b], float)
  chex.assert_equal_shape([a, b])
  reduce_axes = (-3, -2, -1)
  inner_ab = jnp.sum(a * b, axis=reduce_axes, keepdims=True)
  inner_bb = jnp.sum(b * b, axis=reduce_axes, keepdims=True)
  best_alpha = inner_ab / inner_bb
  return mse(a, best_alpha * b)
def ssim(
    a: chex.Array,
    b: chex.Array,
    *,
    max_val: float = 1.0,
    filter_size: int = 11,
    filter_sigma: float = 1.5,
    k1: float = 0.01,
    k2: float = 0.03,
    return_map: bool = False,
) -> chex.Numeric:
  """Computes the structural similarity index (SSIM) between image pairs.

  This function is based on the standard SSIM implementation from:
  Z. Wang, A. C. Bovik, H. R. Sheikh and E. P. Simoncelli,
  "Image quality assessment: from error visibility to structural similarity",
  in IEEE Transactions on Image Processing, vol. 13, no. 4, pp. 600-612, 2004.

  This function was modeled after tf.image.ssim, and should produce comparable
  output.

  Note: the true SSIM is only defined on grayscale. This function does not
  perform any colorspace transform. If the input is in a color space, then it
  will compute the average SSIM.

  Args:
    a: First image (or set of images).
    b: Second image (or set of images).
    max_val: The maximum magnitude that `a` or `b` can have.
    filter_size: Window size (>= 1). Image dims must be at least this small.
    filter_sigma: The bandwidth of the Gaussian used for filtering (> 0.).
    k1: One of the SSIM dampening parameters (> 0.).
    k2: One of the SSIM dampening parameters (> 0.).
    return_map: If True, will cause the per-pixel SSIM "map" to be returned.

  Returns:
    Each image's mean SSIM, or a tensor of individual values if `return_map`.
  """
  chex.assert_rank([a, b], {3, 4})
  chex.assert_type([a, b], float)
  chex.assert_equal_shape([a, b])

  # Construct a 1D Gaussian blur filter.
  # `shift` centers the sample points for even filter sizes; the filter is
  # normalized to sum to 1 so blurred means stay in the input range.
  hw = filter_size // 2
  shift = (2 * hw - filter_size + 1) / 2
  f_i = ((jnp.arange(filter_size) - hw + shift) / filter_sigma)**2
  filt = jnp.exp(-0.5 * f_i)
  filt /= jnp.sum(filt)

  # Blur in x and y (faster than the 2D convolution).
  # "valid" mode shrinks the spatial dims by (filter_size - 1) each.
  def convolve2d(z, f):
    return jsp.signal.convolve2d(
        z, f, mode="valid", precision=jax.lax.Precision.HIGHEST)

  filt_fn1 = lambda z: convolve2d(z, filt[:, jnp.newaxis])
  filt_fn2 = lambda z: convolve2d(z, filt[jnp.newaxis, :])

  # `vmap` the blurs to the tensor size, and then compose them.
  # The 2D convolution only sees (height, width); vmap maps it over the
  # leading batch axes (if any) and the trailing channel axis.
  num_dims = len(a.shape)
  map_axes = tuple(list(range(num_dims - 3)) + [num_dims - 1])
  filt_fn = lambda z: filt_fn1(filt_fn2(z))
  for d in map_axes:
    filt_fn = jax.vmap(filt_fn, in_axes=d, out_axes=d)

  # Local (windowed) first and second moments of each image.
  mu0 = filt_fn(a)
  mu1 = filt_fn(b)
  mu00 = mu0 * mu0
  mu11 = mu1 * mu1
  mu01 = mu0 * mu1
  sigma00 = filt_fn(a**2) - mu00
  sigma11 = filt_fn(b**2) - mu11
  sigma01 = filt_fn(a * b) - mu01

  # Clip the variances and covariances to valid values.
  # Variance must be non-negative:
  sigma00 = jnp.maximum(0., sigma00)
  sigma11 = jnp.maximum(0., sigma11)
  # Cauchy-Schwarz: |cov| <= sqrt(var_a * var_b); clamp magnitude, keep sign.
  sigma01 = jnp.sign(sigma01) * jnp.minimum(
      jnp.sqrt(sigma00 * sigma11), jnp.abs(sigma01))

  # Standard SSIM formula with the usual stabilizing constants c1, c2.
  c1 = (k1 * max_val)**2
  c2 = (k2 * max_val)**2
  numer = (2 * mu01 + c1) * (2 * sigma01 + c2)
  denom = (mu00 + mu11 + c1) * (sigma00 + sigma11 + c2)
  ssim_map = numer / denom
  # Mean over the (height, width, channels) axes gives one score per image.
  ssim_value = jnp.mean(ssim_map, list(range(num_dims - 3, num_dims)))
  return ssim_map if return_map else ssim_value
| 31.910377 | 80 | 0.660902 |
import chex
import jax
import jax.numpy as jnp
import jax.scipy as jsp
def mae(a: chex.Array, b: chex.Array) -> chex.Numeric:
chex.assert_rank([a, b], {3, 4})
chex.assert_type([a, b], float)
chex.assert_equal_shape([a, b])
return jnp.abs(a - b).mean(axis=(-3, -2, -1))
def mse(a: chex.Array, b: chex.Array) -> chex.Numeric:
chex.assert_rank([a, b], {3, 4})
chex.assert_type([a, b], float)
chex.assert_equal_shape([a, b])
return jnp.square(a - b).mean(axis=(-3, -2, -1))
def psnr(a: chex.Array, b: chex.Array) -> chex.Numeric:
chex.assert_rank([a, b], {3, 4})
chex.assert_type([a, b], float)
chex.assert_equal_shape([a, b])
return -10.0 * jnp.log(mse(a, b)) / jnp.log(10.0)
def rmse(a: chex.Array, b: chex.Array) -> chex.Numeric:
chex.assert_rank([a, b], {3, 4})
chex.assert_type([a, b], float)
chex.assert_equal_shape([a, b])
return jnp.sqrt(mse(a, b))
def simse(a: chex.Array, b: chex.Array) -> chex.Numeric:
chex.assert_rank([a, b], {3, 4})
chex.assert_type([a, b], float)
chex.assert_equal_shape([a, b])
a_dot_b = (a * b).sum(axis=(-3, -2, -1), keepdims=True)
b_dot_b = (b * b).sum(axis=(-3, -2, -1), keepdims=True)
alpha = a_dot_b / b_dot_b
return mse(a, alpha * b)
def ssim(
a: chex.Array,
b: chex.Array,
*,
max_val: float = 1.0,
filter_size: int = 11,
filter_sigma: float = 1.5,
k1: float = 0.01,
k2: float = 0.03,
return_map: bool = False,
) -> chex.Numeric:
chex.assert_rank([a, b], {3, 4})
chex.assert_type([a, b], float)
chex.assert_equal_shape([a, b])
hw = filter_size // 2
shift = (2 * hw - filter_size + 1) / 2
f_i = ((jnp.arange(filter_size) - hw + shift) / filter_sigma)**2
filt = jnp.exp(-0.5 * f_i)
filt /= jnp.sum(filt)
def convolve2d(z, f):
return jsp.signal.convolve2d(
z, f, mode="valid", precision=jax.lax.Precision.HIGHEST)
filt_fn1 = lambda z: convolve2d(z, filt[:, jnp.newaxis])
filt_fn2 = lambda z: convolve2d(z, filt[jnp.newaxis, :])
num_dims = len(a.shape)
map_axes = tuple(list(range(num_dims - 3)) + [num_dims - 1])
filt_fn = lambda z: filt_fn1(filt_fn2(z))
for d in map_axes:
filt_fn = jax.vmap(filt_fn, in_axes=d, out_axes=d)
mu0 = filt_fn(a)
mu1 = filt_fn(b)
mu00 = mu0 * mu0
mu11 = mu1 * mu1
mu01 = mu0 * mu1
sigma00 = filt_fn(a**2) - mu00
sigma11 = filt_fn(b**2) - mu11
sigma01 = filt_fn(a * b) - mu01
sigma00 = jnp.maximum(0., sigma00)
sigma11 = jnp.maximum(0., sigma11)
sigma01 = jnp.sign(sigma01) * jnp.minimum(
jnp.sqrt(sigma00 * sigma11), jnp.abs(sigma01))
c1 = (k1 * max_val)**2
c2 = (k2 * max_val)**2
numer = (2 * mu01 + c1) * (2 * sigma01 + c2)
denom = (mu00 + mu11 + c1) * (sigma00 + sigma11 + c2)
ssim_map = numer / denom
ssim_value = jnp.mean(ssim_map, list(range(num_dims - 3, num_dims)))
return ssim_map if return_map else ssim_value
| true | true |
f7303a247f463ef73a520434699ce478c010e0a7 | 416 | py | Python | receitas/migrations/0005_receita_foto_receita.py | maldonadopereira/django-receitas | 72a2215abacf5e8076b57b34ebf36211a8f0afb2 | [
"MIT"
] | null | null | null | receitas/migrations/0005_receita_foto_receita.py | maldonadopereira/django-receitas | 72a2215abacf5e8076b57b34ebf36211a8f0afb2 | [
"MIT"
] | null | null | null | receitas/migrations/0005_receita_foto_receita.py | maldonadopereira/django-receitas | 72a2215abacf5e8076b57b34ebf36211a8f0afb2 | [
"MIT"
] | null | null | null | # Generated by Django 4.0.2 on 2022-02-20 01:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 4.0.2) schema migration: adds the optional
    # photo field to the Receita model. Uploaded files are stored under
    # MEDIA_ROOT/fotos/<day>/<month>/<year>/.

    dependencies = [
        ('receitas', '0004_receita_publicar'),
    ]

    operations = [
        migrations.AddField(
            model_name='receita',
            name='foto_receita',
            # blank=True makes the image optional in forms/admin validation.
            field=models.ImageField(blank=True, upload_to='fotos/%d/%m/%Y'),
        ),
    ]
| 21.894737 | 76 | 0.600962 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('receitas', '0004_receita_publicar'),
]
operations = [
migrations.AddField(
model_name='receita',
name='foto_receita',
field=models.ImageField(blank=True, upload_to='fotos/%d/%m/%Y'),
),
]
| true | true |
f7303adbd62fce274698448d9c9ef1a6103caf71 | 11,555 | py | Python | jupyter_kernel_test/msgspec_v5.py | IsraelMiles/jupyter_kernel_test | 369cf22e505820d910aaf50cdbbb6b3f51766a63 | [
"BSD-3-Clause"
] | 57 | 2015-08-04T15:45:45.000Z | 2022-02-03T23:14:31.000Z | jupyter_kernel_test/msgspec_v5.py | IsraelMiles/jupyter_kernel_test | 369cf22e505820d910aaf50cdbbb6b3f51766a63 | [
"BSD-3-Clause"
] | 55 | 2015-07-02T17:55:05.000Z | 2021-12-30T19:05:40.000Z | jupyter_kernel_test/msgspec_v5.py | IsraelMiles/jupyter_kernel_test | 369cf22e505820d910aaf50cdbbb6b3f51766a63 | [
"BSD-3-Clause"
] | 34 | 2015-07-02T17:20:43.000Z | 2022-03-28T22:25:05.000Z | """Message schemas for message spec version 5"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from jsonschema import Draft4Validator, ValidationError
import re
# Highest protocol version these schemas describe. Messages reporting a newer
# minor version are permitted to carry extra, unknown fields (see
# get_msg_content_validator / validate_message).
protocol_version = (5, 1)

# These fragments will be wrapped in the boilerplate for a valid JSON schema.
# We also add a default 'required' containing all keys.
schema_fragments = {}
def get_msg_content_validator(msg_type, version_minor):
    """Build a Draft 4 validator for the content of one message type.

    Wraps the registered fragment for ``msg_type`` in JSON-schema
    boilerplate. Unless the fragment supplies its own 'required' list,
    every declared property is mandatory.
    """
    # Newer minor protocol versions may add keys we don't know about, so
    # only forbid extra properties for versions we fully describe.
    allow_extra = version_minor > protocol_version[1]
    schema = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "description": "{} message contents schema".format(msg_type),
        "type": "object",
        "properties": {},
        "additionalProperties": allow_extra,
    }
    schema.update(schema_fragments[msg_type])
    # Require all keys by default.
    schema.setdefault("required", sorted(schema["properties"]))
    return Draft4Validator(schema)
# Schema for the common message header shared by every message on every
# channel (shell/iopub/stdin/control).
header_part = {"type": "object", "properties": {
    "msg_id": {"type": "string"},
    "username": {"type": "string"},
    "session": {"type": "string"},
    # TODO - this is parsed to a datetime before we get it:
    "date": {}, #{"type": "string"},
    "msg_type": {"type": "string"},
    "version": {"type": "string"},
}, "required": ["msg_id", "username", "session", "date", "msg_type", "version"]}

# Top-level envelope: header/parent_header/metadata/content plus optional
# binary buffers. Content is deliberately loose here — it is validated
# separately against the per-msg_type fragment.
msg_schema = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "description": "Jupyter message structure schema",
    "type": "object",
    "properties": {
        "header": header_part,
        "parent_header": {"type": "object"},
        "metadata": {"type": "object"},
        "content": {"type": "object"},  # Checked separately
        "buffers": {"type": "array"}
    },
    "required": ["header", "parent_header", "metadata", "content"],
}

# Built once at import time and reused for every message.
msg_structure_validator = Draft4Validator(msg_schema)
def get_error_reply_validator(version_minor):
    """Validator for the common ``status: 'error'`` reply content.

    Shared by every reply type in ``reply_msgs_using_status``: an error
    reply always carries the exception name, value and traceback.

    Note: ``const`` (used previously) is a JSON Schema draft-06 keyword
    that ``Draft4Validator`` silently ignores, so the status value was
    never actually checked; ``enum`` is the draft-04 equivalent.
    """
    return Draft4Validator({
        "$schema": "http://json-schema.org/draft-04/schema#",
        "description": "Jupyter 'error' reply schema",
        "type": "object",
        "properties": {
            "status": {"enum": ["error"]},
            "ename": {"type": "string"},
            "evalue": {"type": "string"},
            "traceback": {"type": "array", "items": {"type": "string"}},
        },
        "required": ["status", "ename", "evalue", "traceback"],
        # Newer minor protocol versions may legitimately add extra fields.
        "additionalProperties": version_minor > protocol_version[1]
    })
def get_abort_reply_validator(version_minor):
    """Validator for the ``status: 'abort'`` reply content.

    Fixes two copy/paste defects in the previous schema: the status value
    here is 'abort' (not 'error' — validate_message only dispatches to this
    validator when status == 'abort'), and draft-04 spells the sequence
    type "array", not "list" (an unknown type name makes jsonschema raise
    at validation time). ``enum`` replaces the draft-06 ``const`` keyword,
    which ``Draft4Validator`` silently ignores.

    NOTE(review): the messaging spec suggests aborted replies may carry
    only "status"; the legacy required list is kept unchanged here so the
    check is not loosened — confirm against the kernels under test.
    """
    return Draft4Validator({
        "$schema": "http://json-schema.org/draft-04/schema#",
        "description": "Jupyter 'abort' reply schema",
        "type": "object",
        "properties": {
            "status": {"enum": ["abort"]},
            "ename": {"type": "string"},
            "evalue": {"type": "string"},
            "traceback": {"type": "array", "items": {"type": "string"}},
        },
        "required": ["status", "ename", "evalue", "traceback"],
        # Newer minor protocol versions may legitimately add extra fields.
        "additionalProperties": version_minor > protocol_version[1]
    })
# Reply types whose content carries a 'status' key following the common
# ok/error/abort structure; validate_message dispatches on that key. Their
# 'ok' content schemas live in schema_fragments.
reply_msgs_using_status = {
    'execute_reply', 'inspect_reply', 'complete_reply', 'history_reply',
    'connect_reply', 'comm_info_reply', 'kernel_info_reply', 'shutdown_reply',
    'interrupt_reply',
}
def validate_message(msg, msg_type=None, parent_id=None):
    """Validate a deserialized Jupyter message against the v5 spec.

    Checks, in order: the overall envelope structure, the expected
    ``msg_type`` (if given), absence of unknown keys (only for protocol
    versions we fully describe), the parent message id for replies, and
    finally the per-type content schema (dispatching on ``status`` for
    replies that use the common ok/error/abort structure).

    Args:
        msg: The message dict (header/parent_header/metadata/content).
        msg_type: If given, require ``msg['header']['msg_type']`` to match.
        parent_id: If given, require reply messages to reference this id in
            their parent header.

    Raises:
        ValidationError: If any of the checks fails.
    """
    msg_structure_validator.validate(msg)

    msg_version_s = msg['header']['version']
    m = re.match(r'(\d+)\.(\d+)', msg_version_s)
    if not m:
        # Bug fix: the offending version string was never interpolated into
        # the error message (the '{}' placeholder was left unformatted).
        raise ValidationError(
            "Version {} not like 'x.y'".format(msg_version_s))
    version_minor = int(m.group(2))

    if msg_type is not None:
        if msg['header']['msg_type'] != msg_type:
            raise ValidationError("Message type {!r} != {!r}".format(
                msg['header']['msg_type'], msg_type
            ))
    else:
        msg_type = msg['header']['msg_type']

    # Check for unexpected fields, unless it's a newer protocol version
    if version_minor <= protocol_version[1]:
        unx_top = set(msg) - set(msg_schema['properties'])
        if unx_top:
            raise ValidationError("Unexpected keys: {}".format(unx_top))
        unx_header = set(msg['header']) - set(header_part['properties'])
        if unx_header:
            raise ValidationError("Unexpected keys in header: {}".format(unx_header))

    # Check the parent id
    if 'reply' in msg_type and parent_id and msg['parent_header']['msg_id'] != parent_id:
        raise ValidationError("Parent header does not match expected")

    if msg_type in reply_msgs_using_status:
        # Most _reply messages have common 'error' and 'abort' structures
        try:
            status = msg['content']['status']
        except KeyError as e:
            raise ValidationError(str(e))
        if status == 'error':
            content_vdor = get_error_reply_validator(version_minor)
        elif status == 'abort':
            content_vdor = get_abort_reply_validator(version_minor)
        elif status == 'ok':
            content_vdor = get_msg_content_validator(msg_type, version_minor)
        else:
            raise ValidationError(
                "status {!r} should be ok/error/abort".format(status))
    else:
        content_vdor = get_msg_content_validator(msg_type, version_minor)

    content_vdor.validate(msg['content'])
# Shell messages ----------------------------------------------
# Each fragment below is merged into draft-04 boilerplate by
# get_msg_content_validator; unless a fragment lists 'required', every
# declared property is mandatory.
# NOTE(review): "const" is a draft-06 keyword that Draft4Validator ignores,
# so the "status" values in these fragments are not actually enforced.

schema_fragments['execute_request'] = {"properties": {
    "code": {"type": "string"},
    "silent": {"type": "boolean"},
    "store_history": {"type": "boolean"},
    "user_expressions": {"type": "object"},
    "allow_stdin": {"type": "boolean"},
    "stop_on_error": {"type": "boolean"}
}}
schema_fragments['execute_reply'] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
    "execution_count": {"type": "number"},
    "payload": {"type": "array", "items": {
        "type": "object",
        "properties": {"source": {"type": "string"}},
        "additionalProperties": True,
    }},
    "user_expressions": {"type": "object"},
}, "required": ["status", "execution_count"]}

schema_fragments['inspect_request'] = {"properties": {
    "code": {"type": "string"},
    "cursor_pos": {"type": "number"},
    "detail_level": {"enum": [0, 1]},
}}
schema_fragments['inspect_reply'] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
    "found": {"type": "boolean"},
    "data": {"type": "object"},
    "metadata": {"type": "object"},
}}

schema_fragments['complete_request'] = {"properties": {
    "code": {"type": "string"},
    "cursor_pos": {"type": "number"},
}}
schema_fragments['complete_reply'] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
    "matches": {"type": "array", "items": {"type": "string"}},
    "cursor_start": {"type": "number"},
    "cursor_end": {"type": "number"},
    "metadata": {"type": "object"},
}}

schema_fragments['history_request'] = {"properties": {
    'output' : {"type": "boolean"},
    'raw' : {"type": "boolean"},
    'hist_access_type' : {"enum": ["range", "tail", "search"]},
    'session' : {"type": "number"},
    'start' : {"type": "number"},
    'stop' : {"type": "number"},
    'n' : {"type": "number"},
    'pattern' : {"type": "string"},
    'unique' : {"type": "boolean"},
}, "required": ["output", "raw", "hist_access_type"]}
schema_fragments['history_reply'] = {"properties": {
    "status": {"const": "ok"},
    # Each history entry is a (session, line_number, input[/output]) triple.
    "history": {"type": "array", "items": {
        "minItems": 3, "maxItems": 3
    }}
}}
schema_fragments['is_complete_request'] = {"properties": {
    "code": {"type": "string"},
}}
schema_fragments['is_complete_reply'] = {"properties": {
    "status": {"enum": ["complete", "incomplete", "invalid", "unknown"]},
    # 'indent' is only meaningful for 'incomplete', hence not required.
    "indent": {"type": "string"}
}, "required": ["status"]}

# NB connect_request is deprecated
schema_fragments["connect_request"] = {"properties": {}}
schema_fragments["connect_reply"] = {"properties": {
    "shell_port": {"type": "number"},
    "iopub_port": {"type": "number"},
    "stdin_port": {"type": "number"},
    "hb_port": {"type": "number"},
    "control_port": {"type": "number"},
}}

schema_fragments["comm_info_request"] = {"properties": {
    "target_name": {"type": "string"},
}, "required": []}  # empty 'required' makes target_name optional
schema_fragments["comm_info_reply"] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
    "comms": {"type": "object"},
}}

schema_fragments["kernel_info_request"] = {"properties": {}}
schema_fragments["kernel_info_reply"] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
    "protocol_version": {"type": "string"},
    "implementation": {"type": "string"},
    "implementation_version": {"type": "string"},
    "language_info": {"type": "object"},
    "banner": {"type": "string"},
    "debugger": {"type": "boolean"},
    "help_links": {"type": "array", "items": {"type": "object", "properties": {
        "text": {"type": "string"},
        "url": {"type": "string"}
    }}}
}, "required": ["status", "protocol_version", "implementation", "language_info", "banner"]}

schema_fragments['shutdown_request'] = {"properties": {
    "restart": {"type": "boolean"},
}}
schema_fragments['shutdown_reply'] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
    "restart": {"type": "boolean"},
}}

schema_fragments["interrupt_request"] = {"properties": {}}
schema_fragments["interrupt_reply"] = {"properties": {
    # statuses 'error' and 'abort' change the structure, so check separately
    "status": {"const": "ok"},
}}
# IOPub messages ----------------------------------------------

# A display 'data' dict: keys must look like MIME types (e.g. "text/plain");
# any value is allowed, and non-MIME-shaped keys are rejected.
mime_data = {
    "type":"object",
    "patternProperties": {r'^[\w\-\+\.]+/[\w\-\+\.]+$': {}},
    "additionalProperties": False,
}

schema_fragments['stream'] = {"properties": {
    "name": {"enum": ["stdout", "stderr"]},
    "text": {"type": "string"},
}}
schema_fragments['display_data'] = {"properties": {
    "data": mime_data,
    "metadata": {"type": "object"},
    "transient": {"type": "object"},
}, "required": ["data", "metadata"]}
schema_fragments['update_display_data'] = {"properties": {
    "data": mime_data,
    "metadata": {"type": "object"},
    "transient": {"type": "object"},
}}
schema_fragments['execute_result'] = {"properties": {
    "execution_count": {"type": "number"},
    "data": mime_data,
    "metadata": {"type": "object"},
    "transient": {"type": "object"},
}, "required": ["execution_count", "data", "metadata"]}
schema_fragments['clear_output'] = {"properties": {
    "wait": {"type": "boolean"},
}}
schema_fragments['execute_input'] = {"properties": {
    "code": {"type": "string"},
    "execution_count": {"type": "number"},
}}
schema_fragments['error'] = {"properties": {
    "ename": {"type": "string"},
    "evalue": {"type": "string"},
    "traceback": {"type": "array", "items": {"type": "string"}},
}}
schema_fragments['status'] = {"properties": {
    "execution_state": {"enum": ["busy", "idle", "starting"]},
}}
# Stdin messages ---------------------------------------------

schema_fragments["input_request"] = {"properties": {
    "prompt": {"type": "string"},
    # Bug fix: the protocol defines 'password' as a boolean flag ("should
    # the frontend obscure the input?"); the previous {"type": "number"}
    # rejected every spec-conforming kernel's input_request.
    "password": {"type": "boolean"},
}}
schema_fragments["input_reply"] = {"properties": {
    "value": {"type": "string"},
}}
| 34.186391 | 91 | 0.590134 |
from jsonschema import Draft4Validator, ValidationError
import re
protocol_version = (5, 1)
schema_fragments = {}
def get_msg_content_validator(msg_type, version_minor):
frag = schema_fragments[msg_type]
schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "{} message contents schema".format(msg_type),
"type": "object",
"properties": {},
"additionalProperties": version_minor > protocol_version[1],
}
schema.update(frag)
if "required" not in schema:
schema["required"] = sorted(schema["properties"].keys())
return Draft4Validator(schema)
header_part = {"type": "object", "properties": {
"msg_id": {"type": "string"},
"username": {"type": "string"},
"session": {"type": "string"},
"date": {},
"msg_type": {"type": "string"},
"version": {"type": "string"},
}, "required": ["msg_id", "username", "session", "date", "msg_type", "version"]}
msg_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Jupyter message structure schema",
"type": "object",
"properties": {
"header": header_part,
"parent_header": {"type": "object"},
"metadata": {"type": "object"},
"content": {"type": "object"},
"buffers": {"type": "array"}
},
"required": ["header", "parent_header", "metadata", "content"],
}
msg_structure_validator = Draft4Validator(msg_schema)
def get_error_reply_validator(version_minor):
return Draft4Validator({
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Jupyter 'error' reply schema",
"type": "object",
"properties": {
"status": {"const": "error"},
"ename": {"type": "string"},
"evalue": {"type": "string"},
"traceback": {"type": "array", "items": {"type": "string"}},
},
"required": ["status", "ename", "evalue", "traceback"],
"additionalProperties": version_minor > protocol_version[1]
})
def get_abort_reply_validator(version_minor):
return Draft4Validator({
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Jupyter 'abort' reply schema",
"type": "object",
"properties": {
"status": {"const": "error"},
"ename": {"type": "string"},
"evalue": {"type": "string"},
"traceback": {"type": "list", "items": {"type": "string"}},
},
"required": ["status", "ename", "evalue", "traceback"],
"additionalProperties": version_minor > protocol_version[1]
})
reply_msgs_using_status = {
'execute_reply', 'inspect_reply', 'complete_reply', 'history_reply',
'connect_reply', 'comm_info_reply', 'kernel_info_reply', 'shutdown_reply',
'interrupt_reply',
}
def validate_message(msg, msg_type=None, parent_id=None):
msg_structure_validator.validate(msg)
msg_version_s = msg['header']['version']
m = re.match(r'(\d+)\.(\d+)', msg_version_s)
if not m:
raise ValidationError("Version {} not like 'x.y'")
version_minor = int(m.group(2))
if msg_type is not None:
if msg['header']['msg_type'] != msg_type:
raise ValidationError("Message type {!r} != {!r}".format(
msg['header']['msg_type'], msg_type
))
else:
msg_type = msg['header']['msg_type']
if version_minor <= protocol_version[1]:
unx_top = set(msg) - set(msg_schema['properties'])
if unx_top:
raise ValidationError("Unexpected keys: {}".format(unx_top))
unx_header = set(msg['header']) - set(header_part['properties'])
if unx_header:
raise ValidationError("Unexpected keys in header: {}".format(unx_header))
# Check the parent id
if 'reply' in msg_type and parent_id and msg['parent_header']['msg_id'] != parent_id:
raise ValidationError("Parent header does not match expected")
if msg_type in reply_msgs_using_status:
# Most _reply messages have common 'error' and 'abort' structures
try:
status = msg['content']['status']
except KeyError as e:
raise ValidationError(str(e))
if status == 'error':
content_vdor = get_error_reply_validator(version_minor)
elif status == 'abort':
content_vdor = get_abort_reply_validator(version_minor)
elif status == 'ok':
content_vdor = get_msg_content_validator(msg_type, version_minor)
else:
raise ValidationError(
"status {!r} should be ok/error/abort".format(status))
else:
content_vdor = get_msg_content_validator(msg_type, version_minor)
content_vdor.validate(msg['content'])
# Shell messages ----------------------------------------------
schema_fragments['execute_request'] = {"properties": {
"code": {"type": "string"},
"silent": {"type": "boolean"},
"store_history": {"type": "boolean"},
"user_expressions": {"type": "object"},
"allow_stdin": {"type": "boolean"},
"stop_on_error": {"type": "boolean"}
}}
schema_fragments['execute_reply'] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
"execution_count": {"type": "number"},
"payload": {"type": "array", "items": {
"type": "object",
"properties": {"source": {"type": "string"}},
"additionalProperties": True,
}},
"user_expressions": {"type": "object"},
}, "required": ["status", "execution_count"]}
schema_fragments['inspect_request'] = {"properties": {
"code": {"type": "string"},
"cursor_pos": {"type": "number"},
"detail_level": {"enum": [0, 1]},
}}
schema_fragments['inspect_reply'] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
"found": {"type": "boolean"},
"data": {"type": "object"},
"metadata": {"type": "object"},
}}
schema_fragments['complete_request'] = {"properties": {
"code": {"type": "string"},
"cursor_pos": {"type": "number"},
}}
schema_fragments['complete_reply'] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
"matches": {"type": "array", "items": {"type": "string"}},
"cursor_start": {"type": "number"},
"cursor_end": {"type": "number"},
"metadata": {"type": "object"},
}}
schema_fragments['history_request'] = {"properties": {
'output' : {"type": "boolean"},
'raw' : {"type": "boolean"},
'hist_access_type' : {"enum": ["range", "tail", "search"]},
'session' : {"type": "number"},
'start' : {"type": "number"},
'stop' : {"type": "number"},
'n' : {"type": "number"},
'pattern' : {"type": "string"},
'unique' : {"type": "boolean"},
}, "required": ["output", "raw", "hist_access_type"]}
schema_fragments['history_reply'] = {"properties": {
"status": {"const": "ok"},
"history": {"type": "array", "items": {
"minItems": 3, "maxItems": 3
}}
}}
schema_fragments['is_complete_request'] = {"properties": {
"code": {"type": "string"},
}}
schema_fragments['is_complete_reply'] = {"properties": {
"status": {"enum": ["complete", "incomplete", "invalid", "unknown"]},
"indent": {"type": "string"}
}, "required": ["status"]}
# NB connect_request is deprecated
schema_fragments["connect_request"] = {"properties": {}}
schema_fragments["connect_reply"] = {"properties": {
"shell_port": {"type": "number"},
"iopub_port": {"type": "number"},
"stdin_port": {"type": "number"},
"hb_port": {"type": "number"},
"control_port": {"type": "number"},
}}
schema_fragments["comm_info_request"] = {"properties": {
"target_name": {"type": "string"},
}, "required": []}
schema_fragments["comm_info_reply"] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
"comms": {"type": "object"},
}}
schema_fragments["kernel_info_request"] = {"properties": {}}
schema_fragments["kernel_info_reply"] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
"protocol_version": {"type": "string"},
"implementation": {"type": "string"},
"implementation_version": {"type": "string"},
"language_info": {"type": "object"},
"banner": {"type": "string"},
"debugger": {"type": "boolean"},
"help_links": {"type": "array", "items": {"type": "object", "properties": {
"text": {"type": "string"},
"url": {"type": "string"}
}}}
}, "required": ["status", "protocol_version", "implementation", "language_info", "banner"]}
schema_fragments['shutdown_request'] = {"properties": {
"restart": {"type": "boolean"},
}}
schema_fragments['shutdown_reply'] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
"restart": {"type": "boolean"},
}}
schema_fragments["interrupt_request"] = {"properties": {}}
schema_fragments["interrupt_reply"] = {"properties": {
# statuses 'error' and 'abort' change the structure, so check separately
"status": {"const": "ok"},
}}
# IOPub messages ----------------------------------------------
mime_data = {
"type":"object",
"patternProperties": {r'^[\w\-\+\.]+/[\w\-\+\.]+$': {}},
"additionalProperties": False,
}
schema_fragments['stream'] = {"properties": {
"name": {"enum": ["stdout", "stderr"]},
"text": {"type": "string"},
}}
schema_fragments['display_data'] = {"properties": {
"data": mime_data,
"metadata": {"type": "object"},
"transient": {"type": "object"},
}, "required": ["data", "metadata"]}
schema_fragments['update_display_data'] = {"properties": {
"data": mime_data,
"metadata": {"type": "object"},
"transient": {"type": "object"},
}}
schema_fragments['execute_result'] = {"properties": {
"execution_count": {"type": "number"},
"data": mime_data,
"metadata": {"type": "object"},
"transient": {"type": "object"},
}, "required": ["execution_count", "data", "metadata"]}
schema_fragments['clear_output'] = {"properties": {
"wait": {"type": "boolean"},
}}
schema_fragments['execute_input'] = {"properties": {
"code": {"type": "string"},
"execution_count": {"type": "number"},
}}
schema_fragments['error'] = {"properties": {
"ename": {"type": "string"},
"evalue": {"type": "string"},
"traceback": {"type": "array", "items": {"type": "string"}},
}}
schema_fragments['status'] = {"properties": {
"execution_state": {"enum": ["busy", "idle", "starting"]},
}}
# Stdin messages ---------------------------------------------
schema_fragments["input_request"] = {"properties": {
"prompt": {"type": "string"},
"password": {"type": "number"},
}}
schema_fragments["input_reply"] = {"properties": {
"value": {"type": "string"},
}}
| true | true |
f7303b21e2e3db21c47520be7c83cb2644cc4758 | 466 | py | Python | env/Lib/site-packages/plotly/validators/scattercarpet/marker/colorbar/_xpad.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 11,750 | 2015-10-12T07:03:39.000Z | 2022-03-31T20:43:15.000Z | env/Lib/site-packages/plotly/validators/scattercarpet/marker/colorbar/_xpad.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2,951 | 2015-10-12T00:41:25.000Z | 2022-03-31T22:19:26.000Z | env/Lib/site-packages/plotly/validators/scattercarpet/marker/colorbar/_xpad.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2,623 | 2015-10-15T14:40:27.000Z | 2022-03-28T16:05:50.000Z | import _plotly_utils.basevalidators
class XpadValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the ``scattercarpet.marker.colorbar.xpad`` property.

    Auto-generated Plotly boilerplate: a non-negative number (min=0),
    presumably the colorbar's padding along x in pixels — confirm against
    the Plotly figure reference.
    """

    def __init__(
        self, plotly_name="xpad", parent_name="scattercarpet.marker.colorbar", **kwargs
    ):
        super(XpadValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Edits to this property trigger a 'colorbars' update, not a full replot.
            edit_type=kwargs.pop("edit_type", "colorbars"),
            # Padding cannot be negative.
            min=kwargs.pop("min", 0),
            **kwargs
        )
| 31.066667 | 87 | 0.641631 | import _plotly_utils.basevalidators
class XpadValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="xpad", parent_name="scattercarpet.marker.colorbar", **kwargs
):
super(XpadValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 0),
**kwargs
)
| true | true |
f7303bca22e1ac8527afeffd1d88492df41a22c8 | 1,874 | py | Python | Sources/ospboard/opt/osp/share/scripts/system_check.py | nihospr01/OpenSpeechPlatform | 799fb5baa5b8cdfad0f5387dd48b394adc583ede | [
"BSD-2-Clause"
] | null | null | null | Sources/ospboard/opt/osp/share/scripts/system_check.py | nihospr01/OpenSpeechPlatform | 799fb5baa5b8cdfad0f5387dd48b394adc583ede | [
"BSD-2-Clause"
] | null | null | null | Sources/ospboard/opt/osp/share/scripts/system_check.py | nihospr01/OpenSpeechPlatform | 799fb5baa5b8cdfad0f5387dd48b394adc583ede | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python3
import sys
import time
import datetime
import os
import psutil
def main():
    """Print a one-shot health report for the board.

    Reads CPU temperatures, per-core frequencies and frequency-residency
    statistics from sysfs, samples CPU utilisation over a one-second
    window via psutil, then dumps OSP process scheduling info, disk usage
    and the charge log.
    """

    def read_first_line(path):
        # sysfs attributes are single-value pseudo-files; one line suffices.
        with open(path) as f:
            return f.readline()

    now = datetime.datetime.now()
    temp0 = read_first_line("/sys/class/thermal/thermal_zone0/temp")
    temp1 = read_first_line("/sys/class/thermal/thermal_zone1/temp")
    freq = [
        read_first_line(f"/sys/devices/system/cpu/cpu{cpu}/cpufreq/cpuinfo_cur_freq")
        for cpu in range(4)
    ]
    with open("/sys/devices/system/cpu/cpu0/cpufreq/stats/time_in_state") as f:
        time_in_state = f.read()

    # Thermal zone values are reported in millidegrees Celsius.
    print(f"\n{now.strftime('%H:%M:%S')}\t CPU0-1: {float(temp0) / 1000} ℃\t\tCPU2-3: {float(temp1) / 1000} ℃")

    # The first call primes psutil's per-CPU counters; the second returns
    # utilisation over the intervening one-second window.
    cpu = psutil.cpu_times_percent(percpu=True)
    time.sleep(1)
    cpu = psutil.cpu_times_percent(percpu=True)
    print(f"\nCPU busy (%) (1-4) : {100-cpu[0].idle:.2f} {100-cpu[1].idle:.2f} {100-cpu[2].idle:.2f} {100-cpu[3].idle:.2f}")

    print(f"\nCPU freq (kHz) (1-4) : {int(freq[0])/1000} {int(freq[1])/1000} {int(freq[2])/1000} {int(freq[3])/1000}")

    # Each time_in_state line is "<freq_khz> <ticks>"; report each state's
    # share of total residency as an integer percentage.
    print("\nTIME IN STATE\n-------------\nkHz Percent\n-------------")
    entries = [line.split() for line in time_in_state.split('\n') if line]
    total = sum(int(ticks) for _, ticks in entries)
    for khz, ticks in entries:
        print(f"{int(int(khz)/1000)} {int(int(ticks) / total * 100)}")

    print("\nOSP Status")
    os.system('ps -T -p `pgrep OSP` -o cpuid,cls,pri,pcpu,lwp,comm')

    diskfree = psutil.disk_usage('/').percent
    print(f"\nDiskfree: {diskfree}%")

    print("\nCharge Log\n----------")
    with open("/var/log/charge.log") as f:
        print(f.read())


if __name__ == '__main__':
    sys.exit(main())
| 30.225806 | 133 | 0.577375 |
import sys
import time
import datetime
import os
import psutil
def main():
CurrentTime = datetime.datetime.now()
with open(r"/sys/class/thermal/thermal_zone0/temp") as f:
CurrentTemp0 = f.readline()
with open(r"/sys/class/thermal/thermal_zone1/temp") as f:
CurrentTemp1 = f.readline()
freq = []
for i in range(4):
with open(f"/sys/devices/system/cpu/cpu{i}/cpufreq/cpuinfo_cur_freq") as f:
freq.append(f.readline())
with open(r"/sys/devices/system/cpu/cpu0/cpufreq/stats/time_in_state") as f:
time_in_state = f.read()
print(f"\n{CurrentTime.strftime('%H:%M:%S')}\t CPU0-1: {float(CurrentTemp0) / 1000} ℃\t\tCPU2-3: {float(CurrentTemp1) / 1000} ℃")
cpu = psutil.cpu_times_percent(percpu=True)
time.sleep(1)
cpu = psutil.cpu_times_percent(percpu=True)
print(f"\nCPU busy (%) (1-4) : {100-cpu[0].idle:.2f} {100-cpu[1].idle:.2f} {100-cpu[2].idle:.2f} {100-cpu[3].idle:.2f}")
print(f"\nCPU freq (kHz) (1-4) : {int(freq[0])/1000} {int(freq[1])/1000} {int(freq[2])/1000} {int(freq[3])/1000}")
print("\nTIME IN STATE\n-------------\nkHz Percent\n-------------")
total = 0
for t in time_in_state.split('\n'):
if t:
freq, per = t.split()
total += int(per)
for t in time_in_state.split('\n'):
if t:
freq, per = t.split()
freq = int(int(freq)/1000)
per = int(int(per) / total * 100)
print(f"{freq} {per}")
print("\nOSP Status")
os.system('ps -T -p `pgrep OSP` -o cpuid,cls,pri,pcpu,lwp,comm')
diskfree = psutil.disk_usage('/').percent
print(f"\nDiskfree: {diskfree}%")
print("\nCharge Log\n----------")
with open(r"/var/log/charge.log") as f:
print(f.read())
if __name__ == '__main__':
sys.exit(main())
| true | true |
f7303bebd882517201c20381d045a17ff877a40a | 6,044 | py | Python | passengerCOVIDscan/glove_detection/tensorflow_infer.py | pradip026/passengerCOVIDscan | 1ebbe23beb91963679a97d8e9fe45354c47bbbff | [
"MIT"
] | null | null | null | passengerCOVIDscan/glove_detection/tensorflow_infer.py | pradip026/passengerCOVIDscan | 1ebbe23beb91963679a97d8e9fe45354c47bbbff | [
"MIT"
] | null | null | null | passengerCOVIDscan/glove_detection/tensorflow_infer.py | pradip026/passengerCOVIDscan | 1ebbe23beb91963679a97d8e9fe45354c47bbbff | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
import cv2
import time
import argparse
import os
import numpy as np
from PIL import Image
#from keras.models import model_from_json
from .utils.anchor_generator import generate_anchors
from .utils.anchor_decode import decode_bbox
from .utils.nms import single_class_non_max_suppression
from .load_model.tensorflow_loader import load_tf_model, tf_inference
MODEL_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "models/face_mask_detection.pb")
sess, graph = load_tf_model(MODEL_PATH)
# anchor configuration
feature_map_sizes = [[33, 33], [17, 17], [9, 9], [5, 5], [3, 3]]
anchor_sizes = [[0.04, 0.056], [0.08, 0.11], [0.16, 0.22], [0.32, 0.45], [0.64, 0.72]]
anchor_ratios = [[1, 0.62, 0.42]] * 5
# generate anchors
anchors = generate_anchors(feature_map_sizes, anchor_sizes, anchor_ratios)
# for inference , the batch size is 1, the model output shape is [1, N, 4],
# so we expand dim for anchors to [1, anchor_num, 4]
anchors_exp = np.expand_dims(anchors, axis=0)
id2class = {0: 'glove', 1: 'Noglove'}
def inference(image,
conf_thresh=0.5,
iou_thresh=0.4,
target_shape=(160, 160),
draw_result=True,
show_result=True
):
'''
Main function of detection inference
:param image: 3D numpy array of image
:param conf_thresh: the min threshold of classification probabity.
:param iou_thresh: the IOU threshold of NMS
:param target_shape: the model input size.
:param draw_result: whether to daw bounding box to the image.
:param show_result: whether to display the image.
:return:
'''
# image = np.copy(image)
output_info = []
height, width, _ = image.shape
image_resized = cv2.resize(image, target_shape)
image_np = image_resized / 255.0 # 归一化到0~1
image_exp = np.expand_dims(image_np, axis=0)
y_bboxes_output, y_cls_output = tf_inference(sess, graph, image_exp)
# remove the batch dimension, for batch is always 1 for inference.
y_bboxes = decode_bbox(anchors_exp, y_bboxes_output)[0]
y_cls = y_cls_output[0]
# To speed up, do single class NMS, not multiple classes NMS.
bbox_max_scores = np.max(y_cls, axis=1)
bbox_max_score_classes = np.argmax(y_cls, axis=1)
# keep_idx is the alive bounding box after nms.
keep_idxs = single_class_non_max_suppression(y_bboxes,
bbox_max_scores,
conf_thresh=conf_thresh,
iou_thresh=iou_thresh,
)
for idx in keep_idxs:
conf = float(bbox_max_scores[idx])
class_id = bbox_max_score_classes[idx]
bbox = y_bboxes[idx]
# clip the coordinate, avoid the value exceed the image boundary.
xmin = max(0, int(bbox[0] * width))
ymin = max(0, int(bbox[1] * height))
xmax = min(int(bbox[2] * width), width)
ymax = min(int(bbox[3] * height), height)
if draw_result:
if class_id == 0:
color = (0, 255, 0)
else:
color = (255, 0, 0)
cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 2)
cv2.putText(image, "%s: %.2f" % (id2class[class_id], conf), (xmin + 2, ymin - 2),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, color)
output_info.append([class_id, conf, xmin, ymin, xmax, ymax])
if show_result:
Image.fromarray(image).show()
return output_info
def run_on_video(video_path, output_video_name, conf_thresh):
cap = cv2.VideoCapture(video_path)
height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
fps = cap.get(cv2.CAP_PROP_FPS)
fourcc = cv2.VideoWriter_fourcc(*'XVID')
# writer = cv2.VideoWriter(output_video_name, fourcc, int(fps), (int(width), int(height)))
total_frames = cap.get(cv2.CAP_PROP_FRAME_COUNT)
if not cap.isOpened():
raise ValueError("Video open failed.")
return
status = True
idx = 0
while status:
start_stamp = time.time()
status, img_raw = cap.read()
img_raw = cv2.cvtColor(img_raw, cv2.COLOR_BGR2RGB)
read_frame_stamp = time.time()
if (status):
inference(img_raw,
conf_thresh,
iou_thresh=0.5,
target_shape=(260, 260),
draw_result=True,
show_result=False)
cv2.imshow('image', img_raw[:, :, ::-1])
cv2.waitKey(1)
inference_stamp = time.time()
# writer.write(img_raw)
write_frame_stamp = time.time()
idx += 1
print("%d of %d" % (idx, total_frames))
print("read_frame:%f, infer time:%f, write time:%f" % (read_frame_stamp - start_stamp,
inference_stamp - read_frame_stamp,
write_frame_stamp - inference_stamp))
# writer.release()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Face Mask Detection")
parser.add_argument('--img-mode', type=int, default=1, help='set 1 to run on image, 0 to run on video.')
parser.add_argument('--img-path', type=str, help='path to your image.')
parser.add_argument('--video-path', type=str, default='0', help='path to your video, `0` means to use camera.')
# parser.add_argument('--hdf5', type=str, help='keras hdf5 file')
args = parser.parse_args()
if args.img_mode:
imgPath = args.img_path
img = cv2.imread(imgPath)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
inference(img, show_result=True, target_shape=(260, 260))
else:
video_path = args.video_path
if args.video_path == '0':
video_path = 0
run_on_video(video_path, '', conf_thresh=0.5)
| 40.293333 | 115 | 0.603739 |
import cv2
import time
import argparse
import os
import numpy as np
from PIL import Image
from .utils.anchor_generator import generate_anchors
from .utils.anchor_decode import decode_bbox
from .utils.nms import single_class_non_max_suppression
from .load_model.tensorflow_loader import load_tf_model, tf_inference
MODEL_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "models/face_mask_detection.pb")
sess, graph = load_tf_model(MODEL_PATH)
feature_map_sizes = [[33, 33], [17, 17], [9, 9], [5, 5], [3, 3]]
anchor_sizes = [[0.04, 0.056], [0.08, 0.11], [0.16, 0.22], [0.32, 0.45], [0.64, 0.72]]
anchor_ratios = [[1, 0.62, 0.42]] * 5
anchors = generate_anchors(feature_map_sizes, anchor_sizes, anchor_ratios)
anchors_exp = np.expand_dims(anchors, axis=0)
id2class = {0: 'glove', 1: 'Noglove'}
def inference(image,
conf_thresh=0.5,
iou_thresh=0.4,
target_shape=(160, 160),
draw_result=True,
show_result=True
):
output_info = []
height, width, _ = image.shape
image_resized = cv2.resize(image, target_shape)
image_np = image_resized / 255.0
image_exp = np.expand_dims(image_np, axis=0)
y_bboxes_output, y_cls_output = tf_inference(sess, graph, image_exp)
y_bboxes = decode_bbox(anchors_exp, y_bboxes_output)[0]
y_cls = y_cls_output[0]
bbox_max_scores = np.max(y_cls, axis=1)
bbox_max_score_classes = np.argmax(y_cls, axis=1)
keep_idxs = single_class_non_max_suppression(y_bboxes,
bbox_max_scores,
conf_thresh=conf_thresh,
iou_thresh=iou_thresh,
)
for idx in keep_idxs:
conf = float(bbox_max_scores[idx])
class_id = bbox_max_score_classes[idx]
bbox = y_bboxes[idx]
xmin = max(0, int(bbox[0] * width))
ymin = max(0, int(bbox[1] * height))
xmax = min(int(bbox[2] * width), width)
ymax = min(int(bbox[3] * height), height)
if draw_result:
if class_id == 0:
color = (0, 255, 0)
else:
color = (255, 0, 0)
cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 2)
cv2.putText(image, "%s: %.2f" % (id2class[class_id], conf), (xmin + 2, ymin - 2),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, color)
output_info.append([class_id, conf, xmin, ymin, xmax, ymax])
if show_result:
Image.fromarray(image).show()
return output_info
def run_on_video(video_path, output_video_name, conf_thresh):
cap = cv2.VideoCapture(video_path)
height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
fps = cap.get(cv2.CAP_PROP_FPS)
fourcc = cv2.VideoWriter_fourcc(*'XVID')
total_frames = cap.get(cv2.CAP_PROP_FRAME_COUNT)
if not cap.isOpened():
raise ValueError("Video open failed.")
return
status = True
idx = 0
while status:
start_stamp = time.time()
status, img_raw = cap.read()
img_raw = cv2.cvtColor(img_raw, cv2.COLOR_BGR2RGB)
read_frame_stamp = time.time()
if (status):
inference(img_raw,
conf_thresh,
iou_thresh=0.5,
target_shape=(260, 260),
draw_result=True,
show_result=False)
cv2.imshow('image', img_raw[:, :, ::-1])
cv2.waitKey(1)
inference_stamp = time.time()
write_frame_stamp = time.time()
idx += 1
print("%d of %d" % (idx, total_frames))
print("read_frame:%f, infer time:%f, write time:%f" % (read_frame_stamp - start_stamp,
inference_stamp - read_frame_stamp,
write_frame_stamp - inference_stamp))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Face Mask Detection")
parser.add_argument('--img-mode', type=int, default=1, help='set 1 to run on image, 0 to run on video.')
parser.add_argument('--img-path', type=str, help='path to your image.')
parser.add_argument('--video-path', type=str, default='0', help='path to your video, `0` means to use camera.')
args = parser.parse_args()
if args.img_mode:
imgPath = args.img_path
img = cv2.imread(imgPath)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
inference(img, show_result=True, target_shape=(260, 260))
else:
video_path = args.video_path
if args.video_path == '0':
video_path = 0
run_on_video(video_path, '', conf_thresh=0.5)
| true | true |
f7303c0eda96006944c2233d97ba5ca90c986d6f | 2,295 | py | Python | benchresults/rankselect/bitsize_table.py | pacman616/hybrid-fenwick-tree | 84e7cc8aa84b87937b98d85f3c2ed1998c0d79af | [
"MIT"
] | 4 | 2019-01-10T17:55:43.000Z | 2019-11-26T09:33:38.000Z | benchresults/rankselect/bitsize_table.py | pacman616/hybrid-fenwick-tree | 84e7cc8aa84b87937b98d85f3c2ed1998c0d79af | [
"MIT"
] | null | null | null | benchresults/rankselect/bitsize_table.py | pacman616/hybrid-fenwick-tree | 84e7cc8aa84b87937b98d85f3c2ed1998c0d79af | [
"MIT"
] | null | null | null | from collections import OrderedDict
# Data from: benchresults/rankselect/data/rankselect_nohugetlb/bitsize.csv
# Numbers to LaTeX regex: :s/\([0-9]\.?[0-9]*\)\([,\" ]\)/$\1$\2/g
lbls = "Elements,fixed[F],fixed[$\ell$],byte[F],byte[$\ell$],bit[F],bit[$\ell$],fixed[$16$]fixed,byte[$16$]byte,bit[$16$]bit,fixed[$16$]byte,fixed[$16$]bit,byte[$16$]bit,fixed[F]$8$,fixed[$\ell$]$8$,byte[F]$8$,byte[$\ell$]$8$,bit[F]$8$,bit[$\ell$]$8$,fixed[$16$]fixed$8$,byte[$16$]byte$8$,bit[$16$]bit$8$,fixed[$16$]byte$8$,fixed[$16$]bit$8$,byte[$16$]bit$8$,fixed[F]$16$,fixed[$\ell$]$16$,byte[F]$16$,byte[$\ell$]$16$,bit[F]$16$,bit[$\ell$]$16$,fixed[$16$]fixed$16$,byte[$16$]byte$16$,bit[$16$]bit$16$,fixed[$16$]byte$16$,fixed[$16$]bit$16$,byte[$16$]bit$16$,fixed[F]$32$,fixed[$\ell$]$32$,byte[F]$32$,byte[$\ell$]$32$,bit[F]$32$,bit[$\ell$]$32$,fixed[$16$]fixed$32$,byte[$16$]byte$32$,bit[$16$]bit$32$,fixed[$16$]byte$32$,fixed[$16$]bit$32$,byte[$16$]bit$32$,fixed[F]$64$,fixed[$\ell$]$64$,byte[F]$64$,byte[$\ell$]$64$,bit[F]$64$,bit[$\ell$]$64$,fixed[$16$]fixed$64$,byte[$16$]byte$64$,bit[$16$]bit$64$,fixed[$16$]byte$64$,fixed[$16$]bit$64$,byte[$16$]bit$64$,\\emph{Prezza}"
vals = [ 32_000_000_000,2.000000,2.000000,1.156982,1.156373,1.125000,1.125000,2.000073,1.157061,1.125095,1.157070,1.125105,1.125096,1.125000,1.125000,1.031982,1.031373,1.021484,1.021484,1.125009,1.031992,1.021496,1.031993,1.021497,1.021496,1.062500,1.062500,1.016357,1.015748,1.011719,1.011719,1.062505,1.016362,1.011725,1.016363,1.011725,1.011725,1.031250,1.031250,1.008545,1.007935,1.006348,1.006348,1.031252,1.008547,1.006351,1.008548,1.006351,1.006351,1.015625,1.015625,1.004639,1.004029,1.003418,1.003418,1.015626,1.004640,1.003419,1.004640,1.003420,1.003420,1.127441 ]
#if __name__ == '__main__':
mapped = dict(zip(lbls.split(',')[1:], vals[1:]))
ordered = OrderedDict(sorted(mapped.items(), key=lambda x: x[1]))
length = 7
keys, vals = list(ordered.keys()), [ "${:0.4f}$".format(i) for i in ordered.values() ]
for i in range(0, len(keys), length):
print("\\begin{tabular}{" + "|x{2.35cm}"*len(keys[i:i+length]) + "|}")
print("\\hline")
print(" & ".join(keys[i:i+length]) + " \\\\")
print("\\hline")
print(" & ".join(vals[i:i+length]) + " \\\\")
print("\\hline")
print("\\end{tabular} \\vspace{0.2cm}\n")
| 91.8 | 977 | 0.639216 | from collections import OrderedDict
lbls = "Elements,fixed[F],fixed[$\ell$],byte[F],byte[$\ell$],bit[F],bit[$\ell$],fixed[$16$]fixed,byte[$16$]byte,bit[$16$]bit,fixed[$16$]byte,fixed[$16$]bit,byte[$16$]bit,fixed[F]$8$,fixed[$\ell$]$8$,byte[F]$8$,byte[$\ell$]$8$,bit[F]$8$,bit[$\ell$]$8$,fixed[$16$]fixed$8$,byte[$16$]byte$8$,bit[$16$]bit$8$,fixed[$16$]byte$8$,fixed[$16$]bit$8$,byte[$16$]bit$8$,fixed[F]$16$,fixed[$\ell$]$16$,byte[F]$16$,byte[$\ell$]$16$,bit[F]$16$,bit[$\ell$]$16$,fixed[$16$]fixed$16$,byte[$16$]byte$16$,bit[$16$]bit$16$,fixed[$16$]byte$16$,fixed[$16$]bit$16$,byte[$16$]bit$16$,fixed[F]$32$,fixed[$\ell$]$32$,byte[F]$32$,byte[$\ell$]$32$,bit[F]$32$,bit[$\ell$]$32$,fixed[$16$]fixed$32$,byte[$16$]byte$32$,bit[$16$]bit$32$,fixed[$16$]byte$32$,fixed[$16$]bit$32$,byte[$16$]bit$32$,fixed[F]$64$,fixed[$\ell$]$64$,byte[F]$64$,byte[$\ell$]$64$,bit[F]$64$,bit[$\ell$]$64$,fixed[$16$]fixed$64$,byte[$16$]byte$64$,bit[$16$]bit$64$,fixed[$16$]byte$64$,fixed[$16$]bit$64$,byte[$16$]bit$64$,\\emph{Prezza}"
vals = [ 32_000_000_000,2.000000,2.000000,1.156982,1.156373,1.125000,1.125000,2.000073,1.157061,1.125095,1.157070,1.125105,1.125096,1.125000,1.125000,1.031982,1.031373,1.021484,1.021484,1.125009,1.031992,1.021496,1.031993,1.021497,1.021496,1.062500,1.062500,1.016357,1.015748,1.011719,1.011719,1.062505,1.016362,1.011725,1.016363,1.011725,1.011725,1.031250,1.031250,1.008545,1.007935,1.006348,1.006348,1.031252,1.008547,1.006351,1.008548,1.006351,1.006351,1.015625,1.015625,1.004639,1.004029,1.003418,1.003418,1.015626,1.004640,1.003419,1.004640,1.003420,1.003420,1.127441 ]
#if __name__ == '__main__':
mapped = dict(zip(lbls.split(',')[1:], vals[1:]))
ordered = OrderedDict(sorted(mapped.items(), key=lambda x: x[1]))
length = 7
keys, vals = list(ordered.keys()), [ "${:0.4f}$".format(i) for i in ordered.values() ]
for i in range(0, len(keys), length):
print("\\begin{tabular}{" + "|x{2.35cm}"*len(keys[i:i+length]) + "|}")
print("\\hline")
print(" & ".join(keys[i:i+length]) + " \\\\")
print("\\hline")
print(" & ".join(vals[i:i+length]) + " \\\\")
print("\\hline")
print("\\end{tabular} \\vspace{0.2cm}\n")
| true | true |
f7303cda043ce962a43ddaf990831db987ed128f | 758 | py | Python | manage.py | Boring-Mind/sbc-store | d16cce07bcb05ff2ea901411a5129ab1f0540161 | [
"MIT"
] | null | null | null | manage.py | Boring-Mind/sbc-store | d16cce07bcb05ff2ea901411a5129ab1f0540161 | [
"MIT"
] | null | null | null | manage.py | Boring-Mind/sbc-store | d16cce07bcb05ff2ea901411a5129ab1f0540161 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
from store.settings.base import get_config_type
def main():
# Set path to the current config file
os.environ.setdefault(
'DJANGO_SETTINGS_MODULE',
'store.settings.' + get_config_type()
)
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.074074 | 73 | 0.675462 |
import os
import sys
from store.settings.base import get_config_type
def main():
os.environ.setdefault(
'DJANGO_SETTINGS_MODULE',
'store.settings.' + get_config_type()
)
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true | true |
f7303e3fc37e9d2b3d1733a2847b96c9ba7bc3e0 | 7,086 | py | Python | ansible/modules/network/avi/avi_sslprofile.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | ansible/modules/network/avi/avi_sslprofile.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | ansible/modules/network/avi/avi_sslprofile.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2020-02-13T14:24:57.000Z | 2020-02-13T14:24:57.000Z | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 16.3.8
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_sslprofile
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of SSLProfile Avi RESTful Object
description:
- This module is used to configure SSLProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
accepted_ciphers:
description:
- Ciphers suites represented as defined by U(http://www.openssl.org/docs/apps/ciphers.html).
- Default value when not specified in API or module is interpreted by Avi Controller as AES:3DES:RC4.
accepted_versions:
description:
- Set of versions accepted by the server.
cipher_enums:
description:
- Cipher_enums of sslprofile.
description:
description:
- User defined description for the object.
dhparam:
description:
- Dh parameters used in ssl.
- At this time, it is not configurable and is set to 2048 bits.
enable_ssl_session_reuse:
description:
- Enable ssl session re-use.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
name:
description:
- Name of the object.
required: true
prefer_client_cipher_ordering:
description:
- Prefer the ssl cipher ordering presented by the client during the ssl handshake over the one specified in the ssl profile.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
send_close_notify:
description:
- Send 'close notify' alert message for a clean shutdown of the ssl connection.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
ssl_rating:
description:
- Sslrating settings for sslprofile.
ssl_session_timeout:
description:
- The amount of time before an ssl session expires.
- Default value when not specified in API or module is interpreted by Avi Controller as 86400.
tags:
description:
- List of tag.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = '''
- name: Create SSL profile with list of allowed ciphers
avi_sslprofile:
controller: ''
username: ''
password: ''
accepted_ciphers: >
ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA:
ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-ECDSA-AES256-SHA384:
AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:
AES256-SHA:DES-CBC3-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:
ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA
accepted_versions:
- type: SSL_VERSION_TLS1
- type: SSL_VERSION_TLS1_1
- type: SSL_VERSION_TLS1_2
cipher_enums:
- TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256
- TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA
- TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA
- TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384
- TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256
- TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384
- TLS_RSA_WITH_AES_128_GCM_SHA256
- TLS_RSA_WITH_AES_256_GCM_SHA384
- TLS_RSA_WITH_AES_128_CBC_SHA256
- TLS_RSA_WITH_AES_256_CBC_SHA256
- TLS_RSA_WITH_AES_128_CBC_SHA
- TLS_RSA_WITH_AES_256_CBC_SHA
- TLS_RSA_WITH_3DES_EDE_CBC_SHA
- TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA
- TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384
- TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256
- TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384
- TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
- TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA
name: PFS-BOTH-RSA-EC
send_close_notify: true
ssl_rating:
compatibility_rating: SSL_SCORE_EXCELLENT
performance_rating: SSL_SCORE_EXCELLENT
security_score: '100.0'
tenant_ref: Demo
'''
RETURN = '''
obj:
description: SSLProfile (api/sslprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
accepted_ciphers=dict(type='str',),
accepted_versions=dict(type='list',),
cipher_enums=dict(type='list',),
description=dict(type='str',),
dhparam=dict(type='str',),
enable_ssl_session_reuse=dict(type='bool',),
name=dict(type='str', required=True),
prefer_client_cipher_ordering=dict(type='bool',),
send_close_notify=dict(type='bool',),
ssl_rating=dict(type='dict',),
ssl_session_timeout=dict(type='int',),
tags=dict(type='list',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=16.3.5.post1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'sslprofile',
set([]))
if __name__ == '__main__':
main()
| 35.787879 | 136 | 0.673299 |
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_sslprofile
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of SSLProfile Avi RESTful Object
description:
- This module is used to configure SSLProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
accepted_ciphers:
description:
- Ciphers suites represented as defined by U(http://www.openssl.org/docs/apps/ciphers.html).
- Default value when not specified in API or module is interpreted by Avi Controller as AES:3DES:RC4.
accepted_versions:
description:
- Set of versions accepted by the server.
cipher_enums:
description:
- Cipher_enums of sslprofile.
description:
description:
- User defined description for the object.
dhparam:
description:
- Dh parameters used in ssl.
- At this time, it is not configurable and is set to 2048 bits.
enable_ssl_session_reuse:
description:
- Enable ssl session re-use.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
name:
description:
- Name of the object.
required: true
prefer_client_cipher_ordering:
description:
- Prefer the ssl cipher ordering presented by the client during the ssl handshake over the one specified in the ssl profile.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
send_close_notify:
description:
- Send 'close notify' alert message for a clean shutdown of the ssl connection.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
ssl_rating:
description:
- Sslrating settings for sslprofile.
ssl_session_timeout:
description:
- The amount of time before an ssl session expires.
- Default value when not specified in API or module is interpreted by Avi Controller as 86400.
tags:
description:
- List of tag.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = '''
- name: Create SSL profile with list of allowed ciphers
avi_sslprofile:
controller: ''
username: ''
password: ''
accepted_ciphers: >
ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA:
ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-ECDSA-AES256-SHA384:
AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:
AES256-SHA:DES-CBC3-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:
ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA
accepted_versions:
- type: SSL_VERSION_TLS1
- type: SSL_VERSION_TLS1_1
- type: SSL_VERSION_TLS1_2
cipher_enums:
- TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256
- TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA
- TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA
- TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384
- TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256
- TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384
- TLS_RSA_WITH_AES_128_GCM_SHA256
- TLS_RSA_WITH_AES_256_GCM_SHA384
- TLS_RSA_WITH_AES_128_CBC_SHA256
- TLS_RSA_WITH_AES_256_CBC_SHA256
- TLS_RSA_WITH_AES_128_CBC_SHA
- TLS_RSA_WITH_AES_256_CBC_SHA
- TLS_RSA_WITH_3DES_EDE_CBC_SHA
- TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA
- TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384
- TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256
- TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384
- TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
- TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA
name: PFS-BOTH-RSA-EC
send_close_notify: true
ssl_rating:
compatibility_rating: SSL_SCORE_EXCELLENT
performance_rating: SSL_SCORE_EXCELLENT
security_score: '100.0'
tenant_ref: Demo
'''
RETURN = '''
obj:
description: SSLProfile (api/sslprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
accepted_ciphers=dict(type='str',),
accepted_versions=dict(type='list',),
cipher_enums=dict(type='list',),
description=dict(type='str',),
dhparam=dict(type='str',),
enable_ssl_session_reuse=dict(type='bool',),
name=dict(type='str', required=True),
prefer_client_cipher_ordering=dict(type='bool',),
send_close_notify=dict(type='bool',),
ssl_rating=dict(type='dict',),
ssl_session_timeout=dict(type='int',),
tags=dict(type='list',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=16.3.5.post1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'sslprofile',
set([]))
if __name__ == '__main__':
main()
| true | true |
f7303fe1c7865651a322e4e2df538947ebd447f6 | 6,503 | py | Python | zerver/webhooks/sonarr/view.py | dumpmemory/zulip | 496273ddbc567330a0022699d6d6eb5c646e5da5 | [
"Apache-2.0"
] | 4 | 2021-09-16T16:46:55.000Z | 2022-02-06T13:00:21.000Z | zerver/webhooks/sonarr/view.py | dumpmemory/zulip | 496273ddbc567330a0022699d6d6eb5c646e5da5 | [
"Apache-2.0"
] | null | null | null | zerver/webhooks/sonarr/view.py | dumpmemory/zulip | 496273ddbc567330a0022699d6d6eb5c646e5da5 | [
"Apache-2.0"
] | 1 | 2022-02-04T05:15:12.000Z | 2022-02-04T05:15:12.000Z | from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message, get_setup_webhook_message
from zerver.models import UserProfile
SONARR_TOPIC_TEMPLATE = "{series_title}".strip()
SONARR_TOPIC_TEMPLATE_TEST = "Sonarr - Test".strip()
SONARR_TOPIC_TEMPLATE_HEALTH_CHECK = "Health {level}".strip()
SONARR_MESSAGE_TEMPLATE_SERIES_DELETED = "{series_title} has been deleted.".strip()
SONARR_MESSAGE_TEMPLATE_HEALTH_CHECK = "{message}.".strip()
SONARR_MESSAGE_TEMPLATE_EPISODES_RENAMED = "{series_title} episodes have been renamed.".strip()
SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED = (
"{series_title} - {series_number}x{episode_number} - {episode_name} has been imported.".strip()
)
SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED_UPGRADE = "{series_title} - {series_number}x{episode_number} - {episode_name} has been upgraded from {old_quality} to {new_quality}.".strip()
SONARR_MESSAGE_TEMPLATE_EPISODE_GRABBED = (
"{series_title} - {series_number}x{episode_number} - {episode_name} has been grabbed.".strip()
)
SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED = (
"{series_title} - {series_number}x{episode_number} - {episode_name} has been deleted.".strip()
)
SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED_UPGRADE = "{series_title} - {series_number}x{episode_number} - {episode_name} has been deleted due to quality upgrade.".strip()
ALL_EVENT_TYPES = [
"Grab",
"EpisodeFileDelete",
"Test",
"Download",
"SeriesDelete",
"Health",
"Rename",
]
@webhook_view("Sonarr", all_event_types=ALL_EVENT_TYPES)
@has_request_variables
def api_sonarr_webhook(
request: HttpRequest,
user_profile: UserProfile,
payload: Dict[str, Any] = REQ(argument_type="body"),
) -> HttpResponse:
body = get_body_for_http_request(payload)
subject = get_subject_for_http_request(payload)
check_send_webhook_message(request, user_profile, subject, body, payload["eventType"])
return json_success(request)
def get_subject_for_http_request(payload: Dict[str, Any]) -> str:
    """Build the message topic for a Sonarr event payload.

    "Test" and "Health" events get fixed/level-based topics; every other
    event is filed under the series title.
    """
    event = payload["eventType"]
    if event == "Test":
        return SONARR_TOPIC_TEMPLATE_TEST
    if event == "Health":
        return SONARR_TOPIC_TEMPLATE_HEALTH_CHECK.format(level=payload["level"])
    return SONARR_TOPIC_TEMPLATE.format(series_title=payload["series"]["title"])
def get_body_for_health_check_event(payload: Dict[str, Any]) -> str:
    """Render the message body for a Sonarr "Health" event."""
    template = SONARR_MESSAGE_TEMPLATE_HEALTH_CHECK
    return template.format(message=payload["message"])
def get_body_for_episodes_renamed_event(payload: Dict[str, Any]) -> str:
    """Render the message body for a Sonarr "Rename" event."""
    series_title = payload["series"]["title"]
    return SONARR_MESSAGE_TEMPLATE_EPISODES_RENAMED.format(series_title=series_title)
def get_body_for_series_deleted_event(payload: Dict[str, Any]) -> str:
    """Render the message body for a Sonarr "SeriesDelete" event."""
    series_title = payload["series"]["title"]
    return SONARR_MESSAGE_TEMPLATE_SERIES_DELETED.format(series_title=series_title)
def get_body_for_episode_imported_upgrade_event(payload: Dict[str, Any]) -> str:
    """Render the body for a "Download" event that upgraded an existing file."""
    episode = payload["episodes"][0]
    return SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED_UPGRADE.format(
        series_title=payload["series"]["title"],
        series_number=episode["seasonNumber"],
        episode_number=episode["episodeNumber"],
        episode_name=episode["title"],
        new_quality=payload["episodeFile"]["quality"],
        old_quality=payload["deletedFiles"][0]["quality"],
    )
def get_body_for_episode_imported_event(payload: Dict[str, Any]) -> str:
    """Render the body for a "Download" event that imported a new episode."""
    episode = payload["episodes"][0]
    return SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED.format(
        series_title=payload["series"]["title"],
        series_number=episode["seasonNumber"],
        episode_number=episode["episodeNumber"],
        episode_name=episode["title"],
    )
def get_body_for_episode_grabbed_event(payload: Dict[str, Any]) -> str:
    """Render the body for a Sonarr "Grab" event."""
    episode = payload["episodes"][0]
    return SONARR_MESSAGE_TEMPLATE_EPISODE_GRABBED.format(
        series_title=payload["series"]["title"],
        series_number=episode["seasonNumber"],
        episode_number=episode["episodeNumber"],
        episode_name=episode["title"],
    )
def get_body_for_episode_deleted_upgrade_event(payload: Dict[str, Any]) -> str:
    """Render the body for an "EpisodeFileDelete" caused by a quality upgrade."""
    episode = payload["episodes"][0]
    return SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED_UPGRADE.format(
        series_title=payload["series"]["title"],
        series_number=episode["seasonNumber"],
        episode_number=episode["episodeNumber"],
        episode_name=episode["title"],
    )
def get_body_for_episode_deleted_event(payload: Dict[str, Any]) -> str:
    """Render the body for a plain "EpisodeFileDelete" event."""
    episode = payload["episodes"][0]
    return SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED.format(
        series_title=payload["series"]["title"],
        series_number=episode["seasonNumber"],
        episode_number=episode["episodeNumber"],
        episode_name=episode["title"],
    )
def get_body_for_http_request(payload: Dict[str, Any]) -> str:
    """Dispatch a Sonarr payload to the body renderer for its event type.

    Raises UnsupportedWebhookEventType for anything unhandled — including
    "Download"/"EpisodeFileDelete" payloads that lack the expected
    "isUpgrade"/"deleteReason" field.
    """
    event = payload["eventType"]
    if event == "Test":
        return get_setup_webhook_message("Sonarr")
    if event == "Health":
        return get_body_for_health_check_event(payload)
    if event == "Rename":
        return get_body_for_episodes_renamed_event(payload)
    if event == "SeriesDelete":
        return get_body_for_series_deleted_event(payload)
    if event == "Download" and "isUpgrade" in payload:
        if payload["isUpgrade"]:
            return get_body_for_episode_imported_upgrade_event(payload)
        return get_body_for_episode_imported_event(payload)
    if event == "Grab":
        return get_body_for_episode_grabbed_event(payload)
    if event == "EpisodeFileDelete" and "deleteReason" in payload:
        if payload["deleteReason"] == "upgrade":
            return get_body_for_episode_deleted_upgrade_event(payload)
        return get_body_for_episode_deleted_event(payload)
    raise UnsupportedWebhookEventType(payload["eventType"])
| 40.899371 | 182 | 0.721974 | from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventType
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message, get_setup_webhook_message
from zerver.models import UserProfile
SONARR_TOPIC_TEMPLATE = "{series_title}".strip()
SONARR_TOPIC_TEMPLATE_TEST = "Sonarr - Test".strip()
SONARR_TOPIC_TEMPLATE_HEALTH_CHECK = "Health {level}".strip()
SONARR_MESSAGE_TEMPLATE_SERIES_DELETED = "{series_title} has been deleted.".strip()
SONARR_MESSAGE_TEMPLATE_HEALTH_CHECK = "{message}.".strip()
SONARR_MESSAGE_TEMPLATE_EPISODES_RENAMED = "{series_title} episodes have been renamed.".strip()
SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED = (
"{series_title} - {series_number}x{episode_number} - {episode_name} has been imported.".strip()
)
SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED_UPGRADE = "{series_title} - {series_number}x{episode_number} - {episode_name} has been upgraded from {old_quality} to {new_quality}.".strip()
SONARR_MESSAGE_TEMPLATE_EPISODE_GRABBED = (
"{series_title} - {series_number}x{episode_number} - {episode_name} has been grabbed.".strip()
)
SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED = (
"{series_title} - {series_number}x{episode_number} - {episode_name} has been deleted.".strip()
)
SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED_UPGRADE = "{series_title} - {series_number}x{episode_number} - {episode_name} has been deleted due to quality upgrade.".strip()
ALL_EVENT_TYPES = [
"Grab",
"EpisodeFileDelete",
"Test",
"Download",
"SeriesDelete",
"Health",
"Rename",
]
@webhook_view("Sonarr", all_event_types=ALL_EVENT_TYPES)
@has_request_variables
def api_sonarr_webhook(
request: HttpRequest,
user_profile: UserProfile,
payload: Dict[str, Any] = REQ(argument_type="body"),
) -> HttpResponse:
body = get_body_for_http_request(payload)
subject = get_subject_for_http_request(payload)
check_send_webhook_message(request, user_profile, subject, body, payload["eventType"])
return json_success(request)
def get_subject_for_http_request(payload: Dict[str, Any]) -> str:
if payload["eventType"] != "Test" and payload["eventType"] != "Health":
topic = SONARR_TOPIC_TEMPLATE.format(series_title=payload["series"]["title"])
elif payload["eventType"] == "Test":
topic = SONARR_TOPIC_TEMPLATE_TEST
elif payload["eventType"] == "Health":
topic = SONARR_TOPIC_TEMPLATE_HEALTH_CHECK.format(level=payload["level"])
return topic
def get_body_for_health_check_event(payload: Dict[str, Any]) -> str:
return SONARR_MESSAGE_TEMPLATE_HEALTH_CHECK.format(message=payload["message"])
def get_body_for_episodes_renamed_event(payload: Dict[str, Any]) -> str:
return SONARR_MESSAGE_TEMPLATE_EPISODES_RENAMED.format(series_title=payload["series"]["title"])
def get_body_for_series_deleted_event(payload: Dict[str, Any]) -> str:
return SONARR_MESSAGE_TEMPLATE_SERIES_DELETED.format(series_title=payload["series"]["title"])
def get_body_for_episode_imported_upgrade_event(payload: Dict[str, Any]) -> str:
data = {
"series_title": payload["series"]["title"],
"series_number": payload["episodes"][0]["seasonNumber"],
"episode_number": payload["episodes"][0]["episodeNumber"],
"episode_name": payload["episodes"][0]["title"],
"new_quality": payload["episodeFile"]["quality"],
"old_quality": payload["deletedFiles"][0]["quality"],
}
return SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED_UPGRADE.format(**data)
def get_body_for_episode_imported_event(payload: Dict[str, Any]) -> str:
data = {
"series_title": payload["series"]["title"],
"series_number": payload["episodes"][0]["seasonNumber"],
"episode_number": payload["episodes"][0]["episodeNumber"],
"episode_name": payload["episodes"][0]["title"],
}
return SONARR_MESSAGE_TEMPLATE_EPISODE_IMPORTED.format(**data)
def get_body_for_episode_grabbed_event(payload: Dict[str, Any]) -> str:
data = {
"series_title": payload["series"]["title"],
"series_number": payload["episodes"][0]["seasonNumber"],
"episode_number": payload["episodes"][0]["episodeNumber"],
"episode_name": payload["episodes"][0]["title"],
}
return SONARR_MESSAGE_TEMPLATE_EPISODE_GRABBED.format(**data)
def get_body_for_episode_deleted_upgrade_event(payload: Dict[str, Any]) -> str:
data = {
"series_title": payload["series"]["title"],
"series_number": payload["episodes"][0]["seasonNumber"],
"episode_number": payload["episodes"][0]["episodeNumber"],
"episode_name": payload["episodes"][0]["title"],
}
return SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED_UPGRADE.format(**data)
def get_body_for_episode_deleted_event(payload: Dict[str, Any]) -> str:
data = {
"series_title": payload["series"]["title"],
"series_number": payload["episodes"][0]["seasonNumber"],
"episode_number": payload["episodes"][0]["episodeNumber"],
"episode_name": payload["episodes"][0]["title"],
}
return SONARR_MESSAGE_TEMPLATE_EPISODE_DELETED.format(**data)
def get_body_for_http_request(payload: Dict[str, Any]) -> str:
if payload["eventType"] == "Test":
return get_setup_webhook_message("Sonarr")
elif payload["eventType"] == "Health":
return get_body_for_health_check_event(payload)
elif payload["eventType"] == "Rename":
return get_body_for_episodes_renamed_event(payload)
elif payload["eventType"] == "SeriesDelete":
return get_body_for_series_deleted_event(payload)
elif payload["eventType"] == "Download" and "isUpgrade" in payload:
if payload["isUpgrade"]:
return get_body_for_episode_imported_upgrade_event(payload)
else:
return get_body_for_episode_imported_event(payload)
elif payload["eventType"] == "Grab":
return get_body_for_episode_grabbed_event(payload)
elif payload["eventType"] == "EpisodeFileDelete" and "deleteReason" in payload:
if payload["deleteReason"] == "upgrade":
return get_body_for_episode_deleted_upgrade_event(payload)
else:
return get_body_for_episode_deleted_event(payload)
else:
raise UnsupportedWebhookEventType(payload["eventType"])
| true | true |
f730429da94736b9ecf8b387b5667497fa6dec89 | 132 | py | Python | GA_tsp_optimisation/__init__.py | JessikaSmith/OptimizationAlgorithms | bf0f871f4d6150e1e7533360cfc6f70eb616c870 | [
"MIT"
] | 15 | 2018-11-16T04:42:44.000Z | 2020-03-20T16:00:47.000Z | GA_tsp_optimisation/__init__.py | JessikaSmith/OptimizationAlgorithms | bf0f871f4d6150e1e7533360cfc6f70eb616c870 | [
"MIT"
] | null | null | null | GA_tsp_optimisation/__init__.py | JessikaSmith/OptimizationAlgorithms | bf0f871f4d6150e1e7533360cfc6f70eb616c870 | [
"MIT"
] | 3 | 2019-01-17T13:18:56.000Z | 2019-12-17T22:22:48.000Z | from .mutation import Mutation
from .crossover import Crossover
from .selection import Selector
from .ga_pipeline import ga_pipeline | 33 | 36 | 0.856061 | from .mutation import Mutation
from .crossover import Crossover
from .selection import Selector
from .ga_pipeline import ga_pipeline | true | true |
f730429fdfa6612d800f5277781da67e08805140 | 204 | py | Python | libs/VulnScan.py | glaudsonml/kurgan-ai | c0ad4450f9fb2004f35b8a0201bfe894e01adc8f | [
"Apache-2.0"
] | 35 | 2017-05-22T14:42:01.000Z | 2020-09-07T21:24:41.000Z | libs/VulnScan.py | tmaxter/kurgan-ai | c0ad4450f9fb2004f35b8a0201bfe894e01adc8f | [
"Apache-2.0"
] | null | null | null | libs/VulnScan.py | tmaxter/kurgan-ai | c0ad4450f9fb2004f35b8a0201bfe894e01adc8f | [
"Apache-2.0"
] | 5 | 2017-12-19T03:36:54.000Z | 2021-04-14T18:05:08.000Z | '''
Vulnerability Scanner Class
'''
class VulnScan(object):
scanning=True
def set_scanning(self, val):
self.scanning = val
def get_scanning(self):
return self.scanning
| 14.571429 | 32 | 0.637255 |
class VulnScan(object):
scanning=True
def set_scanning(self, val):
self.scanning = val
def get_scanning(self):
return self.scanning
| true | true |
f730431b27e07083dae8b1e4b35faa1a13906b39 | 2,221 | py | Python | generated-libraries/python/netapp/coredump/coredump_config_modify_iter_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/coredump/coredump_config_modify_iter_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/coredump/coredump_config_modify_iter_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | from netapp.coredump.coredump_config_info import CoredumpConfigInfo
from netapp.netapp_object import NetAppObject
class CoredumpConfigModifyIterInfo(NetAppObject):
"""
Information about the modify operation that was
attempted/performed against coredump-config object.
were not modified due to some error.
due to some error.
This element will be returned only if input element
'return-failure-list' is true.
"""
_error_code = None
@property
def error_code(self):
"""
Error code, if the modify operation caused an error.
"""
return self._error_code
@error_code.setter
def error_code(self, val):
if val != None:
self.validate('error_code', val)
self._error_code = val
_error_message = None
@property
def error_message(self):
"""
Error description, if the modify operation caused an
error.
"""
return self._error_message
@error_message.setter
def error_message(self, val):
if val != None:
self.validate('error_message', val)
self._error_message = val
_coredump_config_key = None
@property
def coredump_config_key(self):
"""
The keys for the coredump-config object to which the
modify operation applies.
"""
return self._coredump_config_key
@coredump_config_key.setter
def coredump_config_key(self, val):
if val != None:
self.validate('coredump_config_key', val)
self._coredump_config_key = val
@staticmethod
def get_api_name():
return "coredump-config-modify-iter-info"
@staticmethod
def get_desired_attrs():
return [
'error-code',
'error-message',
'coredump-config-key',
]
def describe_properties(self):
return {
'error_code': { 'class': int, 'is_list': False, 'required': 'optional' },
'error_message': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'coredump_config_key': { 'class': CoredumpConfigInfo, 'is_list': False, 'required': 'required' },
}
| 30.424658 | 109 | 0.61729 | from netapp.coredump.coredump_config_info import CoredumpConfigInfo
from netapp.netapp_object import NetAppObject
class CoredumpConfigModifyIterInfo(NetAppObject):
_error_code = None
@property
def error_code(self):
return self._error_code
@error_code.setter
def error_code(self, val):
if val != None:
self.validate('error_code', val)
self._error_code = val
_error_message = None
@property
def error_message(self):
return self._error_message
@error_message.setter
def error_message(self, val):
if val != None:
self.validate('error_message', val)
self._error_message = val
_coredump_config_key = None
@property
def coredump_config_key(self):
return self._coredump_config_key
@coredump_config_key.setter
def coredump_config_key(self, val):
if val != None:
self.validate('coredump_config_key', val)
self._coredump_config_key = val
@staticmethod
def get_api_name():
return "coredump-config-modify-iter-info"
@staticmethod
def get_desired_attrs():
return [
'error-code',
'error-message',
'coredump-config-key',
]
def describe_properties(self):
return {
'error_code': { 'class': int, 'is_list': False, 'required': 'optional' },
'error_message': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'coredump_config_key': { 'class': CoredumpConfigInfo, 'is_list': False, 'required': 'required' },
}
| true | true |
f730433703f39fdd32cf073d91c160a604ea19a7 | 1,983 | py | Python | recipes/recipe_modules/depot_tools/api.py | xiayongtao/depot_tools | 02e6133a844e47dd55159a585144708bae11b76d | [
"BSD-3-Clause"
] | null | null | null | recipes/recipe_modules/depot_tools/api.py | xiayongtao/depot_tools | 02e6133a844e47dd55159a585144708bae11b76d | [
"BSD-3-Clause"
] | null | null | null | recipes/recipe_modules/depot_tools/api.py | xiayongtao/depot_tools | 02e6133a844e47dd55159a585144708bae11b76d | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The `depot_tools` module provides safe functions to access paths within
the depot_tools repo."""
import contextlib
from recipe_engine import recipe_api
class DepotToolsApi(recipe_api.RecipeApi):
@property
def download_from_google_storage_path(self):
return self.repo_resource('download_from_google_storage.py')
@property
def upload_to_google_storage_path(self):
return self.repo_resource('upload_to_google_storage.py')
@property
def root(self):
"""Returns (Path): The "depot_tools" root directory."""
return self.repo_resource()
@property
def cros_path(self):
return self.repo_resource('cros')
@property
def gn_py_path(self):
return self.repo_resource('gn.py')
# TODO(dnj): Remove this once everything uses the "gsutil" recipe module
# version.
@property
def gsutil_py_path(self):
return self.repo_resource('gsutil.py')
@property
def ninja_path(self):
ninja_exe = 'ninja.exe' if self.m.platform.is_win else 'ninja'
return self.repo_resource(ninja_exe)
@property
def autoninja_path(self):
autoninja = 'autoninja.bat' if self.m.platform.is_win else 'autoninja'
return self.repo_resource(autoninja)
@property
def presubmit_support_py_path(self):
return self.repo_resource('presubmit_support.py')
@property
def dirmd_path(self):
return self.repo_resource('dirmd')
@contextlib.contextmanager
def on_path(self):
"""Use this context manager to put depot_tools on $PATH.
Example:
```python
with api.depot_tools.on_path():
# run some steps
```
"""
# By default Depot Tools do not auto update on the bots.
# (crbug/1090603)
with self.m.context(
**{'env_suffixes': {
'PATH': [self.root],
'DEPOT_TOOLS_UPDATE': '0'
}}):
yield
| 25.753247 | 74 | 0.702975 |
import contextlib
from recipe_engine import recipe_api
class DepotToolsApi(recipe_api.RecipeApi):
@property
def download_from_google_storage_path(self):
return self.repo_resource('download_from_google_storage.py')
@property
def upload_to_google_storage_path(self):
return self.repo_resource('upload_to_google_storage.py')
@property
def root(self):
return self.repo_resource()
@property
def cros_path(self):
return self.repo_resource('cros')
@property
def gn_py_path(self):
return self.repo_resource('gn.py')
@property
def gsutil_py_path(self):
return self.repo_resource('gsutil.py')
@property
def ninja_path(self):
ninja_exe = 'ninja.exe' if self.m.platform.is_win else 'ninja'
return self.repo_resource(ninja_exe)
@property
def autoninja_path(self):
autoninja = 'autoninja.bat' if self.m.platform.is_win else 'autoninja'
return self.repo_resource(autoninja)
@property
def presubmit_support_py_path(self):
return self.repo_resource('presubmit_support.py')
@property
def dirmd_path(self):
return self.repo_resource('dirmd')
@contextlib.contextmanager
def on_path(self):
with self.m.context(
**{'env_suffixes': {
'PATH': [self.root],
'DEPOT_TOOLS_UPDATE': '0'
}}):
yield
| true | true |
f730440cb51639d90c3a94b3a0f7db56fd17a3c5 | 5,151 | py | Python | PaddleSlim/classification/pruning/compress.py | XiaoguangHu01/models | a95d49323ed504e5a9164586f171f408954fd43a | [
"Apache-2.0"
] | null | null | null | PaddleSlim/classification/pruning/compress.py | XiaoguangHu01/models | a95d49323ed504e5a9164586f171f408954fd43a | [
"Apache-2.0"
] | null | null | null | PaddleSlim/classification/pruning/compress.py | XiaoguangHu01/models | a95d49323ed504e5a9164586f171f408954fd43a | [
"Apache-2.0"
] | null | null | null | import os
import sys
import logging
import paddle
import argparse
import functools
import math
import paddle.fluid as fluid
sys.path.append("..")
import imagenet_reader as reader
import models
sys.path.append("../../")
from utility import add_arguments, print_arguments
from paddle.fluid.contrib.slim import Compressor
logging.basicConfig(format='%(asctime)s-%(levelname)s: %(message)s')
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)
parser = argparse.ArgumentParser(description=__doc__)
add_arg = functools.partial(add_arguments, argparser=parser)
# yapf: disable
add_arg('batch_size', int, 64*4, "Minibatch size.")
add_arg('use_gpu', bool, True, "Whether to use GPU or not.")
add_arg('model', str, None, "The target model.")
add_arg('pretrained_model', str, None, "Whether to use pretrained model.")
add_arg('lr', float, 0.1, "The learning rate used to fine-tune pruned model.")
add_arg('lr_strategy', str, "piecewise_decay", "The learning rate decay strategy.")
add_arg('l2_decay', float, 3e-5, "The l2_decay parameter.")
add_arg('momentum_rate', float, 0.9, "The value of momentum_rate.")
add_arg('num_epochs', int, 120, "The number of total epochs.")
add_arg('total_images', int, 1281167, "The number of total training images.")
parser.add_argument('--step_epochs', nargs='+', type=int, default=[30, 60, 90], help="piecewise decay step")
add_arg('config_file', str, None, "The config file for compression with yaml format.")
# yapf: enable
model_list = [m for m in dir(models) if "__" not in m]
def piecewise_decay(args):
    """Momentum optimizer with a step-wise LR schedule.

    The learning rate is multiplied by 0.1 at each epoch boundary listed in
    args.step_epochs; L2 weight decay comes from args.l2_decay.
    """
    steps_per_epoch = int(math.ceil(float(args.total_images) / args.batch_size))
    boundaries = [steps_per_epoch * epoch for epoch in args.step_epochs]
    values = [args.lr * (0.1 ** idx) for idx in range(len(boundaries) + 1)]
    schedule = fluid.layers.piecewise_decay(boundaries=boundaries, values=values)
    return fluid.optimizer.Momentum(
        learning_rate=schedule,
        momentum=args.momentum_rate,
        regularization=fluid.regularizer.L2Decay(args.l2_decay))
def cosine_decay(args):
    """Momentum optimizer with a cosine-annealed LR schedule over all epochs."""
    steps_per_epoch = int(math.ceil(float(args.total_images) / args.batch_size))
    schedule = fluid.layers.cosine_decay(
        learning_rate=args.lr,
        step_each_epoch=steps_per_epoch,
        epochs=args.num_epochs)
    return fluid.optimizer.Momentum(
        learning_rate=schedule,
        momentum=args.momentum_rate,
        regularization=fluid.regularizer.L2Decay(args.l2_decay))
def create_optimizer(args):
    """Build the fine-tuning optimizer selected by args.lr_strategy.

    Supports "piecewise_decay" and "cosine_decay".  Previously an unknown
    strategy fell through and returned None, which only surfaced later as a
    confusing failure inside the training pipeline; now it raises
    ValueError immediately.
    """
    if args.lr_strategy == "piecewise_decay":
        return piecewise_decay(args)
    elif args.lr_strategy == "cosine_decay":
        return cosine_decay(args)
    raise ValueError(
        "Unsupported lr_strategy: {!r} (expected 'piecewise_decay' or "
        "'cosine_decay')".format(args.lr_strategy))
def compress(args):
    """Build the ImageNet classification graph and run PaddleSlim pruning.

    Constructs the train/eval programs for the model named by args.model,
    optionally loads pretrained weights, and hands everything to slim's
    Compressor, whose strategy is driven by the YAML in args.config_file.
    """
    class_dim=1000
    image_shape="3,224,224"
    image_shape = [int(m) for m in image_shape.split(",")]
    assert args.model in model_list, "{} is not in lists: {}".format(args.model, model_list)
    # Placeholders: NCHW image batch and integer class labels.
    image = fluid.data(name='image', shape=[None] + image_shape, dtype='float32')
    label = fluid.data(name='label', shape=[None, 1], dtype='int64')
    # model definition
    model = models.__dict__[args.model]()
    out = model.net(input=image, class_dim=class_dim)
    cost = fluid.layers.cross_entropy(input=out, label=label)
    avg_cost = fluid.layers.mean(x=cost)
    acc_top1 = fluid.layers.accuracy(input=out, label=label, k=1)
    acc_top5 = fluid.layers.accuracy(input=out, label=label, k=5)
    # Snapshot the forward-only program for evaluation before any further
    # training setup touches the default main program.
    val_program = fluid.default_main_program().clone()
    opt = create_optimizer(args)
    place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
    exe = fluid.Executor(place)
    exe.run(fluid.default_startup_program())
    if args.pretrained_model:
        # Load only the variables that actually exist in the checkpoint dir.
        def if_exist(var):
            return os.path.exists(os.path.join(args.pretrained_model, var.name))
        fluid.io.load_vars(exe, args.pretrained_model, predicate=if_exist)
    val_reader = paddle.batch(reader.val(), batch_size=args.batch_size)
    val_feed_list = [('image', image.name), ('label', label.name)]
    val_fetch_list = [('acc_top1', acc_top1.name), ('acc_top5', acc_top5.name)]
    train_reader = paddle.batch(
        reader.train(), batch_size=args.batch_size, drop_last=True)
    train_feed_list = [('image', image.name), ('label', label.name)]
    train_fetch_list = [('loss', avg_cost.name)]
    # The Compressor drives training/eval and applies the pruning strategy
    # configured in the YAML file; prune_infer_model also saves a pruned
    # inference graph with the given input/output endpoints.
    com_pass = Compressor(
        place,
        fluid.global_scope(),
        fluid.default_main_program(),
        train_reader=train_reader,
        train_feed_list=train_feed_list,
        train_fetch_list=train_fetch_list,
        eval_program=val_program,
        eval_reader=val_reader,
        eval_feed_list=val_feed_list,
        eval_fetch_list=val_fetch_list,
        save_eval_model=True,
        prune_infer_model=[[image.name], [out.name]],
        train_optimizer=opt)
    com_pass.config(args.config_file)
    com_pass.run()
def main():
    """CLI entry point: parse arguments, echo them, and run compression."""
    args = parser.parse_args()
    print_arguments(args)
    compress(args)
if __name__ == '__main__':
main()
| 39.623077 | 108 | 0.673461 | import os
import sys
import logging
import paddle
import argparse
import functools
import math
import paddle.fluid as fluid
sys.path.append("..")
import imagenet_reader as reader
import models
sys.path.append("../../")
from utility import add_arguments, print_arguments
from paddle.fluid.contrib.slim import Compressor
logging.basicConfig(format='%(asctime)s-%(levelname)s: %(message)s')
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)
parser = argparse.ArgumentParser(description=__doc__)
add_arg = functools.partial(add_arguments, argparser=parser)
add_arg('batch_size', int, 64*4, "Minibatch size.")
add_arg('use_gpu', bool, True, "Whether to use GPU or not.")
add_arg('model', str, None, "The target model.")
add_arg('pretrained_model', str, None, "Whether to use pretrained model.")
add_arg('lr', float, 0.1, "The learning rate used to fine-tune pruned model.")
add_arg('lr_strategy', str, "piecewise_decay", "The learning rate decay strategy.")
add_arg('l2_decay', float, 3e-5, "The l2_decay parameter.")
add_arg('momentum_rate', float, 0.9, "The value of momentum_rate.")
add_arg('num_epochs', int, 120, "The number of total epochs.")
add_arg('total_images', int, 1281167, "The number of total training images.")
parser.add_argument('--step_epochs', nargs='+', type=int, default=[30, 60, 90], help="piecewise decay step")
add_arg('config_file', str, None, "The config file for compression with yaml format.")
model_list = [m for m in dir(models) if "__" not in m]
def piecewise_decay(args):
step = int(math.ceil(float(args.total_images) / args.batch_size))
bd = [step * e for e in args.step_epochs]
lr = [args.lr * (0.1**i) for i in range(len(bd) + 1)]
learning_rate = fluid.layers.piecewise_decay(boundaries=bd, values=lr)
optimizer = fluid.optimizer.Momentum(
learning_rate=learning_rate,
momentum=args.momentum_rate,
regularization=fluid.regularizer.L2Decay(args.l2_decay))
return optimizer
def cosine_decay(args):
step = int(math.ceil(float(args.total_images) / args.batch_size))
learning_rate = fluid.layers.cosine_decay(
learning_rate=args.lr,
step_each_epoch=step,
epochs=args.num_epochs)
optimizer = fluid.optimizer.Momentum(
learning_rate=learning_rate,
momentum=args.momentum_rate,
regularization=fluid.regularizer.L2Decay(args.l2_decay))
return optimizer
def create_optimizer(args):
if args.lr_strategy == "piecewise_decay":
return piecewise_decay(args)
elif args.lr_strategy == "cosine_decay":
return cosine_decay(args)
def compress(args):
class_dim=1000
image_shape="3,224,224"
image_shape = [int(m) for m in image_shape.split(",")]
assert args.model in model_list, "{} is not in lists: {}".format(args.model, model_list)
image = fluid.data(name='image', shape=[None] + image_shape, dtype='float32')
label = fluid.data(name='label', shape=[None, 1], dtype='int64')
model = models.__dict__[args.model]()
out = model.net(input=image, class_dim=class_dim)
cost = fluid.layers.cross_entropy(input=out, label=label)
avg_cost = fluid.layers.mean(x=cost)
acc_top1 = fluid.layers.accuracy(input=out, label=label, k=1)
acc_top5 = fluid.layers.accuracy(input=out, label=label, k=5)
val_program = fluid.default_main_program().clone()
opt = create_optimizer(args)
place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
if args.pretrained_model:
def if_exist(var):
return os.path.exists(os.path.join(args.pretrained_model, var.name))
fluid.io.load_vars(exe, args.pretrained_model, predicate=if_exist)
val_reader = paddle.batch(reader.val(), batch_size=args.batch_size)
val_feed_list = [('image', image.name), ('label', label.name)]
val_fetch_list = [('acc_top1', acc_top1.name), ('acc_top5', acc_top5.name)]
train_reader = paddle.batch(
reader.train(), batch_size=args.batch_size, drop_last=True)
train_feed_list = [('image', image.name), ('label', label.name)]
train_fetch_list = [('loss', avg_cost.name)]
com_pass = Compressor(
place,
fluid.global_scope(),
fluid.default_main_program(),
train_reader=train_reader,
train_feed_list=train_feed_list,
train_fetch_list=train_fetch_list,
eval_program=val_program,
eval_reader=val_reader,
eval_feed_list=val_feed_list,
eval_fetch_list=val_fetch_list,
save_eval_model=True,
prune_infer_model=[[image.name], [out.name]],
train_optimizer=opt)
com_pass.config(args.config_file)
com_pass.run()
def main():
args = parser.parse_args()
print_arguments(args)
compress(args)
if __name__ == '__main__':
main()
| true | true |
f730443cb35af115bf5129a4cd48778f2a808e00 | 150 | py | Python | uploading/apps.py | trevin-livele/django_api_awwwards | 604a480cfe3d0efc01019c4ba15ffba5c140be0b | [
"MIT"
] | null | null | null | uploading/apps.py | trevin-livele/django_api_awwwards | 604a480cfe3d0efc01019c4ba15ffba5c140be0b | [
"MIT"
] | null | null | null | uploading/apps.py | trevin-livele/django_api_awwwards | 604a480cfe3d0efc01019c4ba15ffba5c140be0b | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class UploadingConfig(AppConfig):
    """Django app configuration for the ``uploading`` app."""
    # Use 64-bit auto-incrementing primary keys for this app's models.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'uploading'
| 21.428571 | 56 | 0.766667 | from django.apps import AppConfig
class UploadingConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'uploading'
| true | true |
f73046cc8c22aa50dfd3a3b28c9591b12f1c2237 | 2,457 | py | Python | tests/configs/realview64-o3-checker.py | ronaldof1s1/gem5 | 418ac1d9ab21049f6d21476bc66f08467c29a9e3 | [
"BSD-3-Clause"
] | 135 | 2016-10-21T03:31:49.000Z | 2022-03-25T01:22:20.000Z | tests/configs/realview64-o3-checker.py | akeley98/FU-pools | dcd47b7dad279246093081cab24b95cae363c3b3 | [
"BSD-3-Clause"
] | 35 | 2017-03-10T17:57:46.000Z | 2022-02-18T17:34:16.000Z | tests/configs/realview64-o3-checker.py | akeley98/FU-pools | dcd47b7dad279246093081cab24b95cae363c3b3 | [
"BSD-3-Clause"
] | 48 | 2016-12-08T12:03:13.000Z | 2022-02-16T09:16:13.000Z | # Copyright (c) 2012, 2017, 2019 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
from common.cores.arm.O3_ARM_v7a import O3_ARM_v7a_3
root = LinuxArmFSSystemUniprocessor(mem_mode='timing',
mem_class=DDR3_1600_8x8,
cpu_class=O3_ARM_v7a_3,
checker=True).create_root()
| 53.413043 | 72 | 0.764754 |
from m5.objects import *
from arm_generic import *
from common.cores.arm.O3_ARM_v7a import O3_ARM_v7a_3
root = LinuxArmFSSystemUniprocessor(mem_mode='timing',
mem_class=DDR3_1600_8x8,
cpu_class=O3_ARM_v7a_3,
checker=True).create_root()
| true | true |
f73047495463637b196d2d01ac086770b021da5f | 274 | py | Python | edamino/logger.py | drevenzz/aminoCAT | 48c4de0da0c830a0550fdee7fb8499645c434e90 | [
"MIT"
] | null | null | null | edamino/logger.py | drevenzz/aminoCAT | 48c4de0da0c830a0550fdee7fb8499645c434e90 | [
"MIT"
] | null | null | null | edamino/logger.py | drevenzz/aminoCAT | 48c4de0da0c830a0550fdee7fb8499645c434e90 | [
"MIT"
] | null | null | null | import logging
__all__ = ['logger']
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
fmt = logging.Formatter(fmt="%(levelname)s: %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logger.addHandler(handler)
| 21.076923 | 57 | 0.770073 | import logging
__all__ = ['logger']
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
fmt = logging.Formatter(fmt="%(levelname)s: %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logger.addHandler(handler)
| true | true |
f7304753312193a3300eef211e2e820629c26beb | 745 | py | Python | Python/ex49.py | Anderson0312/Python | 1fd225378c55309640d584a4894393f7c40dc9ed | [
"MIT"
] | 1 | 2022-02-01T17:59:50.000Z | 2022-02-01T17:59:50.000Z | Python/ex49.py | Anderson0312/Python | 1fd225378c55309640d584a4894393f7c40dc9ed | [
"MIT"
] | null | null | null | Python/ex49.py | Anderson0312/Python | 1fd225378c55309640d584a4894393f7c40dc9ed | [
"MIT"
] | null | null | null | numeros = list()
for n in range(0, 5):
numeros.append(int(input(f'Digite um valor para a posição {n}: ')))
print(f'Voce digitou os valores {numeros}')
print('O maior valor digitado foi {} nas possições'.format(max(numeros)), end=' ')
'''
No for
O pos foi usado para fixar a localização
o v foi usado para fixar os valores
e o enumerate para enumerar a lista informada
No if
usamos se o v = valor for igual ao maior da lista, mostrar a posição na lista +1
'''
for pos, v in enumerate(numeros):
if v == max(numeros):
print(f'{pos+1}', end=' ')
print('\nO menor numero digitado foi {} nas posições'.format(min(numeros)), end=' ')
for pos, v in enumerate(numeros):
if v == min(numeros):
print(f'{pos + 1}', end=' ')
| 29.8 | 84 | 0.660403 | numeros = list()
for n in range(0, 5):
numeros.append(int(input(f'Digite um valor para a posição {n}: ')))
print(f'Voce digitou os valores {numeros}')
print('O maior valor digitado foi {} nas possições'.format(max(numeros)), end=' ')
for pos, v in enumerate(numeros):
if v == max(numeros):
print(f'{pos+1}', end=' ')
print('\nO menor numero digitado foi {} nas posições'.format(min(numeros)), end=' ')
for pos, v in enumerate(numeros):
if v == min(numeros):
print(f'{pos + 1}', end=' ')
| true | true |
f73047624b50599b4347a4f861a06e86d3173252 | 3,865 | py | Python | database.py | techx/evolution-chamber | dea9b7d563df6f06d270078f5c512e3f7e367a92 | [
"MIT"
] | 4 | 2015-06-22T15:44:57.000Z | 2015-06-22T15:57:03.000Z | database.py | techx/evolution-chamber | dea9b7d563df6f06d270078f5c512e3f7e367a92 | [
"MIT"
] | null | null | null | database.py | techx/evolution-chamber | dea9b7d563df6f06d270078f5c512e3f7e367a92 | [
"MIT"
] | 2 | 2015-07-09T15:21:37.000Z | 2016-02-02T15:59:09.000Z | import sqlite3
from flask import g, Flask
from constants import Constants
import json
DATABASE = 'db/sqlite.db'
app = Flask(Constants.APP_NAME)
def get_db():
db = getattr(g, '_database', None)
if db is None:
db = g._database = sqlite3.connect(DATABASE)
return db
@app.teardown_appcontext
def close_connection(exception):
db = getattr(g, '_database', None)
if db is not None:
db.close()
def serialize_result_to_individual(res,idname="id"):
return {idname: res[0], "parameters":json.loads(res[1]), "elo": res[2]}
class Database:
@staticmethod
def incr_comparisons():
cursor = get_db().cursor()
cursor.execute('UPDATE stats SET num_comparisons = %d WHERE 1 == 1' % (Database.num_comparisons() + 1))
get_db().commit()
@staticmethod
def reset_comparisons():
cursor = get_db().cursor()
cursor.execute('UPDATE stats SET num_comparisons = 0 WHERE 1 == 1')
get_db().commit()
@staticmethod
def num_comparisons():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT num_comparisons FROM stats;')
return cursor.fetchone()[0]
@staticmethod
def current_generation_is_empty():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT * FROM current')
return not cursor.fetchone()
@staticmethod
def add_individual_to_current_generation(parameters):
string = json.dumps(parameters)
cursor = get_db().cursor()
cursor.execute('INSERT INTO current (parameters, elo) VALUES (?, 1000.0)', (string,))
get_db().commit()
@staticmethod
def get_individual_for_id(idd):
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT id, parameters, elo FROM current WHERE id = ?', (idd,))
return serialize_result_to_individual(cursor.fetchone())
@staticmethod
def update_elo_for_id(idd, elo):
db = get_db()
cursor = db.cursor()
cursor.execute('UPDATE current SET elo = ? WHERE id = ?', (elo, idd))
db.commit()
@staticmethod
def get_all_individuals_sorted():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT id, parameters, elo FROM current ORDER BY elo DESC')
return [serialize_result_to_individual(res) for res in cursor.fetchall()]
@staticmethod
def get_random_individuals(num):
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT id, parameters, elo FROM current ORDER BY RANDOM() LIMIT ?', (num,))
return [serialize_result_to_individual(res) for res in cursor.fetchall()]
@staticmethod
def delete_individuals(individuals):
cursor = get_db().cursor()
id_list = ", ".join(map(lambda x: str(x["id"]), individuals))
cursor.execute('DELETE FROM current WHERE id IN (%s)' % id_list)
get_db().commit()
@staticmethod
def get_historical_individuals():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT gen, parameters, elo FROM historical ORDER BY gen')
return [serialize_result_to_individual(res,"gen") for res in cursor.fetchall()]
@staticmethod
def add_historical_individual(individual):
string = json.dumps(individual['parameters'])
elo = individual['elo']
cursor = get_db().cursor()
cursor.execute('INSERT INTO historical (parameters, elo) VALUES (?, ?)', (string,elo))
get_db().commit()
@staticmethod
def record_decision(winner, loser):
db = get_db()
cursor = db.cursor()
data = (winner["id"],json.dumps(winner["parameters"]),loser["id"],json.dumps(loser["parameters"]))
cursor.execute('INSERT INTO decisions (winner_id, winner_parameters, loser_id, loser_parameters) VALUES (?, ?, ?, ?)', data)
| 33.318966 | 132 | 0.636999 | import sqlite3
from flask import g, Flask
from constants import Constants
import json
DATABASE = 'db/sqlite.db'
app = Flask(Constants.APP_NAME)
def get_db():
db = getattr(g, '_database', None)
if db is None:
db = g._database = sqlite3.connect(DATABASE)
return db
@app.teardown_appcontext
def close_connection(exception):
db = getattr(g, '_database', None)
if db is not None:
db.close()
def serialize_result_to_individual(res,idname="id"):
return {idname: res[0], "parameters":json.loads(res[1]), "elo": res[2]}
class Database:
@staticmethod
def incr_comparisons():
cursor = get_db().cursor()
cursor.execute('UPDATE stats SET num_comparisons = %d WHERE 1 == 1' % (Database.num_comparisons() + 1))
get_db().commit()
@staticmethod
def reset_comparisons():
cursor = get_db().cursor()
cursor.execute('UPDATE stats SET num_comparisons = 0 WHERE 1 == 1')
get_db().commit()
@staticmethod
def num_comparisons():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT num_comparisons FROM stats;')
return cursor.fetchone()[0]
@staticmethod
def current_generation_is_empty():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT * FROM current')
return not cursor.fetchone()
@staticmethod
def add_individual_to_current_generation(parameters):
string = json.dumps(parameters)
cursor = get_db().cursor()
cursor.execute('INSERT INTO current (parameters, elo) VALUES (?, 1000.0)', (string,))
get_db().commit()
@staticmethod
def get_individual_for_id(idd):
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT id, parameters, elo FROM current WHERE id = ?', (idd,))
return serialize_result_to_individual(cursor.fetchone())
@staticmethod
def update_elo_for_id(idd, elo):
db = get_db()
cursor = db.cursor()
cursor.execute('UPDATE current SET elo = ? WHERE id = ?', (elo, idd))
db.commit()
@staticmethod
def get_all_individuals_sorted():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT id, parameters, elo FROM current ORDER BY elo DESC')
return [serialize_result_to_individual(res) for res in cursor.fetchall()]
@staticmethod
def get_random_individuals(num):
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT id, parameters, elo FROM current ORDER BY RANDOM() LIMIT ?', (num,))
return [serialize_result_to_individual(res) for res in cursor.fetchall()]
@staticmethod
def delete_individuals(individuals):
cursor = get_db().cursor()
id_list = ", ".join(map(lambda x: str(x["id"]), individuals))
cursor.execute('DELETE FROM current WHERE id IN (%s)' % id_list)
get_db().commit()
@staticmethod
def get_historical_individuals():
db = get_db()
cursor = db.cursor()
cursor.execute('SELECT gen, parameters, elo FROM historical ORDER BY gen')
return [serialize_result_to_individual(res,"gen") for res in cursor.fetchall()]
@staticmethod
def add_historical_individual(individual):
string = json.dumps(individual['parameters'])
elo = individual['elo']
cursor = get_db().cursor()
cursor.execute('INSERT INTO historical (parameters, elo) VALUES (?, ?)', (string,elo))
get_db().commit()
@staticmethod
def record_decision(winner, loser):
db = get_db()
cursor = db.cursor()
data = (winner["id"],json.dumps(winner["parameters"]),loser["id"],json.dumps(loser["parameters"]))
cursor.execute('INSERT INTO decisions (winner_id, winner_parameters, loser_id, loser_parameters) VALUES (?, ?, ?, ?)', data)
| true | true |
f730492b5e3c2fa866c5ccaab58e6a2dbe382054 | 2,222 | py | Python | cirq-core/cirq/devices/device_test.py | dabacon/Cirq | 54286063f679d67501ff1b905cd16b879feaae27 | [
"Apache-2.0"
] | 1 | 2021-04-29T15:30:32.000Z | 2021-04-29T15:30:32.000Z | cirq-core/cirq/devices/device_test.py | dabacon/Cirq | 54286063f679d67501ff1b905cd16b879feaae27 | [
"Apache-2.0"
] | 1 | 2020-04-03T20:23:20.000Z | 2020-04-03T20:23:20.000Z | cirq-core/cirq/devices/device_test.py | dabacon/Cirq | 54286063f679d67501ff1b905cd16b879feaae27 | [
"Apache-2.0"
] | 2 | 2021-09-22T11:16:46.000Z | 2021-09-23T12:55:22.000Z | # pylint: disable=wrong-or-nonexistent-copyright-notice
import pytest
import cirq
def test_qubit_set():
class RawDevice(cirq.Device):
pass
assert RawDevice().qubit_set() is None
class QubitFieldDevice(cirq.Device):
def __init__(self):
self.qubits = cirq.LineQubit.range(3)
assert QubitFieldDevice().qubit_set() == frozenset(cirq.LineQubit.range(3))
class PrivateQubitFieldDevice(cirq.Device):
def __init__(self):
self._qubits = cirq.LineQubit.range(4)
assert PrivateQubitFieldDevice().qubit_set() == frozenset(cirq.LineQubit.range(4))
class QubitMethodDevice(cirq.Device):
def qubits(self):
return cirq.LineQubit.range(5)
assert QubitMethodDevice().qubit_set() == frozenset(cirq.LineQubit.range(5))
class PrivateQubitMethodDevice(cirq.Device):
def _qubits(self):
return cirq.LineQubit.range(6)
assert PrivateQubitMethodDevice().qubit_set() == frozenset(cirq.LineQubit.range(6))
def test_qid_pairs():
class RawDevice(cirq.Device):
pass
assert RawDevice().qid_pairs() is None
class QubitFieldDevice(cirq.Device):
def __init__(self, qubits):
self.qubits = qubits
assert len(QubitFieldDevice(cirq.LineQubit.range(10)).qid_pairs()) == 9
assert len(QubitFieldDevice(cirq.GridQubit.rect(10, 10)).qid_pairs()) == 180
assert len(QubitFieldDevice([cirq.NamedQubit(str(s)) for s in range(10)]).qid_pairs()) == 45
def test_qid_pair():
q0, q1, q2, q3 = cirq.LineQubit.range(4)
e1 = cirq.SymmetricalQidPair(q0, q1)
e2 = cirq.SymmetricalQidPair(q1, q0)
e3 = cirq.SymmetricalQidPair(q2, q3)
assert e1 == e2
assert e2 != e3
assert repr(e1) == "cirq.QidPair(cirq.LineQubit(0), cirq.LineQubit(1))"
assert len(e1) == 2
a, b = e1
assert (a, b) == (q0, q1)
a, b = e2
assert (a, b) == (q0, q1)
assert q0 in e1
assert q1 in e1
assert q2 not in e1
set1 = frozenset([e1, e2])
set2 = frozenset([e2, e3])
assert len(set1) == 1
assert len(set2) == 2
with pytest.raises(ValueError, match='A QidPair cannot have identical qids.'):
cirq.SymmetricalQidPair(q0, q0)
| 28.487179 | 96 | 0.654815 |
import pytest
import cirq
def test_qubit_set():
class RawDevice(cirq.Device):
pass
assert RawDevice().qubit_set() is None
class QubitFieldDevice(cirq.Device):
def __init__(self):
self.qubits = cirq.LineQubit.range(3)
assert QubitFieldDevice().qubit_set() == frozenset(cirq.LineQubit.range(3))
class PrivateQubitFieldDevice(cirq.Device):
def __init__(self):
self._qubits = cirq.LineQubit.range(4)
assert PrivateQubitFieldDevice().qubit_set() == frozenset(cirq.LineQubit.range(4))
class QubitMethodDevice(cirq.Device):
def qubits(self):
return cirq.LineQubit.range(5)
assert QubitMethodDevice().qubit_set() == frozenset(cirq.LineQubit.range(5))
class PrivateQubitMethodDevice(cirq.Device):
def _qubits(self):
return cirq.LineQubit.range(6)
assert PrivateQubitMethodDevice().qubit_set() == frozenset(cirq.LineQubit.range(6))
def test_qid_pairs():
class RawDevice(cirq.Device):
pass
assert RawDevice().qid_pairs() is None
class QubitFieldDevice(cirq.Device):
def __init__(self, qubits):
self.qubits = qubits
assert len(QubitFieldDevice(cirq.LineQubit.range(10)).qid_pairs()) == 9
assert len(QubitFieldDevice(cirq.GridQubit.rect(10, 10)).qid_pairs()) == 180
assert len(QubitFieldDevice([cirq.NamedQubit(str(s)) for s in range(10)]).qid_pairs()) == 45
def test_qid_pair():
q0, q1, q2, q3 = cirq.LineQubit.range(4)
e1 = cirq.SymmetricalQidPair(q0, q1)
e2 = cirq.SymmetricalQidPair(q1, q0)
e3 = cirq.SymmetricalQidPair(q2, q3)
assert e1 == e2
assert e2 != e3
assert repr(e1) == "cirq.QidPair(cirq.LineQubit(0), cirq.LineQubit(1))"
assert len(e1) == 2
a, b = e1
assert (a, b) == (q0, q1)
a, b = e2
assert (a, b) == (q0, q1)
assert q0 in e1
assert q1 in e1
assert q2 not in e1
set1 = frozenset([e1, e2])
set2 = frozenset([e2, e3])
assert len(set1) == 1
assert len(set2) == 2
with pytest.raises(ValueError, match='A QidPair cannot have identical qids.'):
cirq.SymmetricalQidPair(q0, q0)
| true | true |
f7304932a03b54e012707f27c70ff5f74726299a | 1,481 | py | Python | contributions/statistics.py | t170815518/contributions-graph | 7a3c3477afaa578789bdbfb41554f267d89f1ace | [
"MIT"
] | 106 | 2015-09-15T04:53:24.000Z | 2022-01-25T22:11:56.000Z | contributions/statistics.py | t170815518/contributions-graph | 7a3c3477afaa578789bdbfb41554f267d89f1ace | [
"MIT"
] | 2 | 2017-05-03T09:27:24.000Z | 2017-09-14T06:49:42.000Z | contributions/statistics.py | t170815518/contributions-graph | 7a3c3477afaa578789bdbfb41554f267d89f1ace | [
"MIT"
] | 22 | 2016-07-25T06:54:06.000Z | 2021-10-15T00:25:56.000Z | #!/usr/bin/env python
import dateutils
def quartiles(values):
"""
Returns the (rough) quintlines of a series of values. This is not intended
to be statistically correct - it's not a quick 'n' dirty measure.
"""
return [i * max(values) / 4 for i in range(5)]
def longest_streak(dates):
"""
Given a list of datetime.date objects, return the longest sublist of
consecutive dates. If there are multiple longest sublists of the same
length, then the first such sublist is returned.
"""
if not dates:
return []
dates = sorted(dates)
streaks = []
current_streak = [dates[0]]
# For each date, check to see whether it extends the current streak
for idx in range(1, len(dates)):
date = dates[idx]
if dateutils.previous_day(date) == current_streak[-1]:
current_streak.append(date)
else:
streaks.append(current_streak)
current_streak = [date]
# When we've gone through all the dates, save the last streak
streaks.append(current_streak)
return max(streaks, key=len)
def current_streak(dates):
"""
Given a list of datetime.date objects, return today's date (if present)
and all/any preceding consecutive dates.
"""
streak = []
current_date = dateutils.today()
while current_date in dates:
streak.append(current_date)
current_date = dateutils.previous_day(current_date)
return sorted(streak) | 27.425926 | 78 | 0.656313 |
import dateutils
def quartiles(values):
return [i * max(values) / 4 for i in range(5)]
def longest_streak(dates):
if not dates:
return []
dates = sorted(dates)
streaks = []
current_streak = [dates[0]]
for idx in range(1, len(dates)):
date = dates[idx]
if dateutils.previous_day(date) == current_streak[-1]:
current_streak.append(date)
else:
streaks.append(current_streak)
current_streak = [date]
streaks.append(current_streak)
return max(streaks, key=len)
def current_streak(dates):
streak = []
current_date = dateutils.today()
while current_date in dates:
streak.append(current_date)
current_date = dateutils.previous_day(current_date)
return sorted(streak) | true | true |
f730495e98e013a7ae5f3395303f9cca988c62f0 | 439 | py | Python | aoc2020/day_03/part_1.py | en0/aoc2020 | a2f41b909dffe1f366682b3d03fd5fbdbc924ec0 | [
"MIT"
] | null | null | null | aoc2020/day_03/part_1.py | en0/aoc2020 | a2f41b909dffe1f366682b3d03fd5fbdbc924ec0 | [
"MIT"
] | null | null | null | aoc2020/day_03/part_1.py | en0/aoc2020 | a2f41b909dffe1f366682b3d03fd5fbdbc924ec0 | [
"MIT"
] | null | null | null | from aoc2020 import *
class Solution(SolutionABC):
expected = 7
def solve(self) -> any:
x, rt, rows = 0, 0, self.resource_lines("input")
try:
# Discard the first row
next(rows)
while True:
row = next(rows)
x = (x + 3) % len(row)
if row[x] == '#':
rt += 1
except StopIteration:
return rt
| 21.95 | 56 | 0.43508 | from aoc2020 import *
class Solution(SolutionABC):
expected = 7
def solve(self) -> any:
x, rt, rows = 0, 0, self.resource_lines("input")
try:
next(rows)
while True:
row = next(rows)
x = (x + 3) % len(row)
if row[x] == '#':
rt += 1
except StopIteration:
return rt
| true | true |
f73049b42798b7cba68acbf111d74dde4a7db3b6 | 1,642 | py | Python | clock/contact/forms.py | chgad/django-clock | f855cd1253574c0582ed53a0ac34206c242f04c9 | [
"MIT"
] | null | null | null | clock/contact/forms.py | chgad/django-clock | f855cd1253574c0582ed53a0ac34206c242f04c9 | [
"MIT"
] | null | null | null | clock/contact/forms.py | chgad/django-clock | f855cd1253574c0582ed53a0ac34206c242f04c9 | [
"MIT"
] | 1 | 2020-03-13T14:42:11.000Z | 2020-03-13T14:42:11.000Z | # -*- coding: utf-8 -*-
from captcha.fields import ReCaptchaField
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from django.conf import settings
from django.core.mail import send_mail
from django.http import HttpResponseRedirect
from django.utils.translation import get_language
from django.utils.translation import ugettext_lazy as _
class ContactForm(forms.Form):
name = forms.CharField(max_length=200, label=_('Name'))
sender = forms.EmailField(label=_('E-Mail'))
message = forms.CharField(widget=forms.Textarea, label=_('Message'))
cc_myself = forms.BooleanField(label=_('Send a copy of the mail to myself'), required=False)
captcha = ReCaptchaField(attrs={'lang': get_language()})
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_action = '.'
self.helper.form_method = 'post'
self.helper.form_class = 'form-halfpage'
self.helper.layout.append(FormActions(
Submit('submit', _('Submit'), css_class='btn btn-primary pull-right'),
))
def send_mail(self, form):
message = form.cleaned_data['message']
sender = form.cleaned_data['sender']
cc_myself = form.cleaned_data['cc_myself']
recipients = settings.CONTACT_FORM_RECIPIENT
if cc_myself:
recipients.append(sender)
send_mail(settings.CONTACT_FORM_SUBJECT, message, sender, recipients)
return HttpResponseRedirect('/thanks/')
| 38.186047 | 96 | 0.708892 |
from captcha.fields import ReCaptchaField
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from django.conf import settings
from django.core.mail import send_mail
from django.http import HttpResponseRedirect
from django.utils.translation import get_language
from django.utils.translation import ugettext_lazy as _
class ContactForm(forms.Form):
name = forms.CharField(max_length=200, label=_('Name'))
sender = forms.EmailField(label=_('E-Mail'))
message = forms.CharField(widget=forms.Textarea, label=_('Message'))
cc_myself = forms.BooleanField(label=_('Send a copy of the mail to myself'), required=False)
captcha = ReCaptchaField(attrs={'lang': get_language()})
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_action = '.'
self.helper.form_method = 'post'
self.helper.form_class = 'form-halfpage'
self.helper.layout.append(FormActions(
Submit('submit', _('Submit'), css_class='btn btn-primary pull-right'),
))
def send_mail(self, form):
message = form.cleaned_data['message']
sender = form.cleaned_data['sender']
cc_myself = form.cleaned_data['cc_myself']
recipients = settings.CONTACT_FORM_RECIPIENT
if cc_myself:
recipients.append(sender)
send_mail(settings.CONTACT_FORM_SUBJECT, message, sender, recipients)
return HttpResponseRedirect('/thanks/')
| true | true |
f73049e4bd6fa9b18eb2403dd15770e56954fa20 | 1,420 | py | Python | ggplot/geoms/__init__.py | briandk/ggplot | 179bc1bb61501bbfa8aaa7ef2d0a25150c8863dd | [
"BSD-2-Clause"
] | null | null | null | ggplot/geoms/__init__.py | briandk/ggplot | 179bc1bb61501bbfa8aaa7ef2d0a25150c8863dd | [
"BSD-2-Clause"
] | null | null | null | ggplot/geoms/__init__.py | briandk/ggplot | 179bc1bb61501bbfa8aaa7ef2d0a25150c8863dd | [
"BSD-2-Clause"
] | 1 | 2020-11-14T13:31:11.000Z | 2020-11-14T13:31:11.000Z | from __future__ import (absolute_import, division, print_function,
unicode_literals)
# geoms
from .geom_abline import geom_abline
from .geom_area import geom_area
from .geom_bar import geom_bar
from .geom_density import geom_density
from .geom_histogram import geom_histogram
from .geom_hline import geom_hline
from .geom_jitter import geom_jitter
from .geom_line import geom_line
from .geom_now_its_art import geom_now_its_art
from .geom_point import geom_point
from .geom_rect import geom_rect
from .geom_step import geom_step
from .geom_text import geom_text
from .geom_tile import geom_tile
from .geom_vline import geom_vline
# stats
from .stat_bin2d import stat_bin2d
from .stat_function import stat_function
from .stat_smooth import stat_smooth
# misc
from .facet_grid import facet_grid
from .facet_wrap import facet_wrap
from .chart_components import *
__facet__ = ['facet_grid', 'facet_wrap']
__geoms__ = ['geom_abline', 'geom_area', 'geom_bar', 'geom_density',
'geom_histogram', 'geom_hline', 'geom_jitter', 'geom_line',
'geom_now_its_art', 'geom_point', 'geom_rect', 'geom_step',
'geom_text', 'geom_tile', 'geom_vline']
__stats__ = ['stat_bin2d', 'stat_smooth', 'stat_function']
__components__ = ['ylab', 'xlab', 'ylim', 'xlim', 'labs', 'ggtitle']
__all__ = __geoms__ + __facet__ + __stats__ + __components__
__all__ = [str(u) for u in __all__]
| 38.378378 | 73 | 0.762676 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .geom_abline import geom_abline
from .geom_area import geom_area
from .geom_bar import geom_bar
from .geom_density import geom_density
from .geom_histogram import geom_histogram
from .geom_hline import geom_hline
from .geom_jitter import geom_jitter
from .geom_line import geom_line
from .geom_now_its_art import geom_now_its_art
from .geom_point import geom_point
from .geom_rect import geom_rect
from .geom_step import geom_step
from .geom_text import geom_text
from .geom_tile import geom_tile
from .geom_vline import geom_vline
from .stat_bin2d import stat_bin2d
from .stat_function import stat_function
from .stat_smooth import stat_smooth
from .facet_grid import facet_grid
from .facet_wrap import facet_wrap
from .chart_components import *
__facet__ = ['facet_grid', 'facet_wrap']
__geoms__ = ['geom_abline', 'geom_area', 'geom_bar', 'geom_density',
'geom_histogram', 'geom_hline', 'geom_jitter', 'geom_line',
'geom_now_its_art', 'geom_point', 'geom_rect', 'geom_step',
'geom_text', 'geom_tile', 'geom_vline']
__stats__ = ['stat_bin2d', 'stat_smooth', 'stat_function']
__components__ = ['ylab', 'xlab', 'ylim', 'xlim', 'labs', 'ggtitle']
__all__ = __geoms__ + __facet__ + __stats__ + __components__
__all__ = [str(u) for u in __all__]
| true | true |
f7304c8702cd45f0614fd6d7a10b566c1f7152f7 | 8,043 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/defaulttlv_8e41257d3d01ec013783dd0fd6697862.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/defaulttlv_8e41257d3d01ec013783dd0fd6697862.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/defaulttlv_8e41257d3d01ec013783dd0fd6697862.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class DefaultTlv(Base):
"""Default Tlv container created by protocols
The DefaultTlv class encapsulates a list of defaultTlv resources that are managed by the system.
A list of resources can be retrieved from the server using the DefaultTlv.find() method.
"""
__slots__ = ()
_SDM_NAME = 'defaultTlv'
_SDM_ATT_MAP = {
'AvailableIncludeInMessages': 'availableIncludeInMessages',
'Description': 'description',
'EnablePerSession': 'enablePerSession',
'IncludeInMessages': 'includeInMessages',
'IsEnabled': 'isEnabled',
'Name': 'name',
}
def __init__(self, parent):
super(DefaultTlv, self).__init__(parent)
@property
def Value(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.value_ac1d7b13584a86b9cf1c28dca3390bca.Value): An instance of the Value class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.value_ac1d7b13584a86b9cf1c28dca3390bca import Value
return Value(self)._select()
@property
def AvailableIncludeInMessages(self):
"""
Returns
-------
- list(str): A list of available messages which are used in the includeInMessages attribute
"""
return self._get_attribute(self._SDM_ATT_MAP['AvailableIncludeInMessages'])
@property
def Description(self):
"""
Returns
-------
- str: Description of the tlv
"""
return self._get_attribute(self._SDM_ATT_MAP['Description'])
@Description.setter
def Description(self, value):
self._set_attribute(self._SDM_ATT_MAP['Description'], value)
@property
def EnablePerSession(self):
"""
Returns
-------
- obj(ixnetwork_restpy.multivalue.Multivalue): Enable TLV per session
"""
from ixnetwork_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnablePerSession']))
@property
def IncludeInMessages(self):
"""
Returns
-------
- list(str): Include the TLV in these protocol messages
"""
return self._get_attribute(self._SDM_ATT_MAP['IncludeInMessages'])
@IncludeInMessages.setter
def IncludeInMessages(self, value):
self._set_attribute(self._SDM_ATT_MAP['IncludeInMessages'], value)
@property
def IsEnabled(self):
"""
Returns
-------
- bool: Enables/disables this tlv
"""
return self._get_attribute(self._SDM_ATT_MAP['IsEnabled'])
@IsEnabled.setter
def IsEnabled(self, value):
self._set_attribute(self._SDM_ATT_MAP['IsEnabled'], value)
@property
def Name(self):
"""
Returns
-------
- str: Name of the tlv
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
def update(self, Description=None, IncludeInMessages=None, IsEnabled=None, Name=None):
"""Updates defaultTlv resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- Description (str): Description of the tlv
- IncludeInMessages (list(str)): Include the TLV in these protocol messages
- IsEnabled (bool): Enables/disables this tlv
- Name (str): Name of the tlv
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(self, AvailableIncludeInMessages=None, Description=None, IncludeInMessages=None, IsEnabled=None, Name=None):
"""Finds and retrieves defaultTlv resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve defaultTlv resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all defaultTlv resources from the server.
Args
----
- AvailableIncludeInMessages (list(str)): A list of available messages which are used in the includeInMessages attribute
- Description (str): Description of the tlv
- IncludeInMessages (list(str)): Include the TLV in these protocol messages
- IsEnabled (bool): Enables/disables this tlv
- Name (str): Name of the tlv
Returns
-------
- self: This instance with matching defaultTlv resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of defaultTlv data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the defaultTlv resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
    def get_device_ids(self, PortNames=None, EnablePerSession=None):
        """Base class infrastructure that gets a list of defaultTlv device ids encapsulated by this object.
        Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
        Args
        ----
        - PortNames (str): optional regex of port names
        - EnablePerSession (str): optional regex of enablePerSession
        Returns
        -------
        - list(int): A list of device ids that meets the regex criteria provided in the method parameters
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # locals() carries the regex filters down to the NGPF device-id lookup
        return self._get_ngpf_device_ids(locals())
| 38.668269 | 165 | 0.652617 |
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class DefaultTlv(Base):
    """SDM wrapper for the `defaultTlv` node of a TLV profile.

    Exposes the node's attributes as Python properties; reads and writes go
    through the Base class helpers using the name mapping in _SDM_ATT_MAP.
    """
    __slots__ = ()
    _SDM_NAME = 'defaultTlv'
    # Maps the Python-facing attribute names to the server-side SDM names.
    _SDM_ATT_MAP = {
        'AvailableIncludeInMessages': 'availableIncludeInMessages',
        'Description': 'description',
        'EnablePerSession': 'enablePerSession',
        'IncludeInMessages': 'includeInMessages',
        'IsEnabled': 'isEnabled',
        'Name': 'name',
    }
    def __init__(self, parent):
        super(DefaultTlv, self).__init__(parent)
    @property
    def Value(self):
        """Return the `value` child resource, selected from the server."""
        # imported lazily to avoid a circular import at module load time
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.value_ac1d7b13584a86b9cf1c28dca3390bca import Value
        return Value(self)._select()
    @property
    def AvailableIncludeInMessages(self):
        """list(str): read-only list of messages usable in IncludeInMessages."""
        return self._get_attribute(self._SDM_ATT_MAP['AvailableIncludeInMessages'])
    @property
    def Description(self):
        """str: description of the tlv."""
        return self._get_attribute(self._SDM_ATT_MAP['Description'])
    @Description.setter
    def Description(self, value):
        self._set_attribute(self._SDM_ATT_MAP['Description'], value)
    @property
    def EnablePerSession(self):
        """Multivalue: enables/disables the tlv per session."""
        # imported lazily to avoid a circular import at module load time
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnablePerSession']))
    @property
    def IncludeInMessages(self):
        """list(str): protocol messages the TLV is included in."""
        return self._get_attribute(self._SDM_ATT_MAP['IncludeInMessages'])
    @IncludeInMessages.setter
    def IncludeInMessages(self, value):
        self._set_attribute(self._SDM_ATT_MAP['IncludeInMessages'], value)
    @property
    def IsEnabled(self):
        """bool: enables/disables this tlv."""
        return self._get_attribute(self._SDM_ATT_MAP['IsEnabled'])
    @IsEnabled.setter
    def IsEnabled(self, value):
        self._set_attribute(self._SDM_ATT_MAP['IsEnabled'], value)
    @property
    def Name(self):
        """str: name of the tlv."""
        return self._get_attribute(self._SDM_ATT_MAP['Name'])
    @Name.setter
    def Name(self, value):
        self._set_attribute(self._SDM_ATT_MAP['Name'], value)
    def update(self, Description=None, IncludeInMessages=None, IsEnabled=None, Name=None):
        """Update the defaultTlv resource on the server with the given fields."""
        # locals() deliberately carries the keyword arguments as the payload
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
    def find(self, AvailableIncludeInMessages=None, Description=None, IncludeInMessages=None, IsEnabled=None, Name=None):
        """Find defaultTlv resources on the server matching the given regexes."""
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
    def read(self, href):
        """Retrieve a single defaultTlv instance by its href."""
        return self._read(href)
    def get_device_ids(self, PortNames=None, EnablePerSession=None):
        """Return the list of device ids matching the given regex filters."""
        return self._get_ngpf_device_ids(locals())
| true | true |
f7304dbc9e1394e220698ba5c8e2752e04b4528b | 5,563 | py | Python | sources/ebf-demo/scripts/test/imu.py | zwg0106/imx-yocto | e378ca25352a59d1ef84ee95f3386b7314f4565b | [
"MIT"
] | 1 | 2020-01-13T13:16:52.000Z | 2020-01-13T13:16:52.000Z | sources/ebf-demo/scripts/test/imu.py | zwg0106/imx-yocto | e378ca25352a59d1ef84ee95f3386b7314f4565b | [
"MIT"
] | 3 | 2019-11-20T02:53:01.000Z | 2019-12-26T03:00:15.000Z | sources/ebf-demo/scripts/test/imu.py | zwg0106/imx-yocto | e378ca25352a59d1ef84ee95f3386b7314f4565b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import pygame
from OpenGL.GL import *
from OpenGL.GLU import *
import socket
import json
from pygame.locals import *
SCREEN_SIZE = (800, 600)
address = ('', 5000)
def resize(width, height):
    """Set up the GL viewport, projection and camera for a width x height window."""
    glViewport(0, 0, width, height)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    # 45 degree vertical FOV; near/far clipping planes at 0.001/10.0
    gluPerspective(45.0, float(width) / height, 0.001, 10.0)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    # camera at (1, 2, -5) looking at the origin, +Y up
    gluLookAt(1.0, 2.0, -5.0,
              0.0, 0.0, 0.0,
              0.0, 1.0, 0.0)
def init():
    """One-time GL state setup: depth test, black background, smooth shading."""
    glEnable(GL_DEPTH_TEST)
    glClearColor(0.0, 0.0, 0.0, 0.0)
    glShadeModel(GL_SMOOTH)
    glEnable(GL_BLEND)
    glEnable(GL_POLYGON_SMOOTH)
    glHint(GL_POLYGON_SMOOTH_HINT, GL_NICEST)
def setupSocket():
    """Create the module-global UDP socket `sock` and bind it to `address`."""
    # setup socket, blocking by default
    global sock
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(address)
def read_data():
    """Block for one UDP packet and update the global IMU readings.

    The packet is a JSON object with keys "filter" (three filtered angles),
    "accel" (three accelerations) and "temp"; all globals are zeroed first.
    """
    global ax, ay, az, acx, acy, acz, temp
    ax = ay = az = acx = acy = acz = temp = 0.0
    msg, addr = sock.recvfrom(1024)
    if msg:
        # BUGFIX: the original called msg.decode() and discarded the result
        # (bytes are immutable); decode once and parse the resulting string.
        data = json.loads(msg.decode())
        ax, ay, az = data["filter"]
        acx, acy, acz = data["accel"]
        temp = data["temp"]
def drawText(position, textString):
    """Render `textString` with pygame and blit it at the GL raster `position`."""
    font = pygame.font.SysFont("Courier", 18, True)
    # white text on a black background, then converted to a raw RGBA buffer
    textSurface = font.render(textString, True, (255,255,255,255), (0,0,0,255))
    textData = pygame.image.tostring(textSurface, "RGBA", True)
    glRasterPos3d(*position)
    glDrawPixels(textSurface.get_width(), textSurface.get_height(), GL_RGBA, GL_UNSIGNED_BYTE, textData)
def run():
    """Main loop: receive IMU packets over UDP and render an attitude cube.

    Blocks on read_data() each frame, so the frame rate follows the sender.
    Exits on window close or ESC.
    """
    setupSocket()
    pygame.init()
    screen = pygame.display.set_mode(SCREEN_SIZE, HWSURFACE | OPENGL | DOUBLEBUF)
    resize(*SCREEN_SIZE)
    init()
    # NOTE(review): clock, angle and then are never used afterwards
    clock = pygame.time.Clock()
    cube = Cube((0.0, 0.0, 0.0), (.5, .5, .7))
    angle = 0
    while True:
        then = pygame.time.get_ticks()
        for event in pygame.event.get():
            if event.type == QUIT:
                return
            if event.type == KEYUP and event.key == K_ESCAPE:
                return
        # blocks until the next UDP packet updates the global readings
        read_data()
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        # on-screen telemetry read-outs (angles, accelerations, temperature)
        text = "pitch: " + str("{0:.1f}".format(ay)) + " roll: " + str("{0:.1f}".format(ax))
        drawText((1, -4, 2), text)
        text = "accx: " + str("{0:.2f}".format(acx)) + " accy: " + str("{0:.2f}".format(acy)) + " accz: " + str(
            "{0:.2f}".format(acz))
        drawText((1, -4.3, 2), text)
        text = "temp: " + str("{0:.1f}".format(temp))
        drawText((1, -4.6, 2), text)
        # white wire-frame reference grid on the walls of the bounding box
        glColor((1., 1., 1.))
        glLineWidth(1)
        glBegin(GL_LINES)
        for x in range(-20, 22, 2):
            glVertex3f(x / 10., -1, -1)
            glVertex3f(x / 10., -1, 1)
        for x in range(-20, 22, 2):
            glVertex3f(x / 10., -1, 1)
            glVertex3f(x / 10., 1, 1)
        for z in range(-10, 12, 2):
            glVertex3f(-2, -1, z / 10.)
            glVertex3f(2, -1, z / 10.)
        for z in range(-10, 12, 2):
            glVertex3f(-2, -1, z / 10.)
            glVertex3f(-2, 1, z / 10.)
        for z in range(-10, 12, 2):
            glVertex3f(2, -1, z / 10.)
            glVertex3f(2, 1, z / 10.)
        for y in range(-10, 12, 2):
            glVertex3f(-2, y / 10., 1)
            glVertex3f(2, y / 10., 1)
        for y in range(-10, 12, 2):
            glVertex3f(-2, y / 10., 1)
            glVertex3f(-2, y / 10., -1)
        for y in range(-10, 12, 2):
            glVertex3f(2, y / 10., 1)
            glVertex3f(2, y / 10., -1)
        glEnd()
        # orient the cube with the filtered angles received from the IMU
        glPushMatrix()
        glRotate(az, 0, 1, 0)
        glRotate(ay, 1, 0, 0)
        glRotate(ax, 0, 0, 1)
        cube.render()
        glPopMatrix()
        pygame.display.flip()
class Cube(object):
    """An axis-aligned 2 x 0.4 x 1 box rendered with OpenGL immediate mode.

    :param position: stored but not applied here (callers translate/rotate
        via the GL matrix stack before calling render()).
    :param color: RGB tuple used for every face except face 1.
    """
    def __init__(self, position, color):
        self.position = position
        self.color = color
    # Cube information: 8 corner vertices centred on the origin
    num_faces = 6
    vertices = [(-1.0, -0.2, 0.5),
                (1.0, -0.2, 0.5),
                (1.0, 0.2, 0.5),
                (-1.0, 0.2, 0.5),
                (-1.0, -0.2, -0.5),
                (1.0, -0.2, -0.5),
                (1.0, 0.2, -0.5),
                (-1.0, 0.2, -0.5)]
    normals = [(0.0, 0.0, +1.0),  # front
               (0.0, 0.0, -1.0),  # back
               (+1.0, 0.0, 0.0),  # right
               (-1.0, 0.0, 0.0),  # left
               (0.0, +1.0, 0.0),  # top
               (0.0, -1.0, 0.0)]  # bottom
    vertex_indices = [(0, 1, 2, 3),  # front
                      (4, 5, 6, 7),  # back
                      (1, 5, 6, 2),  # right
                      (0, 4, 7, 3),  # left
                      (3, 2, 6, 7),  # top
                      (0, 1, 5, 4)]  # bottom
    def render(self):
        """Draw all 6 faces; face 1 (back) is painted red as an orientation marker."""
        # BUGFIX: removed unused local `then = pygame.time.get_ticks()`
        vertices = self.vertices
        glBegin(GL_QUADS)
        for face_no in range(self.num_faces):
            if face_no == 1:
                glColor(1.0, 0.0, 0.0)
            else:
                glColor(self.color)
            glNormal3dv(self.normals[face_no])
            v1, v2, v3, v4 = self.vertex_indices[face_no]
            glVertex(vertices[v1])
            glVertex(vertices[v2])
            glVertex(vertices[v3])
            glVertex(vertices[v4])
        glEnd()
# Script entry point: start the UDP listener and the render loop.
if __name__ == "__main__":
    run()
import pygame
from OpenGL.GL import *
from OpenGL.GLU import *
import socket
import json
from pygame.locals import *
SCREEN_SIZE = (800, 600)
address = ('', 5000)
def resize(width, height):
    """Set up the GL viewport, projection and camera for a width x height window."""
    glViewport(0, 0, width, height)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    # 45 degree vertical FOV; near/far clipping planes at 0.001/10.0
    gluPerspective(45.0, float(width) / height, 0.001, 10.0)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    # camera at (1, 2, -5) looking at the origin, +Y up
    gluLookAt(1.0, 2.0, -5.0,
              0.0, 0.0, 0.0,
              0.0, 1.0, 0.0)
def init():
    """One-time GL state setup: depth test, black background, smooth shading."""
    glEnable(GL_DEPTH_TEST)
    glClearColor(0.0, 0.0, 0.0, 0.0)
    glShadeModel(GL_SMOOTH)
    glEnable(GL_BLEND)
    glEnable(GL_POLYGON_SMOOTH)
    glHint(GL_POLYGON_SMOOTH_HINT, GL_NICEST)
def setupSocket():
    """Create the module-global UDP socket `sock` and bind it to `address`."""
    global sock
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(address)
def read_data():
    """Block for one UDP packet and update the global IMU readings.

    The packet is a JSON object with keys "filter" (three filtered angles),
    "accel" (three accelerations) and "temp"; all globals are zeroed first.
    """
    global ax, ay, az, acx, acy, acz, temp
    ax = ay = az = acx = acy = acz = temp = 0.0
    msg, addr = sock.recvfrom(1024)
    if msg:
        # BUGFIX: the original called msg.decode() and discarded the result
        # (bytes are immutable); decode once and parse the resulting string.
        data = json.loads(msg.decode())
        ax, ay, az = data["filter"]
        acx, acy, acz = data["accel"]
        temp = data["temp"]
def drawText(position, textString):
    """Render `textString` with pygame and blit it at the GL raster `position`."""
    font = pygame.font.SysFont("Courier", 18, True)
    # white text on a black background, then converted to a raw RGBA buffer
    textSurface = font.render(textString, True, (255,255,255,255), (0,0,0,255))
    textData = pygame.image.tostring(textSurface, "RGBA", True)
    glRasterPos3d(*position)
    glDrawPixels(textSurface.get_width(), textSurface.get_height(), GL_RGBA, GL_UNSIGNED_BYTE, textData)
def run():
    """Main loop: receive IMU packets over UDP and render an attitude cube.

    Blocks on read_data() each frame, so the frame rate follows the sender.
    Exits on window close or ESC.
    """
    setupSocket()
    pygame.init()
    screen = pygame.display.set_mode(SCREEN_SIZE, HWSURFACE | OPENGL | DOUBLEBUF)
    resize(*SCREEN_SIZE)
    init()
    # NOTE(review): clock, angle and then are never used afterwards
    clock = pygame.time.Clock()
    cube = Cube((0.0, 0.0, 0.0), (.5, .5, .7))
    angle = 0
    while True:
        then = pygame.time.get_ticks()
        for event in pygame.event.get():
            if event.type == QUIT:
                return
            if event.type == KEYUP and event.key == K_ESCAPE:
                return
        # blocks until the next UDP packet updates the global readings
        read_data()
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        # on-screen telemetry read-outs (angles, accelerations, temperature)
        text = "pitch: " + str("{0:.1f}".format(ay)) + " roll: " + str("{0:.1f}".format(ax))
        drawText((1, -4, 2), text)
        text = "accx: " + str("{0:.2f}".format(acx)) + " accy: " + str("{0:.2f}".format(acy)) + " accz: " + str(
            "{0:.2f}".format(acz))
        drawText((1, -4.3, 2), text)
        text = "temp: " + str("{0:.1f}".format(temp))
        drawText((1, -4.6, 2), text)
        # white wire-frame reference grid on the walls of the bounding box
        glColor((1., 1., 1.))
        glLineWidth(1)
        glBegin(GL_LINES)
        for x in range(-20, 22, 2):
            glVertex3f(x / 10., -1, -1)
            glVertex3f(x / 10., -1, 1)
        for x in range(-20, 22, 2):
            glVertex3f(x / 10., -1, 1)
            glVertex3f(x / 10., 1, 1)
        for z in range(-10, 12, 2):
            glVertex3f(-2, -1, z / 10.)
            glVertex3f(2, -1, z / 10.)
        for z in range(-10, 12, 2):
            glVertex3f(-2, -1, z / 10.)
            glVertex3f(-2, 1, z / 10.)
        for z in range(-10, 12, 2):
            glVertex3f(2, -1, z / 10.)
            glVertex3f(2, 1, z / 10.)
        for y in range(-10, 12, 2):
            glVertex3f(-2, y / 10., 1)
            glVertex3f(2, y / 10., 1)
        for y in range(-10, 12, 2):
            glVertex3f(-2, y / 10., 1)
            glVertex3f(-2, y / 10., -1)
        for y in range(-10, 12, 2):
            glVertex3f(2, y / 10., 1)
            glVertex3f(2, y / 10., -1)
        glEnd()
        # orient the cube with the filtered angles received from the IMU
        glPushMatrix()
        glRotate(az, 0, 1, 0)
        glRotate(ay, 1, 0, 0)
        glRotate(ax, 0, 0, 1)
        cube.render()
        glPopMatrix()
        pygame.display.flip()
class Cube(object):
    """An axis-aligned 2 x 0.4 x 1 box rendered with OpenGL immediate mode.

    :param position: stored but not applied here (callers translate/rotate
        via the GL matrix stack before calling render()).
    :param color: RGB tuple used for every face except face 1.
    """
    def __init__(self, position, color):
        self.position = position
        self.color = color
    # Cube information: 8 corner vertices centred on the origin
    num_faces = 6
    vertices = [(-1.0, -0.2, 0.5),
                (1.0, -0.2, 0.5),
                (1.0, 0.2, 0.5),
                (-1.0, 0.2, 0.5),
                (-1.0, -0.2, -0.5),
                (1.0, -0.2, -0.5),
                (1.0, 0.2, -0.5),
                (-1.0, 0.2, -0.5)]
    normals = [(0.0, 0.0, +1.0),  # front
               (0.0, 0.0, -1.0),  # back
               (+1.0, 0.0, 0.0),  # right
               (-1.0, 0.0, 0.0),  # left
               (0.0, +1.0, 0.0),  # top
               (0.0, -1.0, 0.0)]  # bottom
    vertex_indices = [(0, 1, 2, 3),  # front
                      (4, 5, 6, 7),  # back
                      (1, 5, 6, 2),  # right
                      (0, 4, 7, 3),  # left
                      (3, 2, 6, 7),  # top
                      (0, 1, 5, 4)]  # bottom
    def render(self):
        """Draw all 6 faces; face 1 is painted red as an orientation marker."""
        # BUGFIX: removed unused local `then = pygame.time.get_ticks()`
        vertices = self.vertices
        glBegin(GL_QUADS)
        for face_no in range(self.num_faces):
            if face_no == 1:
                glColor(1.0, 0.0, 0.0)
            else:
                glColor(self.color)
            glNormal3dv(self.normals[face_no])
            v1, v2, v3, v4 = self.vertex_indices[face_no]
            glVertex(vertices[v1])
            glVertex(vertices[v2])
            glVertex(vertices[v3])
            glVertex(vertices[v4])
        glEnd()
# Script entry point: start the UDP listener and the render loop.
if __name__ == "__main__":
    run()
f7304e9d38fb2af01ffb1db1e0e42104960448d1 | 13,856 | py | Python | fox/connection.py | piger/fox | d411189e5eba9eafb334a059853a9af6ea52ba06 | [
"BSD-2-Clause"
] | 6 | 2019-04-23T16:10:40.000Z | 2021-11-17T10:18:25.000Z | fox/connection.py | piger/fox | d411189e5eba9eafb334a059853a9af6ea52ba06 | [
"BSD-2-Clause"
] | null | null | null | fox/connection.py | piger/fox | d411189e5eba9eafb334a059853a9af6ea52ba06 | [
"BSD-2-Clause"
] | null | null | null | import os
import shlex
import getpass
import warnings
import asyncio
import logging
import collections
import atexit
from typing import Optional, Dict, Deque
import tqdm
import asyncssh
from .conf import env, options_to_connect
from .utils import run_in_loop, CommandResult, prepare_environment, split_lines
# disable annoying warnings (we can't fix the problems in 3rd party libs)
warnings.simplefilter("ignore")
log = logging.getLogger(__name__)
# A cache of Connection objects indexed by *name* (not hostname!). We only cache connections creates
# with the global run() and sudo() methods. Maybe the tunnels too?
_connections_cache: Dict[str, "Connection"] = {}
def _clean_connections():
    """atexit hook: close every cached Connection that is still open."""
    # would be better to close them all at once with gather() or similar
    for hostname, conn in _connections_cache.items():
        if conn.connected:
            log.info(f"Cleaning up connection for {hostname}")
            conn.disconnect()
atexit.register(_clean_connections)
class Connection:
    """A SSH connection to a remote server.

    :param hostname: hostname of the remote server.
    :param username: the username used to log into the remote server.
    :param port: the port for connecting to the remote server (usually 22).
    :param private_key: the optional path to a OpenSSH private key.
    :param password: the optional password used to authenticate to the remote server.
    :param agent_path: the optional path to a OpenSSH agent socket.
    :param tunnel: the optional hostname of another server that will be used as tunnel.
    :param nickname: the hostname of the server as passed on the command line (could be
        different from the real hostname configured in `~/.ssh/config`).
    """

    def __init__(
        self,
        hostname: str,
        username: str,
        port: int,
        private_key=None,
        password: Optional[str] = None,
        agent_path: Optional[str] = None,
        tunnel: Optional[str] = None,
        nickname: Optional[str] = None,
    ):
        self.hostname = hostname
        self.username = username
        self.port = port
        self.private_key = private_key
        self.password = password
        self.agent_path = agent_path
        self.tunnel = tunnel
        # default the display name to the real hostname
        self.nickname = nickname if nickname else hostname
        self._connection: Optional[asyncssh.SSHClientConnection] = None
        self._sftp_client: Optional[asyncssh.SFTPClient] = None

    async def _read_from(self, stream, writer, maxlen=10, echo=True) -> str:
        """Consume `stream`, echo complete lines and answer sudo prompts.

        Only the last `maxlen` 1 KiB chunks are kept and returned joined
        together, so very long outputs are truncated on purpose.
        """
        buf: Deque[str] = collections.deque(maxlen=maxlen)
        trail = ""
        while True:
            data = await stream.read(1024)
            if data == "":
                break
            # everything gets stored in `buf` (within its limits)
            buf.append(data)
            # prepend any output left unprinted by the previous chunk
            if trail:
                data = trail + data
                trail = ""
            # split lines and keep any non-newline ended data
            lines, rest = split_lines(data)
            if echo:
                for line in lines:
                    print(f"[{self.nickname}] {line}")
            # if the last part of `data` contains the sudo prompt, handle it
            if rest.endswith(env.sudo_prompt):
                print(f"[{self.nickname}] {rest}")
                # BUGFIX: `lines` can be empty when the chunk holds only the
                # prompt; indexing lines[-1] unconditionally raised IndexError.
                if lines and lines[-1] == "Sorry, try again.":
                    # sudo rejected the cached password: forget it and re-ask
                    print("Unsetting env.sudo_password")
                    env.sudo_password = None
                if env.sudo_password is None:
                    env.sudo_password = getpass.getpass("Need password for sudo: ")
                writer.write(f"{env.sudo_password}\n")
            else:
                if rest:
                    trail += rest
        return "".join(buf)

    async def _run(
        self,
        command: str,
        sudo=False,
        cd: Optional[str] = None,
        pty=False,
        environ: Optional[Dict[str, str]] = None,
        echo=True,
        **kwargs,
    ) -> CommandResult:
        """Run a shell command on the remote host and capture its output.

        :param command: the command line to execute.
        :param sudo: wrap the command with ``sudo`` when True.
        :param cd: optional directory to ``cd`` into first.
        :param pty: request a remote pty (uses env.term_type / env.term_size).
        :param environ: optional environment variables for the command.
        :param echo: print the remote output while it is received.
        """
        if self._connection is None:
            await self._connect()
        original_command = command
        if cd:
            command = 'cd "{}" && {}'.format(cd, command)
        env_command = prepare_environment(environ)
        log.debug(f"*{self.nickname}* environment for command: {env_command}")
        # the environment assignments always prefix the command (this hoists
        # the duplicated assignment that appeared in both branches)
        command = f"{env_command}{command}"
        if sudo:
            # -S reads the password from stdin, -p sets a recognizable prompt
            command = f"sudo -S -p {shlex.quote(env.sudo_prompt)} $SHELL -c {shlex.quote(command)}"
        log.debug(f"*{self.nickname}* final command: {command}")
        args = {}
        if pty:
            args.update({"term_type": env.term_type, "term_size": env.term_size})
        async with self._connection.create_process(command, **args) as proc:  # type: ignore
            # drain stdout and stderr concurrently so neither pipe stalls
            stdout, stderr = await asyncio.gather(
                self._read_from(proc.stdout, proc.stdin, echo=echo),
                self._read_from(proc.stderr, proc.stdin, echo=echo),
            )
        return CommandResult(
            command=original_command,
            actual_command=command,
            exit_code=proc.exit_status,
            stdout=stdout,
            # if we use a pty this will be empty
            stderr=stderr,
            hostname=self.nickname,
            sudo=sudo,
        )

    # use the event loop
    def run(self, command, pty=True, cd=None, environ=None, echo=True) -> CommandResult:
        """Execute a command on the remote server.

        :param command: the command line string to execute.
        :param pty: wether to request a remote pty.
        :param cd: the optional name of the directory where the command will be executed.
        :param environ: an optional dictionary containing environment variables to set
            when executing the command.
        :param echo: set to `False` to hide the output of the command.
        """
        print(f"*{self.nickname}* Running: {command}")
        # BUGFIX: `echo` was accepted but never forwarded to _run()
        kwargs = {"pty": pty, "cd": cd, "environ": environ, "echo": echo}
        return run_in_loop(self._run(command, **kwargs))

    # use the event loop
    def sudo(self, command, pty=True, cd=None, environ=None, echo=True) -> CommandResult:
        """Execute a command with sudo on the remote server.

        :param command: the command line string to execute.
        :param pty: wether to request a remote pty.
        :param cd: the optional name of the directory where the command will be executed.
        :param environ: an optional dictionary containing environment variables to set
            when executing the command.
        :param echo: set to `False` to hide the output of the command.
        """
        print(f"*{self.nickname}* - Sudo: {command}")
        # BUGFIX: `echo` was accepted but never forwarded to _run()
        kwargs = {"pty": pty, "cd": cd, "sudo": True, "environ": environ, "echo": echo}
        return run_in_loop(self._run(command, **kwargs))

    async def _connect(self):
        """Open the SSH connection, optionally through `self.tunnel`."""
        log.info(f"Connecting to {self.hostname}:{self.port}")
        args = {"username": self.username}
        if env.use_known_hosts is False:
            args["known_hosts"] = None
        if self.tunnel:
            log.info(f"Connecting to tunnel {self.tunnel}")
            tunnel_conn = _get_connection(self.tunnel, use_cache=False)
            await tunnel_conn._connect()
            args["tunnel"] = tunnel_conn
        # we either use the private key OR the agent; loading the private key
        # might fail while the agent could still be working.
        if self.agent_path:
            args["agent_path"] = self.agent_path
        elif self.private_key:
            args["client_keys"] = [self.private_key]
        # this may throw several exceptions, e.g.
        # asyncssh.misc.HostKeyNotVerifiable: Host key is not trusted
        self._connection = await asyncssh.connect(self.hostname, self.port, **args)

    # use the event loop
    def disconnect(self):
        """Close the SSH connection to the server."""
        # Maybe here we should also delete ourself from the connection cache,
        # but we don't know our own "nickname"!
        if self._connection is not None:
            self._connection.close()
            run_in_loop(self._connection.wait_closed())
            self._connection = None
            print("disconnected")

    @property
    def connected(self) -> bool:
        """True while an SSH connection object is held."""
        return self._connection is not None

    async def get_sftp_client(self) -> asyncssh.SFTPClient:
        """Return (and lazily create) the SFTP client for this connection."""
        if self._connection is None:
            await self._connect()
        if self._sftp_client is None:
            self._sftp_client = await self._connection.start_sftp_client()  # type: ignore
        return self._sftp_client

    async def _get(self, remotefile, localfile):
        """Download `remotefile` to `localfile` with a block-based progress bar."""
        sftp_client = await self.get_sftp_client()
        size = await sftp_client.getsize(remotefile)
        # 16384 is asyncssh's default SFTP block size
        block_size = 16384
        # the bar counts blocks, not bytes; at least one block even when empty
        # (the original `if i < 0` branch was dead code: size is never negative)
        total_blocks = size // block_size + 1
        bar = tqdm.tqdm(total=total_blocks, desc=os.path.basename(remotefile))

        def _update_bar(source, dest, cur, tot):
            bar.update(1)

        try:
            await sftp_client.get(
                remotefile, localfile, progress_handler=_update_bar, block_size=block_size
            )
        finally:
            # BUGFIX: close the bar even when the transfer raises
            bar.close()

    # use the event loop
    def get(self, remotefile, localfile):
        """Download a file from the remote server.

        :param remotefile: the path to the remote file to download.
        :param localfile: the local path where to write the downloaded file.
        """
        run_in_loop(self._get(remotefile, localfile))

    async def _read(self, remotefile) -> bytes:
        """Read `remotefile` into memory, updating a byte-based progress bar."""
        sftp_client = await self.get_sftp_client()
        size = await sftp_client.getsize(remotefile)
        bar = tqdm.tqdm(total=size, desc=os.path.basename(remotefile))
        fd = await sftp_client.open(remotefile, "rb")
        data = []
        try:
            while True:
                # 16384 is the default block size
                buf = await fd.read(16384)
                if buf == b"":
                    break
                data.append(buf)
                bar.update(len(buf))
        finally:
            # BUGFIX: release the handle and the bar even on error
            fd.close()
            bar.close()
        return b"".join(data)

    # use the event loop
    def read(self, remotefile) -> bytes:
        """Read the contents of a remote file.

        :param remotefile: the path of the remote file to read.

        This is useful when you just want to read the contents of a remote
        file without downloading it.
        """
        return run_in_loop(self._read(remotefile))

    async def _put(self, localfile, remotefile):
        """Upload `localfile` to `remotefile` with a block-based progress bar."""
        sftp_client = await self.get_sftp_client()
        size = os.path.getsize(localfile)
        # 16384 is asyncssh's default SFTP block size
        block_size = 16384
        total_blocks = size // block_size + 1
        bar = tqdm.tqdm(total=total_blocks, desc=os.path.basename(localfile))

        def _update_bar(source, dest, cur, tot):
            bar.update(1)

        try:
            await sftp_client.put(
                localfile, remotefile, progress_handler=_update_bar, block_size=block_size
            )
        finally:
            # BUGFIX: close the bar even when the transfer raises
            bar.close()

    # use the event loop
    def put(self, localfile, remotefile):
        """Upload a local file to a remote server.

        :param localfile: the path of the local file to upload.
        :param remotefile: the path where to write the file on the remote server.
        """
        run_in_loop(self._put(localfile, remotefile))

    async def _file_exists(self, remotefile) -> bool:
        sftp_client = await self.get_sftp_client()
        return await sftp_client.exists(remotefile)

    # use the event loop
    def file_exists(self, remotefile) -> bool:
        """Check if a file exists on the remote server.

        :param remotefile: the path of the remote file that will be checked.
        """
        return run_in_loop(self._file_exists(remotefile))
def _get_connection(name=None, use_cache=True) -> Connection:
    """Get a connection for `name`.

    `name` does not need to be a FQDN; it can be a "nickname" from a SSH
    configuration file. Falls back to env.host_string when `name` is None.
    Cached connections are reused unless `use_cache` is False.
    """
    global _connections_cache
    if name is None and env.host_string is None:
        raise RuntimeError("env.host_string is empty!")
    if name is None:
        name = env.host_string
    if use_cache and name in _connections_cache:
        conn = _connections_cache[name]
        # here we delete stale Connections objects.
        if conn.connected:
            return conn
        del _connections_cache[name]
    ssh_options = options_to_connect(name)
    args = {}
    if "identityfile" in ssh_options:
        args["private_key"] = ssh_options["identityfile"]
    if "identityagent" in ssh_options:
        args["agent_path"] = ssh_options["identityagent"]
    # TODO:
    # identitiesonly yes
    # NOTE: we only cache connections created here, and maybe the tunnels.
    # maybe by default we should not re-use the tunnels, as the default behavior of SSH
    c = Connection(
        ssh_options["hostname"], ssh_options["user"], ssh_options["port"], nickname=name, **args
    )
    if use_cache:
        _connections_cache[name] = c
    return c
| 33.795122 | 100 | 0.608978 | import os
import shlex
import getpass
import warnings
import asyncio
import logging
import collections
import atexit
from typing import Optional, Dict, Deque
import tqdm
import asyncssh
from .conf import env, options_to_connect
from .utils import run_in_loop, CommandResult, prepare_environment, split_lines
warnings.simplefilter("ignore")
log = logging.getLogger(__name__)
# A cache of Connection objects indexed by *name* (not hostname!). We only cache connections creates
# with the global run() and sudo() methods. Maybe the tunnels too?
_connections_cache: Dict[str, "Connection"] = {}
def _clean_connections():
    """atexit hook: close every cached Connection that is still open."""
    # would be better to close them all at once with gather() or similar
    for hostname, conn in _connections_cache.items():
        if conn.connected:
            log.info(f"Cleaning up connection for {hostname}")
            conn.disconnect()
atexit.register(_clean_connections)
class Connection:
    """A SSH connection to a remote server.

    :param hostname: hostname of the remote server.
    :param username: the username used to log into the remote server.
    :param port: the port for connecting to the remote server (usually 22).
    :param private_key: the optional path to a OpenSSH private key.
    :param password: the optional password used to authenticate to the remote server.
    :param agent_path: the optional path to a OpenSSH agent socket.
    :param tunnel: the optional hostname of another server that will be used as tunnel.
    :param nickname: the hostname of the server as passed on the command line (could be
        different from the real hostname configured in `~/.ssh/config`).
    """

    def __init__(
        self,
        hostname: str,
        username: str,
        port: int,
        private_key=None,
        password: Optional[str] = None,
        agent_path: Optional[str] = None,
        tunnel: Optional[str] = None,
        nickname: Optional[str] = None,
    ):
        self.hostname = hostname
        self.username = username
        self.port = port
        self.private_key = private_key
        self.password = password
        self.agent_path = agent_path
        self.tunnel = tunnel
        # default the display name to the real hostname
        self.nickname = nickname if nickname else hostname
        self._connection: Optional[asyncssh.SSHClientConnection] = None
        self._sftp_client: Optional[asyncssh.SFTPClient] = None

    async def _read_from(self, stream, writer, maxlen=10, echo=True) -> str:
        """Consume `stream`, echo complete lines and answer sudo prompts.

        Only the last `maxlen` 1 KiB chunks are kept and returned joined
        together, so very long outputs are truncated on purpose.
        """
        buf: Deque[str] = collections.deque(maxlen=maxlen)
        trail = ""
        while True:
            data = await stream.read(1024)
            if data == "":
                break
            # everything gets stored in `buf` (within its limits)
            buf.append(data)
            # prepend any output left unprinted by the previous chunk
            if trail:
                data = trail + data
                trail = ""
            # split lines and keep any non-newline ended data
            lines, rest = split_lines(data)
            if echo:
                for line in lines:
                    print(f"[{self.nickname}] {line}")
            # if the last part of `data` contains the sudo prompt, handle it
            if rest.endswith(env.sudo_prompt):
                print(f"[{self.nickname}] {rest}")
                # BUGFIX: `lines` can be empty when the chunk holds only the
                # prompt; indexing lines[-1] unconditionally raised IndexError.
                if lines and lines[-1] == "Sorry, try again.":
                    # sudo rejected the cached password: forget it and re-ask
                    print("Unsetting env.sudo_password")
                    env.sudo_password = None
                if env.sudo_password is None:
                    env.sudo_password = getpass.getpass("Need password for sudo: ")
                writer.write(f"{env.sudo_password}\n")
            else:
                if rest:
                    trail += rest
        return "".join(buf)

    async def _run(
        self,
        command: str,
        sudo=False,
        cd: Optional[str] = None,
        pty=False,
        environ: Optional[Dict[str, str]] = None,
        echo=True,
        **kwargs,
    ) -> CommandResult:
        """Run a shell command on the remote host and capture its output.

        :param command: the command line to execute.
        :param sudo: wrap the command with ``sudo`` when True.
        :param cd: optional directory to ``cd`` into first.
        :param pty: request a remote pty (uses env.term_type / env.term_size).
        :param environ: optional environment variables for the command.
        :param echo: print the remote output while it is received.
        """
        if self._connection is None:
            await self._connect()
        original_command = command
        if cd:
            command = 'cd "{}" && {}'.format(cd, command)
        env_command = prepare_environment(environ)
        log.debug(f"*{self.nickname}* environment for command: {env_command}")
        # the environment assignments always prefix the command (this hoists
        # the duplicated assignment that appeared in both branches)
        command = f"{env_command}{command}"
        if sudo:
            # -S reads the password from stdin, -p sets a recognizable prompt
            command = f"sudo -S -p {shlex.quote(env.sudo_prompt)} $SHELL -c {shlex.quote(command)}"
        log.debug(f"*{self.nickname}* final command: {command}")
        args = {}
        if pty:
            args.update({"term_type": env.term_type, "term_size": env.term_size})
        async with self._connection.create_process(command, **args) as proc:  # type: ignore
            # drain stdout and stderr concurrently so neither pipe stalls
            stdout, stderr = await asyncio.gather(
                self._read_from(proc.stdout, proc.stdin, echo=echo),
                self._read_from(proc.stderr, proc.stdin, echo=echo),
            )
        return CommandResult(
            command=original_command,
            actual_command=command,
            exit_code=proc.exit_status,
            stdout=stdout,
            # if we use a pty this will be empty
            stderr=stderr,
            hostname=self.nickname,
            sudo=sudo,
        )

    # use the event loop
    def run(self, command, pty=True, cd=None, environ=None, echo=True) -> CommandResult:
        """Execute a command on the remote server.

        :param command: the command line string to execute.
        :param pty: wether to request a remote pty.
        :param cd: the optional name of the directory where the command will be executed.
        :param environ: an optional dictionary containing environment variables to set
            when executing the command.
        :param echo: set to `False` to hide the output of the command.
        """
        print(f"*{self.nickname}* Running: {command}")
        # BUGFIX: `echo` was accepted but never forwarded to _run()
        kwargs = {"pty": pty, "cd": cd, "environ": environ, "echo": echo}
        return run_in_loop(self._run(command, **kwargs))

    # use the event loop
    def sudo(self, command, pty=True, cd=None, environ=None, echo=True) -> CommandResult:
        """Execute a command with sudo on the remote server.

        :param command: the command line string to execute.
        :param pty: wether to request a remote pty.
        :param cd: the optional name of the directory where the command will be executed.
        :param environ: an optional dictionary containing environment variables to set
            when executing the command.
        :param echo: set to `False` to hide the output of the command.
        """
        print(f"*{self.nickname}* - Sudo: {command}")
        # BUGFIX: `echo` was accepted but never forwarded to _run()
        kwargs = {"pty": pty, "cd": cd, "sudo": True, "environ": environ, "echo": echo}
        return run_in_loop(self._run(command, **kwargs))

    async def _connect(self):
        """Open the SSH connection, optionally through `self.tunnel`."""
        log.info(f"Connecting to {self.hostname}:{self.port}")
        args = {"username": self.username}
        if env.use_known_hosts is False:
            args["known_hosts"] = None
        if self.tunnel:
            log.info(f"Connecting to tunnel {self.tunnel}")
            tunnel_conn = _get_connection(self.tunnel, use_cache=False)
            await tunnel_conn._connect()
            args["tunnel"] = tunnel_conn
        # we either use the private key OR the agent; loading the private key
        # might fail while the agent could still be working.
        if self.agent_path:
            args["agent_path"] = self.agent_path
        elif self.private_key:
            args["client_keys"] = [self.private_key]
        # this may throw several exceptions, e.g.
        # asyncssh.misc.HostKeyNotVerifiable: Host key is not trusted
        self._connection = await asyncssh.connect(self.hostname, self.port, **args)

    # use the event loop
    def disconnect(self):
        """Close the SSH connection to the server."""
        if self._connection is not None:
            self._connection.close()
            run_in_loop(self._connection.wait_closed())
            self._connection = None
            print("disconnected")

    @property
    def connected(self) -> bool:
        """True while an SSH connection object is held."""
        return self._connection is not None

    async def get_sftp_client(self) -> asyncssh.SFTPClient:
        """Return (and lazily create) the SFTP client for this connection."""
        if self._connection is None:
            await self._connect()
        if self._sftp_client is None:
            self._sftp_client = await self._connection.start_sftp_client()  # type: ignore
        return self._sftp_client

    async def _get(self, remotefile, localfile):
        """Download `remotefile` to `localfile` with a block-based progress bar."""
        sftp_client = await self.get_sftp_client()
        size = await sftp_client.getsize(remotefile)
        # BUGFIX: this copy of the file had the `block_size` name stripped,
        # leaving a bare `= 16384` (a syntax error); restored here.
        block_size = 16384
        total_blocks = size // block_size + 1
        bar = tqdm.tqdm(total=total_blocks, desc=os.path.basename(remotefile))

        def _update_bar(source, dest, cur, tot):
            bar.update(1)

        try:
            await sftp_client.get(
                remotefile, localfile, progress_handler=_update_bar, block_size=block_size
            )
        finally:
            # close the bar even when the transfer raises
            bar.close()

    def get(self, remotefile, localfile):
        """Download a file from the remote server.

        :param remotefile: the path to the remote file to download.
        :param localfile: the local path where to write the downloaded file.
        """
        run_in_loop(self._get(remotefile, localfile))

    async def _read(self, remotefile) -> bytes:
        """Read `remotefile` into memory, updating a byte-based progress bar."""
        sftp_client = await self.get_sftp_client()
        size = await sftp_client.getsize(remotefile)
        bar = tqdm.tqdm(total=size, desc=os.path.basename(remotefile))
        fd = await sftp_client.open(remotefile, "rb")
        data = []
        try:
            while True:
                # 16384 is the default block size
                buf = await fd.read(16384)
                if buf == b"":
                    break
                data.append(buf)
                bar.update(len(buf))
        finally:
            # release the handle and the bar even on error
            fd.close()
            bar.close()
        return b"".join(data)

    def read(self, remotefile) -> bytes:
        """Read the contents of a remote file without downloading it.

        :param remotefile: the path of the remote file to read.
        """
        return run_in_loop(self._read(remotefile))

    async def _put(self, localfile, remotefile):
        """Upload `localfile` to `remotefile` with a block-based progress bar."""
        sftp_client = await self.get_sftp_client()
        size = os.path.getsize(localfile)
        # BUGFIX: restored the stripped `block_size` name here as well.
        block_size = 16384
        total_blocks = size // block_size + 1
        bar = tqdm.tqdm(total=total_blocks, desc=os.path.basename(localfile))

        def _update_bar(source, dest, cur, tot):
            bar.update(1)

        try:
            await sftp_client.put(
                localfile, remotefile, progress_handler=_update_bar, block_size=block_size
            )
        finally:
            # close the bar even when the transfer raises
            bar.close()

    def put(self, localfile, remotefile):
        """Upload a local file to a remote server.

        :param localfile: the path of the local file to upload.
        :param remotefile: the path where to write the file on the remote server.
        """
        run_in_loop(self._put(localfile, remotefile))

    async def _file_exists(self, remotefile) -> bool:
        sftp_client = await self.get_sftp_client()
        return await sftp_client.exists(remotefile)

    def file_exists(self, remotefile) -> bool:
        """Check if a file exists on the remote server.

        :param remotefile: the path of the remote file that will be checked.
        """
        return run_in_loop(self._file_exists(remotefile))
def _get_connection(name=None, use_cache=True) -> Connection:
    """Get a connection for `name`.

    `name` does not need to be a FQDN; it can be a "nickname" from a SSH
    configuration file. Falls back to env.host_string when `name` is None.
    Cached connections are reused unless `use_cache` is False.
    """
    global _connections_cache
    if name is None and env.host_string is None:
        raise RuntimeError("env.host_string is empty!")
    if name is None:
        name = env.host_string
    if use_cache and name in _connections_cache:
        conn = _connections_cache[name]
        # stale (already disconnected) Connection objects are dropped here
        if conn.connected:
            return conn
        del _connections_cache[name]
    ssh_options = options_to_connect(name)
    args = {}
    if "identityfile" in ssh_options:
        args["private_key"] = ssh_options["identityfile"]
    if "identityagent" in ssh_options:
        args["agent_path"] = ssh_options["identityagent"]
    # only connections created here are cached (tunnels are not)
    c = Connection(
        ssh_options["hostname"], ssh_options["user"], ssh_options["port"], nickname=name, **args
    )
    if use_cache:
        _connections_cache[name] = c
    return c
| true | true |
f7304f89593d1379710d96a3531d54d1dcff8680 | 733 | py | Python | tests/test_toboday.py | Jesse-Yung/jsonclasses | d40c52aec42bcb978a80ceb98b93ab38134dc790 | [
"MIT"
] | 50 | 2021-08-18T08:08:04.000Z | 2022-03-20T07:23:26.000Z | tests/test_toboday.py | Jesse-Yung/jsonclasses | d40c52aec42bcb978a80ceb98b93ab38134dc790 | [
"MIT"
] | 1 | 2021-02-21T03:18:09.000Z | 2021-03-08T01:07:52.000Z | tests/test_toboday.py | Jesse-Yung/jsonclasses | d40c52aec42bcb978a80ceb98b93ab38134dc790 | [
"MIT"
] | 8 | 2021-07-01T02:39:15.000Z | 2021-12-10T02:20:18.000Z | from __future__ import annotations
from datetime import date, datetime
from unittest import TestCase
from tests.classes.super_datetime import SuperDateTime
class TestToboday(TestCase):
    """Tests for the `toboday` transformer (truncate a value to the beginning of its day)."""
    def test_toboday_transforms_datetime_into_the_time_of_beginning_of_day(self):
        # the time-of-day components are zeroed out
        d = SuperDateTime(dtbd=datetime(2021, 10, 11, 17, 37, 27,43235))
        self.assertEqual(d.dtbd, datetime(2021,10, 11, 0, 0))
    def test_toboday_transforms_date_into_the_time_of_beginning_of_day(self):
        # a date has no time component, so it comes back unchanged
        d = SuperDateTime(dbd=date(2021, 10, 11))
        self.assertEqual(d.dbd, date(2021,10, 11))
    def test_toboday_does_not_transform_if_is_not_datetime(self):
        # non-datetime values pass through untouched
        s = SuperDateTime(sbd="12345")
        self.assertEqual(s.sbd, "12345")
| 34.904762 | 81 | 0.747613 | from __future__ import annotations
from datetime import date, datetime
from unittest import TestCase
from tests.classes.super_datetime import SuperDateTime
class TestToboday(TestCase):
def test_toboday_transforms_datetime_into_the_time_of_beginning_of_day(self):
d = SuperDateTime(dtbd=datetime(2021, 10, 11, 17, 37, 27,43235))
self.assertEqual(d.dtbd, datetime(2021,10, 11, 0, 0))
def test_toboday_transforms_date_into_the_time_of_beginning_of_day(self):
d = SuperDateTime(dbd=date(2021, 10, 11))
self.assertEqual(d.dbd, date(2021,10, 11))
def test_toboday_does_not_transform_if_is_not_datetime(self):
s = SuperDateTime(sbd="12345")
self.assertEqual(s.sbd, "12345")
| true | true |
f7305096c852a5bd53317118877b8128c82fd818 | 20,647 | py | Python | web/datasets/migrations/0011_auto_20200515_1115.py | RaulBSantos/maria-quiteria | 9eb1a307099e208ce666bcc0d65be9c9a4cae150 | [
"MIT"
] | 151 | 2019-11-10T02:18:25.000Z | 2022-01-18T14:28:25.000Z | web/datasets/migrations/0011_auto_20200515_1115.py | RaulBSantos/maria-quiteria | 9eb1a307099e208ce666bcc0d65be9c9a4cae150 | [
"MIT"
] | 202 | 2019-11-09T16:27:19.000Z | 2022-03-22T12:41:27.000Z | web/datasets/migrations/0011_auto_20200515_1115.py | RaulBSantos/maria-quiteria | 9eb1a307099e208ce666bcc0d65be9c9a4cae150 | [
"MIT"
] | 69 | 2020-02-05T01:33:35.000Z | 2022-03-30T10:39:27.000Z | # Generated by Django 2.2.12 on 2020-05-15 14:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("datasets", "0010_auto_20200515_0959"),
]
operations = [
migrations.AlterField(
model_name="citycouncilagenda",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="details",
field=models.TextField(blank=True, null=True, verbose_name="Detalhes"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="event_type",
field=models.CharField(
choices=[
("sessao_ordinaria", "Sessão Ordinária"),
("ordem_do_dia", "Ordem do Dia"),
("sessao_solene", "Sessão Solene"),
("sessao_especial", "Sessão Especial"),
("audiencia_publica", "Audiência Pública"),
],
max_length=20,
verbose_name="Tipo do evento",
),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="title",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Título"
),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="council_member",
field=models.CharField(max_length=200, verbose_name="Vereador"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="description",
field=models.CharField(
blank=True, max_length=200, null=True, verbose_name="Descrição"
),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="status",
field=models.CharField(
choices=[
("presente", "Presente"),
("falta_justificada", "Falta Justificada"),
("licenca_justificada", "Licença Justificada"),
("ausente", "Ausente"),
],
max_length=20,
verbose_name="Situação",
),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="excluded",
field=models.BooleanField(default=False, verbose_name="Excluído?"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="budget_unit",
field=models.PositiveIntegerField(
default=101, verbose_name="Unidade orçamentária"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="company_or_person",
field=models.TextField(
blank=True, null=True, verbose_name="Empresa ou pessoa"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="document",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="CNPJ ou CPF"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="excluded",
field=models.BooleanField(default=False, verbose_name="Excluído?"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="external_file_code",
field=models.CharField(
blank=True,
max_length=50,
null=True,
verbose_name="Código do arquivo (externo)",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="external_file_line",
field=models.CharField(
blank=True,
max_length=50,
null=True,
verbose_name="Linha do arquivo (externo)",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="function",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Função"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="group",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Grupo"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="legal_status",
field=models.CharField(
blank=True, max_length=200, null=True, verbose_name="Natureza"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="modality",
field=models.CharField(
blank=True,
choices=[
("convenio", "Convênio"),
("tomada_de_precos", "Tomada de Preço"),
("pregao", "Pregão"),
("inexigibilidade", "Inexigibilidade"),
("convite", "Convite"),
("concorrencia", "Concorrência"),
("dispensa", "Dispensa"),
("isento", "Isento"),
],
max_length=50,
null=True,
verbose_name="Modalidade",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="number",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Número"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="phase",
field=models.CharField(
choices=[
("empenho", "Empenho"),
("liquidacao", "Liquidação"),
("pagamento", "Pagamento"),
],
max_length=20,
verbose_name="Fase",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="phase_code",
field=models.CharField(
blank=True, max_length=20, null=True, verbose_name="Código da fase"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="process_number",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Número do processo"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="published_at",
field=models.DateField(blank=True, null=True, verbose_name="Publicado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="resource",
field=models.CharField(
blank=True, max_length=200, null=True, verbose_name="Fonte"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="subfunction",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Subfunção"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="subgroup",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Subgrupos"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="summary",
field=models.TextField(blank=True, null=True, verbose_name="Descrição"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="event_type",
field=models.CharField(
choices=[
("sessao_ordinaria", "Sessão Ordinária"),
("ordem_do_dia", "Ordem do Dia"),
("sessao_solene", "Sessão Solene"),
("sessao_especial", "Sessão Especial"),
("audiencia_publica", "Audiência Pública"),
],
max_length=20,
verbose_name="Tipo de evento",
),
),
migrations.AlterField(
model_name="citycouncilminute",
name="file_content",
field=models.TextField(
blank=True, null=True, verbose_name="Conteúdo do arquivo"
),
),
migrations.AlterField(
model_name="citycouncilminute",
name="file_url",
field=models.URLField(blank=True, null=True, verbose_name="Endereço (URL)"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="title",
field=models.CharField(
blank=True, max_length=300, null=True, verbose_name="Título"
),
),
migrations.AlterField(
model_name="citycouncilminute",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="cityhallbid",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="cityhallbid",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="cityhallbid",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="cityhallbid",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="cityhallbid",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="gazette",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="gazette",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="gazette",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="gazette",
name="date",
field=models.DateField(null=True, verbose_name="Data"),
),
migrations.AlterField(
model_name="gazette",
name="file_content",
field=models.TextField(
blank=True, null=True, verbose_name="Conteúdo do arquivo"
),
),
migrations.AlterField(
model_name="gazette",
name="file_url",
field=models.URLField(blank=True, null=True, verbose_name="Endereço (URL)"),
),
migrations.AlterField(
model_name="gazette",
name="is_legacy",
field=models.BooleanField(default=False, verbose_name="É do site antigo?"),
),
migrations.AlterField(
model_name="gazette",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="gazette",
name="power",
field=models.CharField(
choices=[
("executivo", "Poder Executivo"),
("legislativo", "Poder Legislativo"),
],
max_length=25,
verbose_name="Poder",
),
),
migrations.AlterField(
model_name="gazette",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="gazette",
name="year_and_edition",
field=models.CharField(max_length=100, verbose_name="Ano e edição"),
),
migrations.AlterField(
model_name="gazetteevent",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="gazetteevent",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="gazetteevent",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="gazetteevent",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="gazetteevent",
name="published_on",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Publicado em"
),
),
migrations.AlterField(
model_name="gazetteevent",
name="secretariat",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Secretaria"
),
),
migrations.AlterField(
model_name="gazetteevent",
name="summary",
field=models.TextField(blank=True, null=True, verbose_name="Sumário"),
),
migrations.AlterField(
model_name="gazetteevent",
name="title",
field=models.CharField(
blank=True, max_length=300, null=True, verbose_name="Título"
),
),
migrations.AlterField(
model_name="gazetteevent",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
]
| 36.350352 | 88 | 0.539885 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("datasets", "0010_auto_20200515_0959"),
]
operations = [
migrations.AlterField(
model_name="citycouncilagenda",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="details",
field=models.TextField(blank=True, null=True, verbose_name="Detalhes"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="event_type",
field=models.CharField(
choices=[
("sessao_ordinaria", "Sessão Ordinária"),
("ordem_do_dia", "Ordem do Dia"),
("sessao_solene", "Sessão Solene"),
("sessao_especial", "Sessão Especial"),
("audiencia_publica", "Audiência Pública"),
],
max_length=20,
verbose_name="Tipo do evento",
),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="title",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Título"
),
),
migrations.AlterField(
model_name="citycouncilagenda",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="council_member",
field=models.CharField(max_length=200, verbose_name="Vereador"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="description",
field=models.CharField(
blank=True, max_length=200, null=True, verbose_name="Descrição"
),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="status",
field=models.CharField(
choices=[
("presente", "Presente"),
("falta_justificada", "Falta Justificada"),
("licenca_justificada", "Licença Justificada"),
("ausente", "Ausente"),
],
max_length=20,
verbose_name="Situação",
),
),
migrations.AlterField(
model_name="citycouncilattendancelist",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="excluded",
field=models.BooleanField(default=False, verbose_name="Excluído?"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilcontract",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="budget_unit",
field=models.PositiveIntegerField(
default=101, verbose_name="Unidade orçamentária"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="company_or_person",
field=models.TextField(
blank=True, null=True, verbose_name="Empresa ou pessoa"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="document",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="CNPJ ou CPF"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="excluded",
field=models.BooleanField(default=False, verbose_name="Excluído?"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="external_file_code",
field=models.CharField(
blank=True,
max_length=50,
null=True,
verbose_name="Código do arquivo (externo)",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="external_file_line",
field=models.CharField(
blank=True,
max_length=50,
null=True,
verbose_name="Linha do arquivo (externo)",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="function",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Função"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="group",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Grupo"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="legal_status",
field=models.CharField(
blank=True, max_length=200, null=True, verbose_name="Natureza"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="modality",
field=models.CharField(
blank=True,
choices=[
("convenio", "Convênio"),
("tomada_de_precos", "Tomada de Preço"),
("pregao", "Pregão"),
("inexigibilidade", "Inexigibilidade"),
("convite", "Convite"),
("concorrencia", "Concorrência"),
("dispensa", "Dispensa"),
("isento", "Isento"),
],
max_length=50,
null=True,
verbose_name="Modalidade",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="number",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Número"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="phase",
field=models.CharField(
choices=[
("empenho", "Empenho"),
("liquidacao", "Liquidação"),
("pagamento", "Pagamento"),
],
max_length=20,
verbose_name="Fase",
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="phase_code",
field=models.CharField(
blank=True, max_length=20, null=True, verbose_name="Código da fase"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="process_number",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Número do processo"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="published_at",
field=models.DateField(blank=True, null=True, verbose_name="Publicado em"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="resource",
field=models.CharField(
blank=True, max_length=200, null=True, verbose_name="Fonte"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="subfunction",
field=models.CharField(
blank=True, max_length=50, null=True, verbose_name="Subfunção"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="subgroup",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Subgrupos"
),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="summary",
field=models.TextField(blank=True, null=True, verbose_name="Descrição"),
),
migrations.AlterField(
model_name="citycouncilexpense",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="date",
field=models.DateField(verbose_name="Data"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="event_type",
field=models.CharField(
choices=[
("sessao_ordinaria", "Sessão Ordinária"),
("ordem_do_dia", "Ordem do Dia"),
("sessao_solene", "Sessão Solene"),
("sessao_especial", "Sessão Especial"),
("audiencia_publica", "Audiência Pública"),
],
max_length=20,
verbose_name="Tipo de evento",
),
),
migrations.AlterField(
model_name="citycouncilminute",
name="file_content",
field=models.TextField(
blank=True, null=True, verbose_name="Conteúdo do arquivo"
),
),
migrations.AlterField(
model_name="citycouncilminute",
name="file_url",
field=models.URLField(blank=True, null=True, verbose_name="Endereço (URL)"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="citycouncilminute",
name="title",
field=models.CharField(
blank=True, max_length=300, null=True, verbose_name="Título"
),
),
migrations.AlterField(
model_name="citycouncilminute",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="cityhallbid",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="cityhallbid",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="cityhallbid",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="cityhallbid",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="cityhallbid",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="cityhallbidevent",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="gazette",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="gazette",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="gazette",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="gazette",
name="date",
field=models.DateField(null=True, verbose_name="Data"),
),
migrations.AlterField(
model_name="gazette",
name="file_content",
field=models.TextField(
blank=True, null=True, verbose_name="Conteúdo do arquivo"
),
),
migrations.AlterField(
model_name="gazette",
name="file_url",
field=models.URLField(blank=True, null=True, verbose_name="Endereço (URL)"),
),
migrations.AlterField(
model_name="gazette",
name="is_legacy",
field=models.BooleanField(default=False, verbose_name="É do site antigo?"),
),
migrations.AlterField(
model_name="gazette",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="gazette",
name="power",
field=models.CharField(
choices=[
("executivo", "Poder Executivo"),
("legislativo", "Poder Legislativo"),
],
max_length=25,
verbose_name="Poder",
),
),
migrations.AlterField(
model_name="gazette",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
migrations.AlterField(
model_name="gazette",
name="year_and_edition",
field=models.CharField(max_length=100, verbose_name="Ano e edição"),
),
migrations.AlterField(
model_name="gazetteevent",
name="crawled_at",
field=models.DateTimeField(verbose_name="Coletado em"),
),
migrations.AlterField(
model_name="gazetteevent",
name="crawled_from",
field=models.URLField(verbose_name="Fonte"),
),
migrations.AlterField(
model_name="gazetteevent",
name="created_at",
field=models.DateTimeField(auto_now_add=True, verbose_name="Criado em"),
),
migrations.AlterField(
model_name="gazetteevent",
name="notes",
field=models.TextField(blank=True, null=True, verbose_name="Anotações"),
),
migrations.AlterField(
model_name="gazetteevent",
name="published_on",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Publicado em"
),
),
migrations.AlterField(
model_name="gazetteevent",
name="secretariat",
field=models.CharField(
blank=True, max_length=100, null=True, verbose_name="Secretaria"
),
),
migrations.AlterField(
model_name="gazetteevent",
name="summary",
field=models.TextField(blank=True, null=True, verbose_name="Sumário"),
),
migrations.AlterField(
model_name="gazetteevent",
name="title",
field=models.CharField(
blank=True, max_length=300, null=True, verbose_name="Título"
),
),
migrations.AlterField(
model_name="gazetteevent",
name="updated_at",
field=models.DateTimeField(auto_now=True, verbose_name="Atualizado em"),
),
]
| true | true |
f73051c3feff91789b9c7364b801010e5c39b0e2 | 149 | py | Python | 13-python-integration/1-python-introduction/hello.py | tehilabk/cpp-5781 | 736ed05dddb2a7270bbcdbb04a3ffb4b9046e358 | [
"MIT"
] | 14 | 2021-01-30T16:36:18.000Z | 2022-03-30T17:24:44.000Z | 13-python-integration/1-python-introduction/hello.py | dimastar2310/cpp-5781 | 615ba07e0841522df74384f380172557f5e305a7 | [
"MIT"
] | null | null | null | 13-python-integration/1-python-introduction/hello.py | dimastar2310/cpp-5781 | 615ba07e0841522df74384f380172557f5e305a7 | [
"MIT"
] | 23 | 2020-03-12T13:21:29.000Z | 2021-02-22T21:29:48.000Z | #!python3
# single-line string:
print("hello")
# multi-line strings:
print("""
w
o
r
l
d
"""
)
print("abc"*3)
a = [1,2,3,4]
print(a*3)
| 7.45 | 21 | 0.536913 |
print("hello")
print("""
w
o
r
l
d
"""
)
print("abc"*3)
a = [1,2,3,4]
print(a*3)
| true | true |
f7305317714c65d2ae52341ae5236b5e05b703e3 | 703 | py | Python | main.py | benayas1/FastAPI-demo | 9f242acf499a4f896ac7102d0c301c90d11acb2e | [
"MIT"
] | null | null | null | main.py | benayas1/FastAPI-demo | 9f242acf499a4f896ac7102d0c301c90d11acb2e | [
"MIT"
] | null | null | null | main.py | benayas1/FastAPI-demo | 9f242acf499a4f896ac7102d0c301c90d11acb2e | [
"MIT"
] | null | null | null | from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from src.build_model import convert, predict
app = FastAPI()
# pydantic models
class StockIn(BaseModel):
ticker: str
class StockOut(StockIn):
forecast: dict
# routes
@app.get("/ping")
async def pong():
return {"ping": "pong!"}
@app.post("/predict", response_model=StockOut, status_code=200)
def get_prediction(payload: StockIn):
ticker = payload.ticker
prediction_list = predict(ticker)
if not prediction_list:
raise HTTPException(status_code=400, detail="Model not found.")
response_object = {"ticker": ticker, "forecast": convert(prediction_list)}
return response_object | 18.5 | 78 | 0.721195 | from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from src.build_model import convert, predict
app = FastAPI()
class StockIn(BaseModel):
ticker: str
class StockOut(StockIn):
forecast: dict
@app.get("/ping")
async def pong():
return {"ping": "pong!"}
@app.post("/predict", response_model=StockOut, status_code=200)
def get_prediction(payload: StockIn):
ticker = payload.ticker
prediction_list = predict(ticker)
if not prediction_list:
raise HTTPException(status_code=400, detail="Model not found.")
response_object = {"ticker": ticker, "forecast": convert(prediction_list)}
return response_object | true | true |
f730535cc3c8c3715aaee00884ba41ed22be7ce5 | 8,550 | py | Python | cinder/volume/drivers/open_e/jovian_common/rest_proxy.py | shubhamdang/cinder | 03a8ca07d5710771c597fd92de50103313ec7f76 | [
"Apache-2.0"
] | 3 | 2016-08-23T20:37:47.000Z | 2016-10-12T11:16:35.000Z | cinder/volume/drivers/open_e/jovian_common/rest_proxy.py | shubhamdang/cinder | 03a8ca07d5710771c597fd92de50103313ec7f76 | [
"Apache-2.0"
] | null | null | null | cinder/volume/drivers/open_e/jovian_common/rest_proxy.py | shubhamdang/cinder | 03a8ca07d5710771c597fd92de50103313ec7f76 | [
"Apache-2.0"
] | 1 | 2019-09-25T11:15:01.000Z | 2019-09-25T11:15:01.000Z | # Copyright (c) 2020 Open-E, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network connection handling class for JovianDSS driver."""
import json
import time
from oslo_log import log as logging
from oslo_utils import netutils as o_netutils
import requests
import urllib3
from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.open_e.jovian_common import exception as jexc
LOG = logging.getLogger(__name__)
class JovianRESTProxy(object):
"""Jovian REST API proxy."""
def __init__(self, config):
""":param config: config is like dict."""
self.proto = 'http'
if config.get('driver_use_ssl', True):
self.proto = 'https'
self.hosts = config.safe_get('san_hosts')
self.port = str(config.get('san_api_port', 82))
self.active_host = 0
for host in self.hosts:
if o_netutils.is_valid_ip(host) is False:
err_msg = ('Invalid value of jovian_host property: '
'%(addr)s, IP address expected.' %
{'addr': host})
LOG.debug(err_msg)
raise exception.InvalidConfigurationValue(err_msg)
self.api_path = "/api/v3"
self.delay = config.get('jovian_recovery_delay', 40)
self.pool = config.safe_get('jovian_pool')
self.user = config.get('san_login', 'admin')
self.password = config.get('san_password', 'admin')
self.auth = requests.auth.HTTPBasicAuth(self.user, self.password)
self.verify = False
self.retry_n = config.get('jovian_rest_send_repeats', 3)
self.header = {'connection': 'keep-alive',
'Content-Type': 'application/json',
'authorization': 'Basic '}
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def _get_pool_url(self, host):
url = ('%(proto)s://%(host)s:%(port)s/api/v3/pools/%(pool)s' % {
'proto': self.proto,
'host': host,
'port': self.port,
'pool': self.pool})
return url
def _get_url(self, host):
url = ('%(proto)s://%(host)s:%(port)s/api/v3' % {
'proto': self.proto,
'host': host,
'port': self.port})
return url
def request(self, request_method, req, json_data=None):
"""Send request to the specific url.
:param request_method: GET, POST, DELETE
:param url: where to send
:param json_data: data
"""
for j in range(self.retry_n):
for i in range(len(self.hosts)):
host = self.hosts[self.active_host]
url = self._get_url(host) + req
LOG.debug(
"sending request of type %(type)s to %(url)s "
"attempt: %(num)s.",
{'type': request_method,
'url': url,
'num': j})
if json_data is not None:
LOG.debug(
"sending data: %s.", json_data)
try:
ret = self._request_routine(url, request_method, json_data)
if len(ret) == 0:
self.active_host = ((self.active_host + 1)
% len(self.hosts))
continue
return ret
except requests.ConnectionError as err:
LOG.debug("Connection error %s", err)
self.active_host = (self.active_host + 1) % len(self.hosts)
continue
time.sleep(self.delay)
msg = (_('%(times) faild in a row') % {'times': j})
raise jexc.JDSSRESTProxyException(host=url, reason=msg)
def pool_request(self, request_method, req, json_data=None):
"""Send request to the specific url.
:param request_method: GET, POST, DELETE
:param url: where to send
:param json_data: data
"""
url = ""
for j in range(self.retry_n):
for i in range(len(self.hosts)):
host = self.hosts[self.active_host]
url = self._get_pool_url(host) + req
LOG.debug(
"sending pool request of type %(type)s to %(url)s "
"attempt: %(num)s.",
{'type': request_method,
'url': url,
'num': j})
if json_data is not None:
LOG.debug(
"JovianDSS: Sending data: %s.", str(json_data))
try:
ret = self._request_routine(url, request_method, json_data)
if len(ret) == 0:
self.active_host = ((self.active_host + 1)
% len(self.hosts))
continue
return ret
except requests.ConnectionError as err:
LOG.debug("Connection error %s", err)
self.active_host = (self.active_host + 1) % len(self.hosts)
continue
time.sleep(int(self.delay))
msg = (_('%(times) faild in a row') % {'times': j})
raise jexc.JDSSRESTProxyException(host=url, reason=msg)
def _request_routine(self, url, request_method, json_data=None):
"""Make an HTTPS request and return the results."""
ret = None
for i in range(3):
ret = dict()
try:
response_obj = requests.request(request_method,
auth=self.auth,
url=url,
headers=self.header,
data=json.dumps(json_data),
verify=self.verify)
LOG.debug('response code: %s', response_obj.status_code)
LOG.debug('response data: %s', response_obj.text)
ret['code'] = response_obj.status_code
if '{' in response_obj.text and '}' in response_obj.text:
if "error" in response_obj.text:
ret["error"] = json.loads(response_obj.text)["error"]
else:
ret["error"] = None
if "data" in response_obj.text:
ret["data"] = json.loads(response_obj.text)["data"]
else:
ret["data"] = None
if ret["code"] == 500:
if ret["error"] is not None:
if (("errno" in ret["error"]) and
("class" in ret["error"])):
if (ret["error"]["class"] ==
"opene.tools.scstadmin.ScstAdminError"):
LOG.debug("ScstAdminError %(code)d %(msg)s", {
"code": ret["error"]["errno"],
"msg": ret["error"]["message"]})
continue
if (ret["error"]["class"] ==
"exceptions.OSError"):
LOG.debug("OSError %(code)d %(msg)s", {
"code": ret["error"]["errno"],
"msg": ret["error"]["message"]})
continue
break
except requests.HTTPError as err:
LOG.debug("HTTP parsing error %s", err)
self.active_host = (self.active_host + 1) % len(self.hosts)
return ret
def get_active_host(self):
"""Return address of currently used host."""
return self.hosts[self.active_host]
| 37.665198 | 79 | 0.491579 |
import json
import time
from oslo_log import log as logging
from oslo_utils import netutils as o_netutils
import requests
import urllib3
from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.open_e.jovian_common import exception as jexc
LOG = logging.getLogger(__name__)
class JovianRESTProxy(object):
def __init__(self, config):
self.proto = 'http'
if config.get('driver_use_ssl', True):
self.proto = 'https'
self.hosts = config.safe_get('san_hosts')
self.port = str(config.get('san_api_port', 82))
self.active_host = 0
for host in self.hosts:
if o_netutils.is_valid_ip(host) is False:
err_msg = ('Invalid value of jovian_host property: '
'%(addr)s, IP address expected.' %
{'addr': host})
LOG.debug(err_msg)
raise exception.InvalidConfigurationValue(err_msg)
self.api_path = "/api/v3"
self.delay = config.get('jovian_recovery_delay', 40)
self.pool = config.safe_get('jovian_pool')
self.user = config.get('san_login', 'admin')
self.password = config.get('san_password', 'admin')
self.auth = requests.auth.HTTPBasicAuth(self.user, self.password)
self.verify = False
self.retry_n = config.get('jovian_rest_send_repeats', 3)
self.header = {'connection': 'keep-alive',
'Content-Type': 'application/json',
'authorization': 'Basic '}
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def _get_pool_url(self, host):
url = ('%(proto)s://%(host)s:%(port)s/api/v3/pools/%(pool)s' % {
'proto': self.proto,
'host': host,
'port': self.port,
'pool': self.pool})
return url
def _get_url(self, host):
url = ('%(proto)s://%(host)s:%(port)s/api/v3' % {
'proto': self.proto,
'host': host,
'port': self.port})
return url
def request(self, request_method, req, json_data=None):
for j in range(self.retry_n):
for i in range(len(self.hosts)):
host = self.hosts[self.active_host]
url = self._get_url(host) + req
LOG.debug(
"sending request of type %(type)s to %(url)s "
"attempt: %(num)s.",
{'type': request_method,
'url': url,
'num': j})
if json_data is not None:
LOG.debug(
"sending data: %s.", json_data)
try:
ret = self._request_routine(url, request_method, json_data)
if len(ret) == 0:
self.active_host = ((self.active_host + 1)
% len(self.hosts))
continue
return ret
except requests.ConnectionError as err:
LOG.debug("Connection error %s", err)
self.active_host = (self.active_host + 1) % len(self.hosts)
continue
time.sleep(self.delay)
msg = (_('%(times) faild in a row') % {'times': j})
raise jexc.JDSSRESTProxyException(host=url, reason=msg)
def pool_request(self, request_method, req, json_data=None):
url = ""
for j in range(self.retry_n):
for i in range(len(self.hosts)):
host = self.hosts[self.active_host]
url = self._get_pool_url(host) + req
LOG.debug(
"sending pool request of type %(type)s to %(url)s "
"attempt: %(num)s.",
{'type': request_method,
'url': url,
'num': j})
if json_data is not None:
LOG.debug(
"JovianDSS: Sending data: %s.", str(json_data))
try:
ret = self._request_routine(url, request_method, json_data)
if len(ret) == 0:
self.active_host = ((self.active_host + 1)
% len(self.hosts))
continue
return ret
except requests.ConnectionError as err:
LOG.debug("Connection error %s", err)
self.active_host = (self.active_host + 1) % len(self.hosts)
continue
time.sleep(int(self.delay))
msg = (_('%(times) faild in a row') % {'times': j})
raise jexc.JDSSRESTProxyException(host=url, reason=msg)
def _request_routine(self, url, request_method, json_data=None):
ret = None
for i in range(3):
ret = dict()
try:
response_obj = requests.request(request_method,
auth=self.auth,
url=url,
headers=self.header,
data=json.dumps(json_data),
verify=self.verify)
LOG.debug('response code: %s', response_obj.status_code)
LOG.debug('response data: %s', response_obj.text)
ret['code'] = response_obj.status_code
if '{' in response_obj.text and '}' in response_obj.text:
if "error" in response_obj.text:
ret["error"] = json.loads(response_obj.text)["error"]
else:
ret["error"] = None
if "data" in response_obj.text:
ret["data"] = json.loads(response_obj.text)["data"]
else:
ret["data"] = None
if ret["code"] == 500:
if ret["error"] is not None:
if (("errno" in ret["error"]) and
("class" in ret["error"])):
if (ret["error"]["class"] ==
"opene.tools.scstadmin.ScstAdminError"):
LOG.debug("ScstAdminError %(code)d %(msg)s", {
"code": ret["error"]["errno"],
"msg": ret["error"]["message"]})
continue
if (ret["error"]["class"] ==
"exceptions.OSError"):
LOG.debug("OSError %(code)d %(msg)s", {
"code": ret["error"]["errno"],
"msg": ret["error"]["message"]})
continue
break
except requests.HTTPError as err:
LOG.debug("HTTP parsing error %s", err)
self.active_host = (self.active_host + 1) % len(self.hosts)
return ret
def get_active_host(self):
return self.hosts[self.active_host]
| true | true |
f73053fd0a165b1b4054dc4e1336ef1364710f9d | 2,296 | py | Python | Experiments/ST_MGCN/deprecated/didi_trial.py | TempAnonymous/Context_Analysis | bbeba1ed7ea7001c22a12721fc4f390d4cc01a6e | [
"MIT"
] | 28 | 2020-02-28T03:16:43.000Z | 2022-03-31T07:24:47.000Z | Experiments/ST_MGCN/deprecated/didi_trial.py | TempAnonymous/Context_Analysis | bbeba1ed7ea7001c22a12721fc4f390d4cc01a6e | [
"MIT"
] | 8 | 2020-06-30T09:34:56.000Z | 2022-01-17T12:20:28.000Z | Experiments/ST_MGCN/deprecated/didi_trial.py | TempAnonymous/Context_Analysis | bbeba1ed7ea7001c22a12721fc4f390d4cc01a6e | [
"MIT"
] | 13 | 2020-06-04T09:47:36.000Z | 2022-02-25T09:50:52.000Z | import os
import warnings
warnings.filterwarnings("ignore")
shared_params_st_mgcn = ('python ST_MGCN_Obj.py '
'--Dataset DiDi '
'--CT 6 '
'--PT 7 '
'--TT 4 '
'--LSTMUnits 64 '
'--LSTMLayers 3 '
'--DataRange All '
'--TrainDays All '
'--TC 0.65 '
'--TD 7500 '
'--TI 30 '
'--Epoch 10000 '
'--Train True '
'--lr 1e-4 '
'--patience 0.1 '
'--ESlength 100 '
'--BatchSize 16 '
'--MergeWay sum '
'--Device 1 ')
if __name__ == "__main__":
"""
Multiple Graphs
"""
# Chengdu
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 1')
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 3')
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 6')
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 12')
# Xian
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 1')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 3')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 6')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 12')
| 45.019608 | 98 | 0.434669 | import os
import warnings
warnings.filterwarnings("ignore")
shared_params_st_mgcn = ('python ST_MGCN_Obj.py '
'--Dataset DiDi '
'--CT 6 '
'--PT 7 '
'--TT 4 '
'--LSTMUnits 64 '
'--LSTMLayers 3 '
'--DataRange All '
'--TrainDays All '
'--TC 0.65 '
'--TD 7500 '
'--TI 30 '
'--Epoch 10000 '
'--Train True '
'--lr 1e-4 '
'--patience 0.1 '
'--ESlength 100 '
'--BatchSize 16 '
'--MergeWay sum '
'--Device 1 ')
if __name__ == "__main__":
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 1')
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 3')
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 6')
os.system(shared_params_st_mgcn + ' --City Chengdu --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 12')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 1')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 3')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 6')
os.system(shared_params_st_mgcn + ' --City Xian --K 1 --L 1 '
' --Graph Distance-Correlation-Interaction --MergeIndex 12')
| true | true |
f73054605f8bd8b1ec1a6f2b7c0863270a33ba11 | 1,022 | py | Python | setup.py | TaruniSurampally/testpatrolev | 7c7fe1cad2967e0be84ca74b9a200ae2fde356db | [
"Apache-2.0"
] | null | null | null | setup.py | TaruniSurampally/testpatrolev | 7c7fe1cad2967e0be84ca74b9a200ae2fde356db | [
"Apache-2.0"
] | null | null | null | setup.py | TaruniSurampally/testpatrolev | 7c7fe1cad2967e0be84ca74b9a200ae2fde356db | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 ATT Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=1.8'],
pbr=True)
| 32.967742 | 69 | 0.749511 |
import setuptools
import multiprocessing
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=1.8'],
pbr=True)
| true | true |
f73055f4dfa87a425c2bda83e8e320add863b887 | 490 | py | Python | backend/src/myCU_App/migrations/0001_initial.py | citz73/myCUProject | afad36d6cf072e44d4707860496a023053d34789 | [
"MIT"
] | 1 | 2020-03-15T04:27:30.000Z | 2020-03-15T04:27:30.000Z | backend/src/myCU_App/migrations/0001_initial.py | citz73/myCUSide_Project | afad36d6cf072e44d4707860496a023053d34789 | [
"MIT"
] | null | null | null | backend/src/myCU_App/migrations/0001_initial.py | citz73/myCUSide_Project | afad36d6cf072e44d4707860496a023053d34789 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.7 on 2020-03-01 01:09
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MyModelTest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('col', models.CharField(max_length=100)),
],
),
]
| 22.272727 | 114 | 0.577551 |
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MyModelTest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('col', models.CharField(max_length=100)),
],
),
]
| true | true |
f73056d77fb654514f04a7ff33b8c84ed4722dee | 1,732 | py | Python | data_postprocessing_10.py | KokBob/InitProject | 63b7cefb9a130118db9ff5405c5dd87bbe34e9f3 | [
"BSD-2-Clause"
] | null | null | null | data_postprocessing_10.py | KokBob/InitProject | 63b7cefb9a130118db9ff5405c5dd87bbe34e9f3 | [
"BSD-2-Clause"
] | null | null | null | data_postprocessing_10.py | KokBob/InitProject | 63b7cefb9a130118db9ff5405c5dd87bbe34e9f3 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
20181010
ciklaminima
"""
import numpy as np
import matplotlib.pyplot as plt
import os
import pandas as pd
import _dataPostprLib_ as lib
import seaborn as sns
import importlib
#%%
sns.set()
#sns.set_context("poster")
sns.set_context("paper")
#sns.color_palette("Paired")
seq_col_brew = sns.color_palette('hls', 12)
sns.set_palette(seq_col_brew)
plt.close('all')
path_glob = r'U:\projects\0005_Moventas_RCA\40_measurement'
test_bench_name = ['Data_test_run_63526_PPH-5700', 'Data_test_run_63527_PPH-5700']
#%%
path_test_bench_i = path_glob + '\\' + test_bench_name[0]
path_meas = os.listdir(path_test_bench_i)
#%%
i = 0
lc_repos = []
for lc in path_meas:
#load_collection = path_meas[0]
load_collection = lc
#load_collection = path_meas[-1]
path_mea_i = path_test_bench_i + '\\' + load_collection
meas_i = os.listdir(path_mea_i)
data_repos = []
for mf in meas_i:
h_,r_,freq_,name_ = lib.catch_mea(mf)
mea_file = path_mea_i + '\\' + mf
data_i = pd.read_csv(mea_file,sep=';',header=3, skiprows = [4])
t_i = lib.time_vector(freq_,data_i)
mea_dict = {'data': data_i,
't': t_i,
'name': name_,
'load': load_collection}
data_repos.append(mea_dict)
# lib.plot_Torque_Temp_pls1(data_repos)
# lib.plot_Torque_Temp_pls2(data_repos)
lib.plot_Torque_Temp_pls(data_repos)
lc_repos.append(data_repos)
# data_repos_actual = data_repos[i]
#%%
# lib.plot_Torque_Temp_pls1(data_repos)
# lib.plot_Torque_Temp_pls2(data_repos)
# lib.plot_Torque_Temp_pls(data_repos)
# i += 1 | 28.866667 | 83 | 0.646074 |
import numpy as np
import matplotlib.pyplot as plt
import os
import pandas as pd
import _dataPostprLib_ as lib
import seaborn as sns
import importlib
sns.set()
sns.set_context("paper")
seq_col_brew = sns.color_palette('hls', 12)
sns.set_palette(seq_col_brew)
plt.close('all')
path_glob = r'U:\projects\0005_Moventas_RCA\40_measurement'
test_bench_name = ['Data_test_run_63526_PPH-5700', 'Data_test_run_63527_PPH-5700']
path_test_bench_i = path_glob + '\\' + test_bench_name[0]
path_meas = os.listdir(path_test_bench_i)
i = 0
lc_repos = []
for lc in path_meas:
load_collection = lc
path_mea_i = path_test_bench_i + '\\' + load_collection
meas_i = os.listdir(path_mea_i)
data_repos = []
for mf in meas_i:
h_,r_,freq_,name_ = lib.catch_mea(mf)
mea_file = path_mea_i + '\\' + mf
data_i = pd.read_csv(mea_file,sep=';',header=3, skiprows = [4])
t_i = lib.time_vector(freq_,data_i)
mea_dict = {'data': data_i,
't': t_i,
'name': name_,
'load': load_collection}
data_repos.append(mea_dict)
lib.plot_Torque_Temp_pls(data_repos)
lc_repos.append(data_repos)
| true | true |
f73057eeee7582782ed1d122e1834de489b95405 | 698 | py | Python | github_stats/cli.py | mrlesmithjr/python-github-stats | 5e9237b8caf372b1b0cb791e593f8209f1d19204 | [
"MIT"
] | null | null | null | github_stats/cli.py | mrlesmithjr/python-github-stats | 5e9237b8caf372b1b0cb791e593f8209f1d19204 | [
"MIT"
] | 6 | 2021-04-23T12:45:04.000Z | 2021-08-08T01:07:41.000Z | github_stats/cli.py | mrlesmithjr/python-github-stats | 5e9237b8caf372b1b0cb791e593f8209f1d19204 | [
"MIT"
] | null | null | null | """Console script for python-github-stats."""
import argparse
import os
def cli_args():
"""Parse CLI arguments."""
parser = argparse.ArgumentParser(description="Manage GitHub via API.")
parser.add_argument(
"action", help="Define action to take.", choices=["user-attrs", "user-repos"]
)
parser.add_argument(
"--netrcfile",
help="Path to Netrc file",
default=os.path.join(os.path.expanduser("~"), ".netrc"),
)
parser.add_argument("--token", help="Your GitHub API private token.")
parser.add_argument(
"--url", help="GitHub API url", default="https://api.github.com"
)
args = parser.parse_args()
return args
| 24.068966 | 85 | 0.627507 |
import argparse
import os
def cli_args():
parser = argparse.ArgumentParser(description="Manage GitHub via API.")
parser.add_argument(
"action", help="Define action to take.", choices=["user-attrs", "user-repos"]
)
parser.add_argument(
"--netrcfile",
help="Path to Netrc file",
default=os.path.join(os.path.expanduser("~"), ".netrc"),
)
parser.add_argument("--token", help="Your GitHub API private token.")
parser.add_argument(
"--url", help="GitHub API url", default="https://api.github.com"
)
args = parser.parse_args()
return args
| true | true |
f730580ba95c7eaadd448112b44a1b8b774609d7 | 1,012 | py | Python | application.py | WengChaoxi/flask-mvc | 250a0c5811745f674b45e39262bc81c92756ce0c | [
"MIT"
] | 1 | 2020-10-25T15:15:01.000Z | 2020-10-25T15:15:01.000Z | application.py | WengChaoxi/flask-mvc | 250a0c5811745f674b45e39262bc81c92756ce0c | [
"MIT"
] | null | null | null | application.py | WengChaoxi/flask-mvc | 250a0c5811745f674b45e39262bc81c92756ce0c | [
"MIT"
] | null | null | null | # coding: utf-8
from flask import Flask as FlaskBase
from flask_sqlalchemy import SQLAlchemy
from flask_apscheduler import APScheduler
import os
from jobs.tasks.timer import SchedulerConfig
from common.libs.utils import correctPath
db = SQLAlchemy()
scheduler = APScheduler() # 定时任务
class Flask(FlaskBase):
def __init__(self, import_name, static_folder, template_folder, root_path):
super(Flask, self).__init__(import_name, static_folder=static_folder, template_folder=template_folder, root_path=root_path)
self.config.from_pyfile(correctPath('config/config.py'))
db.init_app(self)
self.config.from_object(SchedulerConfig())
scheduler.init_app(self)
scheduler.start()
static_path = correctPath('web/static')
templates_path = correctPath('web/templates')
app = Flask(__name__, static_folder=static_path, template_folder=templates_path, root_path=os.getcwd())
# 解决跨域问题
# from flask_cors import CORS
# CORS(app, supports_credentials = True)
| 33.733333 | 131 | 0.759881 |
from flask import Flask as FlaskBase
from flask_sqlalchemy import SQLAlchemy
from flask_apscheduler import APScheduler
import os
from jobs.tasks.timer import SchedulerConfig
from common.libs.utils import correctPath
db = SQLAlchemy()
scheduler = APScheduler()
class Flask(FlaskBase):
def __init__(self, import_name, static_folder, template_folder, root_path):
super(Flask, self).__init__(import_name, static_folder=static_folder, template_folder=template_folder, root_path=root_path)
self.config.from_pyfile(correctPath('config/config.py'))
db.init_app(self)
self.config.from_object(SchedulerConfig())
scheduler.init_app(self)
scheduler.start()
static_path = correctPath('web/static')
templates_path = correctPath('web/templates')
app = Flask(__name__, static_folder=static_path, template_folder=templates_path, root_path=os.getcwd())
| true | true |
f73058b87fae3279f8d290c4759bd446c990a5a0 | 437 | py | Python | app/model/tables.py | fabiomvieira/users-flask | 3e57c673b51e2f1cbb6f32a70ddd2c90cd86c1cb | [
"MIT"
] | null | null | null | app/model/tables.py | fabiomvieira/users-flask | 3e57c673b51e2f1cbb6f32a70ddd2c90cd86c1cb | [
"MIT"
] | null | null | null | app/model/tables.py | fabiomvieira/users-flask | 3e57c673b51e2f1cbb6f32a70ddd2c90cd86c1cb | [
"MIT"
] | null | null | null | from app import app, db
class User(db.Model):
__tablename__ = 'user'
id = db.Column(db.Integer, autoincrement = True, primary_key = True)
name = db.Column(db.String(100))
age = db.Column(db.Integer)
email = db.Column(db.String(30))
phone = db.Column(db.Integer)
def __init__(self, name, age, email, phone):
self.name = name
self.age = age
self.email= email
self.phone = phone | 29.133333 | 72 | 0.622426 | from app import app, db
class User(db.Model):
__tablename__ = 'user'
id = db.Column(db.Integer, autoincrement = True, primary_key = True)
name = db.Column(db.String(100))
age = db.Column(db.Integer)
email = db.Column(db.String(30))
phone = db.Column(db.Integer)
def __init__(self, name, age, email, phone):
self.name = name
self.age = age
self.email= email
self.phone = phone | true | true |
f7305a0dbb2a8aae8064a95ec3fa52386dab4833 | 37,699 | py | Python | website/venv/lib/python2.7/site-packages/psutil/tests/test_misc.py | wenhuiyang/ARgot | 3fd1eacca6f81a3157649dda95ab427ca1f5efe1 | [
"MIT"
] | 2 | 2017-11-24T12:44:30.000Z | 2020-04-11T17:28:43.000Z | psutil/tests/test_misc.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 6 | 2017-11-10T19:45:18.000Z | 2017-11-12T14:50:42.000Z | psutil/tests/test_misc.py | EnjoyLifeFund/Debian_py36_packages | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Miscellaneous tests.
"""
import ast
import collections
import contextlib
import errno
import json
import os
import pickle
import socket
import stat
import sys
from psutil import LINUX
from psutil import POSIX
from psutil import WINDOWS
from psutil._common import memoize
from psutil._common import memoize_when_activated
from psutil._common import supports_ipv6
from psutil._common import wrap_numbers
from psutil._compat import PY3
from psutil.tests import APPVEYOR
from psutil.tests import bind_socket
from psutil.tests import bind_unix_socket
from psutil.tests import call_until
from psutil.tests import chdir
from psutil.tests import create_proc_children_pair
from psutil.tests import create_sockets
from psutil.tests import create_zombie_proc
from psutil.tests import DEVNULL
from psutil.tests import get_free_port
from psutil.tests import get_test_subprocess
from psutil.tests import HAS_BATTERY
from psutil.tests import HAS_CONNECTIONS_UNIX
from psutil.tests import HAS_MEMORY_FULL_INFO
from psutil.tests import HAS_MEMORY_MAPS
from psutil.tests import HAS_SENSORS_BATTERY
from psutil.tests import HAS_SENSORS_FANS
from psutil.tests import HAS_SENSORS_TEMPERATURES
from psutil.tests import import_module_by_path
from psutil.tests import is_namedtuple
from psutil.tests import mock
from psutil.tests import reap_children
from psutil.tests import reload_module
from psutil.tests import retry
from psutil.tests import ROOT_DIR
from psutil.tests import run_test_module_by_name
from psutil.tests import safe_rmpath
from psutil.tests import SCRIPTS_DIR
from psutil.tests import sh
from psutil.tests import tcp_socketpair
from psutil.tests import TESTFN
from psutil.tests import TOX
from psutil.tests import TRAVIS
from psutil.tests import unittest
from psutil.tests import unix_socket_path
from psutil.tests import unix_socketpair
from psutil.tests import wait_for_file
from psutil.tests import wait_for_pid
import psutil
import psutil.tests
# ===================================================================
# --- Misc / generic tests.
# ===================================================================
class TestMisc(unittest.TestCase):
def test_process__repr__(self, func=repr):
p = psutil.Process()
r = func(p)
self.assertIn("psutil.Process", r)
self.assertIn("pid=%s" % p.pid, r)
self.assertIn("name=", r)
self.assertIn(p.name(), r)
with mock.patch.object(psutil.Process, "name",
side_effect=psutil.ZombieProcess(os.getpid())):
p = psutil.Process()
r = func(p)
self.assertIn("pid=%s" % p.pid, r)
self.assertIn("zombie", r)
self.assertNotIn("name=", r)
with mock.patch.object(psutil.Process, "name",
side_effect=psutil.NoSuchProcess(os.getpid())):
p = psutil.Process()
r = func(p)
self.assertIn("pid=%s" % p.pid, r)
self.assertIn("terminated", r)
self.assertNotIn("name=", r)
with mock.patch.object(psutil.Process, "name",
side_effect=psutil.AccessDenied(os.getpid())):
p = psutil.Process()
r = func(p)
self.assertIn("pid=%s" % p.pid, r)
self.assertNotIn("name=", r)
    def test_process__str__(self):
        # str() and repr() of a Process share the same rendering code
        # path, so reuse the repr test with func=str.
        self.test_process__repr__(func=str)
def test_no_such_process__repr__(self, func=repr):
self.assertEqual(
repr(psutil.NoSuchProcess(321)),
"psutil.NoSuchProcess process no longer exists (pid=321)")
self.assertEqual(
repr(psutil.NoSuchProcess(321, name='foo')),
"psutil.NoSuchProcess process no longer exists (pid=321, "
"name='foo')")
self.assertEqual(
repr(psutil.NoSuchProcess(321, msg='foo')),
"psutil.NoSuchProcess foo")
def test_zombie_process__repr__(self, func=repr):
self.assertEqual(
repr(psutil.ZombieProcess(321)),
"psutil.ZombieProcess process still exists but it's a zombie "
"(pid=321)")
self.assertEqual(
repr(psutil.ZombieProcess(321, name='foo')),
"psutil.ZombieProcess process still exists but it's a zombie "
"(pid=321, name='foo')")
self.assertEqual(
repr(psutil.ZombieProcess(321, name='foo', ppid=1)),
"psutil.ZombieProcess process still exists but it's a zombie "
"(pid=321, name='foo', ppid=1)")
self.assertEqual(
repr(psutil.ZombieProcess(321, msg='foo')),
"psutil.ZombieProcess foo")
def test_access_denied__repr__(self, func=repr):
self.assertEqual(
repr(psutil.AccessDenied(321)),
"psutil.AccessDenied (pid=321)")
self.assertEqual(
repr(psutil.AccessDenied(321, name='foo')),
"psutil.AccessDenied (pid=321, name='foo')")
self.assertEqual(
repr(psutil.AccessDenied(321, msg='foo')),
"psutil.AccessDenied foo")
def test_timeout_expired__repr__(self, func=repr):
self.assertEqual(
repr(psutil.TimeoutExpired(321)),
"psutil.TimeoutExpired timeout after 321 seconds")
self.assertEqual(
repr(psutil.TimeoutExpired(321, pid=111)),
"psutil.TimeoutExpired timeout after 321 seconds (pid=111)")
self.assertEqual(
repr(psutil.TimeoutExpired(321, pid=111, name='foo')),
"psutil.TimeoutExpired timeout after 321 seconds "
"(pid=111, name='foo')")
    def test_process__eq__(self):
        # Two Process instances for the same PID must compare equal;
        # equality relies on the internal identity tuple (_ident).
        p1 = psutil.Process()
        p2 = psutil.Process()
        self.assertEqual(p1, p2)
        # Forging a different identity must break equality.
        p2._ident = (0, 0)
        self.assertNotEqual(p1, p2)
        # Comparison against an unrelated type must not blow up.
        self.assertNotEqual(p1, 'foo')
    def test_process__hash__(self):
        # Processes with the same identity must hash identically so they
        # collapse into a single set entry.
        s = set([psutil.Process(), psutil.Process()])
        self.assertEqual(len(s), 1)
    def test__all__(self):
        # Sanity-check psutil.__all__: every public, non-deprecated name
        # exposed by the module (excluding importable sub-modules and a
        # few known legacy aliases) must be listed in __all__, and every
        # __all__ entry must actually exist on the module.
        dir_psutil = dir(psutil)
        for name in dir_psutil:
            if name in ('callable', 'error', 'namedtuple', 'tests',
                        'long', 'test', 'NUM_CPUS', 'BOOT_TIME',
                        'TOTAL_PHYMEM'):
                continue
            if not name.startswith('_'):
                try:
                    # Names which are importable modules are exempt.
                    __import__(name)
                except ImportError:
                    if name not in psutil.__all__:
                        fun = getattr(psutil, name)
                        if fun is None:
                            continue
                        if (fun.__doc__ is not None and
                                'deprecated' not in fun.__doc__.lower()):
                            self.fail('%r not in psutil.__all__' % name)
        # Import 'star' will break if __all__ is inconsistent, see:
        # https://github.com/giampaolo/psutil/issues/656
        # Can't do `from psutil import *` as it won't work on python 3
        # so we simply iterate over __all__.
        for name in psutil.__all__:
            self.assertIn(name, dir_psutil)
def test_version(self):
self.assertEqual('.'.join([str(x) for x in psutil.version_info]),
psutil.__version__)
    def test_process_as_dict_no_new_names(self):
        # See https://github.com/giampaolo/psutil/issues/813
        # Attributes set on a Process instance after creation must not
        # leak into the as_dict() snapshot.
        p = psutil.Process()
        p.foo = '1'
        self.assertNotIn('foo', p.as_dict())
    def test_memoize(self):
        # memoize() must cache one result per distinct (args, kwargs)
        # combination, expose cache_clear(), and preserve the wrapped
        # function's docstring. `calls` counts actual executions.
        @memoize
        def foo(*args, **kwargs):
            "foo docstring"
            calls.append(None)
            return (args, kwargs)
        calls = []
        # no args
        for x in range(2):
            ret = foo()
            expected = ((), {})
            self.assertEqual(ret, expected)
            self.assertEqual(len(calls), 1)
        # with args
        for x in range(2):
            ret = foo(1)
            expected = ((1, ), {})
            self.assertEqual(ret, expected)
            self.assertEqual(len(calls), 2)
        # with args + kwargs
        for x in range(2):
            ret = foo(1, bar=2)
            expected = ((1, ), {'bar': 2})
            self.assertEqual(ret, expected)
            self.assertEqual(len(calls), 3)
        # clear cache
        foo.cache_clear()
        ret = foo()
        expected = ((), {})
        self.assertEqual(ret, expected)
        self.assertEqual(len(calls), 4)
        # docstring
        self.assertEqual(foo.__doc__, "foo docstring")
    def test_memoize_when_activated(self):
        # memoize_when_activated() caches only between cache_activate()
        # and cache_deactivate(); outside that window every call must go
        # through to the real method.
        class Foo:
            @memoize_when_activated
            def foo(self):
                calls.append(None)
        f = Foo()
        calls = []
        f.foo()
        f.foo()
        self.assertEqual(len(calls), 2)
        # activate
        calls = []
        f.foo.cache_activate()
        f.foo()
        f.foo()
        self.assertEqual(len(calls), 1)
        # deactivate
        calls = []
        f.foo.cache_deactivate()
        f.foo()
        f.foo()
        self.assertEqual(len(calls), 2)
    def test_parse_environ_block(self):
        # parse_environ_block() turns a raw "k=v\0k=v\0\0" environment
        # block into a dict, tolerating malformed input.
        from psutil._common import parse_environ_block
        def k(s):
            # Windows normalizes environment variable names to upper case.
            return s.upper() if WINDOWS else s
        self.assertEqual(parse_environ_block("a=1\0"),
                         {k("a"): "1"})
        self.assertEqual(parse_environ_block("a=1\0b=2\0\0"),
                         {k("a"): "1", k("b"): "2"})
        self.assertEqual(parse_environ_block("a=1\0b=\0\0"),
                         {k("a"): "1", k("b"): ""})
        # ignore everything after \0\0
        self.assertEqual(parse_environ_block("a=1\0b=2\0\0c=3\0"),
                         {k("a"): "1", k("b"): "2"})
        # ignore everything that is not an assignment
        self.assertEqual(parse_environ_block("xxx\0a=1\0"), {k("a"): "1"})
        self.assertEqual(parse_environ_block("a=1\0=b=2\0"), {k("a"): "1"})
        # do not fail if the block is incomplete
        self.assertEqual(parse_environ_block("a=1\0b=2"), {k("a"): "1"})
    def test_supports_ipv6(self):
        # supports_ipv6() is memoized, hence the repeated cache_clear()
        # calls: each mocked failure mode (socket.has_ipv6 False, socket
        # creation error, getaddrinfo error, bind error) must make it
        # return False.
        self.addCleanup(supports_ipv6.cache_clear)
        if supports_ipv6():
            with mock.patch('psutil._common.socket') as s:
                s.has_ipv6 = False
                supports_ipv6.cache_clear()
                assert not supports_ipv6()
            supports_ipv6.cache_clear()
            with mock.patch('psutil._common.socket.socket',
                            side_effect=socket.error) as s:
                assert not supports_ipv6()
                assert s.called
            supports_ipv6.cache_clear()
            with mock.patch('psutil._common.socket.socket',
                            side_effect=socket.gaierror) as s:
                assert not supports_ipv6()
                supports_ipv6.cache_clear()
                assert s.called
            supports_ipv6.cache_clear()
            with mock.patch('psutil._common.socket.socket.bind',
                            side_effect=socket.gaierror) as s:
                assert not supports_ipv6()
                supports_ipv6.cache_clear()
                assert s.called
        else:
            # On a host without IPv6 an AF_INET6 bind must fail for real.
            with self.assertRaises(Exception):
                sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
                sock.bind(("::1", 0))
    def test_isfile_strict(self):
        # isfile_strict() behaves like os.path.isfile() except that it
        # re-raises EPERM / EACCES instead of swallowing them.
        from psutil._common import isfile_strict
        this_file = os.path.abspath(__file__)
        assert isfile_strict(this_file)
        assert not isfile_strict(os.path.dirname(this_file))
        with mock.patch('psutil._common.os.stat',
                        side_effect=OSError(errno.EPERM, "foo")):
            self.assertRaises(OSError, isfile_strict, this_file)
        with mock.patch('psutil._common.os.stat',
                        side_effect=OSError(errno.EACCES, "foo")):
            self.assertRaises(OSError, isfile_strict, this_file)
        # Any other errno is treated as "not a file".
        with mock.patch('psutil._common.os.stat',
                        side_effect=OSError(errno.EINVAL, "foo")):
            assert not isfile_strict(this_file)
        with mock.patch('psutil._common.stat.S_ISREG', return_value=False):
            assert not isfile_strict(this_file)
    def test_serialization(self):
        # The namedtuples returned by the public APIs must survive both a
        # JSON and a pickle round trip without losing equality.
        def check(ret):
            if json is not None:
                json.loads(json.dumps(ret))
            a = pickle.dumps(ret)
            b = pickle.loads(a)
            self.assertEqual(ret, b)
        check(psutil.Process().as_dict())
        check(psutil.virtual_memory())
        check(psutil.swap_memory())
        check(psutil.cpu_times())
        check(psutil.cpu_times_percent(interval=0))
        check(psutil.net_io_counters())
        # Exotic Linux systems may lack /proc/diskstats entirely.
        if LINUX and not os.path.exists('/proc/diskstats'):
            pass
        else:
            if not APPVEYOR:
                check(psutil.disk_io_counters())
        check(psutil.disk_partitions())
        check(psutil.disk_usage(os.getcwd()))
        check(psutil.users())
    def test_setup_script(self):
        # setup.py must be importable, its setup() must bail out with
        # SystemExit when run with no CLI arguments, and the version it
        # declares must match psutil.__version__.
        setup_py = os.path.join(ROOT_DIR, 'setup.py')
        module = import_module_by_path(setup_py)
        self.assertRaises(SystemExit, module.setup)
        self.assertEqual(module.get_version(), psutil.__version__)
    def test_ad_on_process_creation(self):
        # We are supposed to be able to instantiate Process also in case
        # of zombie processes or access denied.
        with mock.patch.object(psutil.Process, 'create_time',
                               side_effect=psutil.AccessDenied) as meth:
            psutil.Process()
            assert meth.called
        with mock.patch.object(psutil.Process, 'create_time',
                               side_effect=psutil.ZombieProcess(1)) as meth:
            psutil.Process()
            assert meth.called
        # Any other error must still propagate.
        with mock.patch.object(psutil.Process, 'create_time',
                               side_effect=ValueError) as meth:
            with self.assertRaises(ValueError):
                psutil.Process()
            assert meth.called
    def test_sanity_version_check(self):
        # see: https://github.com/giampaolo/psutil/issues/564
        # A mismatch between the C extension version and the Python layer
        # must be detected at import time with an ImportError.
        with mock.patch(
                "psutil._psplatform.cext.version", return_value="0.0.0"):
            with self.assertRaises(ImportError) as cm:
                reload_module(psutil)
            self.assertIn("version conflict", str(cm.exception).lower())
# ===================================================================
# --- Tests for wrap_numbers() function.
# ===================================================================
# Shared 3-field namedtuple used as a fake disk/NIC counters value in the
# wrap_numbers() tests below.
nt = collections.namedtuple('foo', 'a b c')
class TestWrapNumbers(unittest.TestCase):
    """Tests for psutil._common.wrap_numbers().

    wrap_numbers() compensates for OS counters (disk / net I/O) which
    may wrap and restart from a lower value: the numbers returned to
    the user must never decrease across calls with the same name.
    """
    def setUp(self):
        # Each test starts from a pristine per-name cache.
        wrap_numbers.cache_clear()
    tearDown = setUp
    def test_first_call(self):
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_input_hasnt_changed(self):
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_increase_but_no_wrap(self):
        # Monotonically increasing counters must be passed through as-is.
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(10, 15, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(20, 25, 30)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(20, 25, 30)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_wrap(self):
        # let's say 100 is the threshold
        input = {'disk1': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # first wrap restarts from 10
        input = {'disk1': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 110)})
        # then it remains the same
        input = {'disk1': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 110)})
        # then it goes up
        input = {'disk1': nt(100, 100, 90)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 190)})
        # then it wraps again
        input = {'disk1': nt(100, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 210)})
        # and remains the same
        input = {'disk1': nt(100, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 210)})
        # now wrap another num
        input = {'disk1': nt(50, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(150, 100, 210)})
        # and again
        input = {'disk1': nt(40, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(190, 100, 210)})
        # keep it the same
        input = {'disk1': nt(40, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(190, 100, 210)})
    def test_changing_keys(self):
        # Emulate a case where the second call to disk_io()
        # (or whatever) provides a new disk, then the new disk
        # disappears on the third call.
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(5, 5, 5),
                 'disk2': nt(7, 7, 7)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(8, 8, 8)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_changing_keys_w_wrap(self):
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # disk 2 wraps
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(50, 50, 50),
                          'disk2': nt(100, 100, 110)})
        # disk 2 disappears
        input = {'disk1': nt(50, 50, 50)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # then it appears again; the old wrap is supposed to be
        # gone.
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # remains the same
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # and then wraps again
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(50, 50, 50),
                          'disk2': nt(100, 100, 110)})
    def test_real_data(self):
        # Realistic disk_io_counters()-shaped tuples.
        d = {'nvme0n1': (300, 508, 640, 1571, 5970, 1987, 2049, 451751, 47048),
             'nvme0n1p1': (1171, 2, 5600256, 1024, 516, 0, 0, 0, 8),
             'nvme0n1p2': (54, 54, 2396160, 5165056, 4, 24, 30, 1207, 28),
             'nvme0n1p3': (2389, 4539, 5154, 150, 4828, 1844, 2019, 398, 348)}
        self.assertEqual(wrap_numbers(d, 'disk_io'), d)
        self.assertEqual(wrap_numbers(d, 'disk_io'), d)
        # decrease this ↓
        d = {'nvme0n1': (100, 508, 640, 1571, 5970, 1987, 2049, 451751, 47048),
             'nvme0n1p1': (1171, 2, 5600256, 1024, 516, 0, 0, 0, 8),
             'nvme0n1p2': (54, 54, 2396160, 5165056, 4, 24, 30, 1207, 28),
             'nvme0n1p3': (2389, 4539, 5154, 150, 4828, 1844, 2019, 398, 348)}
        out = wrap_numbers(d, 'disk_io')
        self.assertEqual(out['nvme0n1'][0], 400)
    # --- cache tests
    # cache_info() returns a 3-tuple of internal dicts: (last input,
    # per-(key, index) wrap offsets, keys which have wrapped at least once).
    def test_cache_first_call(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(cache[1], {'disk_io': {}})
        self.assertEqual(cache[2], {'disk_io': {}})
    def test_cache_call_twice(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        input = {'disk1': nt(10, 10, 10)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 0}})
        self.assertEqual(cache[2], {'disk_io': {}})
    def test_cache_wrap(self):
        # let's say 100 is the threshold
        input = {'disk1': nt(100, 100, 100)}
        wrap_numbers(input, 'disk_io')
        # first wrap restarts from 10
        input = {'disk1': nt(100, 100, 10)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 100}})
        self.assertEqual(cache[2], {'disk_io': {'disk1': set([('disk1', 2)])}})
        def assert_():
            # Helper: the cache state expected while the offset stays 100.
            cache = wrap_numbers.cache_info()
            self.assertEqual(
                cache[1],
                {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0,
                             ('disk1', 2): 100}})
            self.assertEqual(cache[2],
                             {'disk_io': {'disk1': set([('disk1', 2)])}})
        # then it remains the same
        input = {'disk1': nt(100, 100, 10)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        assert_()
        # then it goes up
        input = {'disk1': nt(100, 100, 90)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        assert_()
        # then it wraps again
        input = {'disk1': nt(100, 100, 20)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 190}})
        self.assertEqual(cache[2], {'disk_io': {'disk1': set([('disk1', 2)])}})
    def test_cache_changing_keys(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        input = {'disk1': nt(5, 5, 5),
                 'disk2': nt(7, 7, 7)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 0}})
        self.assertEqual(cache[2], {'disk_io': {}})
    def test_cache_clear(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        wrap_numbers(input, 'disk_io')
        wrap_numbers.cache_clear('disk_io')
        self.assertEqual(wrap_numbers.cache_info(), ({}, {}, {}))
        # cache_clear() must tolerate already-cleared / unknown names.
        wrap_numbers.cache_clear('disk_io')
        wrap_numbers.cache_clear('?!?')
    @unittest.skipIf(
        not psutil.disk_io_counters() or not psutil.net_io_counters(),
        "no disks or NICs available")
    def test_cache_clear_public_apis(self):
        # The public counters APIs expose their own cache_clear() which
        # must remove only their own entries from the shared cache.
        psutil.disk_io_counters()
        psutil.net_io_counters()
        caches = wrap_numbers.cache_info()
        for cache in caches:
            self.assertIn('psutil.disk_io_counters', cache)
            self.assertIn('psutil.net_io_counters', cache)
        psutil.disk_io_counters.cache_clear()
        caches = wrap_numbers.cache_info()
        for cache in caches:
            self.assertIn('psutil.net_io_counters', cache)
            self.assertNotIn('psutil.disk_io_counters', cache)
        psutil.net_io_counters.cache_clear()
        caches = wrap_numbers.cache_info()
        self.assertEqual(caches, ({}, {}, {}))
# ===================================================================
# --- Example script tests
# ===================================================================
@unittest.skipIf(TOX, "can't test on TOX")
# See: https://travis-ci.org/giampaolo/psutil/jobs/295224806
@unittest.skipIf(TRAVIS and not
                 os.path.exists(os.path.join(SCRIPTS_DIR, 'free.py')),
                 "can't locate scripts directory")
class TestScripts(unittest.TestCase):
    """Tests for scripts in the "scripts" directory."""
    @staticmethod
    def assert_stdout(exe, args=None, **kwds):
        # Run a script with the current interpreter and assert it prints
        # something; AccessDenied failures are tolerated and returned
        # as the error string.
        exe = '"%s"' % os.path.join(SCRIPTS_DIR, exe)
        if args:
            exe = exe + ' ' + args
        try:
            out = sh(sys.executable + ' ' + exe, **kwds).strip()
        except RuntimeError as err:
            if 'AccessDenied' in str(err):
                return str(err)
            else:
                raise
        assert out, out
        return out
    @staticmethod
    def assert_syntax(exe, args=None):
        # Weaker check for interactive scripts which can't be run to
        # completion: just make sure their source parses.
        exe = os.path.join(SCRIPTS_DIR, exe)
        if PY3:
            f = open(exe, 'rt', encoding='utf8')
        else:
            f = open(exe, 'rt')
        with f:
            src = f.read()
        ast.parse(src)
    def test_coverage(self):
        # make sure all example scripts have a test method defined
        meths = dir(self)
        for name in os.listdir(SCRIPTS_DIR):
            if name.endswith('.py'):
                if 'test_' + os.path.splitext(name)[0] not in meths:
                    # self.assert_stdout(name)
                    self.fail('no test defined for %r script'
                              % os.path.join(SCRIPTS_DIR, name))
    @unittest.skipIf(not POSIX, "POSIX only")
    def test_executable(self):
        # Every example script must carry the owner-executable bit.
        for name in os.listdir(SCRIPTS_DIR):
            if name.endswith('.py'):
                path = os.path.join(SCRIPTS_DIR, name)
                if not stat.S_IXUSR & os.stat(path)[stat.ST_MODE]:
                    self.fail('%r is not executable' % path)
    def test_disk_usage(self):
        self.assert_stdout('disk_usage.py')
    def test_free(self):
        self.assert_stdout('free.py')
    def test_meminfo(self):
        self.assert_stdout('meminfo.py')
    def test_procinfo(self):
        self.assert_stdout('procinfo.py', args=str(os.getpid()))
    # can't find users on APPVEYOR or TRAVIS
    @unittest.skipIf(APPVEYOR or TRAVIS and not psutil.users(),
                     "unreliable on APPVEYOR or TRAVIS")
    def test_who(self):
        self.assert_stdout('who.py')
    def test_ps(self):
        self.assert_stdout('ps.py')
    def test_pstree(self):
        self.assert_stdout('pstree.py')
    def test_netstat(self):
        self.assert_stdout('netstat.py')
    # permission denied on travis
    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
    def test_ifconfig(self):
        self.assert_stdout('ifconfig.py')
    @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported")
    def test_pmap(self):
        self.assert_stdout('pmap.py', args=str(os.getpid()))
    @unittest.skipIf(not HAS_MEMORY_FULL_INFO, "not supported")
    def test_procsmem(self):
        self.assert_stdout('procsmem.py', stderr=DEVNULL)
    def test_killall(self):
        self.assert_syntax('killall.py')
    def test_nettop(self):
        self.assert_syntax('nettop.py')
    def test_top(self):
        self.assert_syntax('top.py')
    def test_iotop(self):
        self.assert_syntax('iotop.py')
    def test_pidof(self):
        output = self.assert_stdout('pidof.py', args=psutil.Process().name())
        self.assertIn(str(os.getpid()), output)
    @unittest.skipIf(not WINDOWS, "WINDOWS only")
    def test_winservices(self):
        self.assert_stdout('winservices.py')
    def test_cpu_distribution(self):
        self.assert_syntax('cpu_distribution.py')
    @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported")
    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
    def test_temperatures(self):
        self.assert_stdout('temperatures.py')
    @unittest.skipIf(not HAS_SENSORS_FANS, "not supported")
    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
    def test_fans(self):
        self.assert_stdout('fans.py')
    @unittest.skipIf(not HAS_SENSORS_BATTERY, "not supported")
    @unittest.skipIf(not HAS_BATTERY, "no battery")
    def test_battery(self):
        self.assert_stdout('battery.py')
    def test_sensors(self):
        self.assert_stdout('sensors.py')
# ===================================================================
# --- Unit tests for test utilities.
# ===================================================================
class TestRetryDecorator(unittest.TestCase):
    """Tests for the psutil.tests.retry decorator.

    time.sleep is mocked in every test so no real waiting occurs; its
    call_count doubles as the number of retries actually performed.
    """
    @mock.patch('time.sleep')
    def test_retry_success(self, sleep):
        # Fail 3 times out of 5; make sure the decorated fun returns.
        @retry(retries=5, interval=1, logfun=None)
        def foo():
            while queue:
                queue.pop()
                1 / 0
            return 1
        queue = list(range(3))
        self.assertEqual(foo(), 1)
        self.assertEqual(sleep.call_count, 3)
    @mock.patch('time.sleep')
    def test_retry_failure(self, sleep):
        # Fail 6 times out of 5; the function is supposed to raise exc.
        @retry(retries=5, interval=1, logfun=None)
        def foo():
            while queue:
                queue.pop()
                1 / 0
            return 1
        queue = list(range(6))
        self.assertRaises(ZeroDivisionError, foo)
        self.assertEqual(sleep.call_count, 5)
    @mock.patch('time.sleep')
    def test_exception_arg(self, sleep):
        # An exception type other than the requested one must propagate
        # immediately, with no retries.
        @retry(exception=ValueError, interval=1)
        def foo():
            raise TypeError
        self.assertRaises(TypeError, foo)
        self.assertEqual(sleep.call_count, 0)
    @mock.patch('time.sleep')
    def test_no_interval_arg(self, sleep):
        # if interval is not specified sleep is not supposed to be called
        @retry(retries=5, interval=None, logfun=None)
        def foo():
            1 / 0
        self.assertRaises(ZeroDivisionError, foo)
        self.assertEqual(sleep.call_count, 0)
    @mock.patch('time.sleep')
    def test_retries_arg(self, sleep):
        # retries=5 means exactly 5 sleeps before giving up.
        @retry(retries=5, interval=1, logfun=None)
        def foo():
            1 / 0
        self.assertRaises(ZeroDivisionError, foo)
        self.assertEqual(sleep.call_count, 5)
    @mock.patch('time.sleep')
    def test_retries_and_timeout_args(self, sleep):
        # retries and timeout are mutually exclusive.
        self.assertRaises(ValueError, retry, retries=5, timeout=1)
class TestSyncTestUtils(unittest.TestCase):
    """Tests for the wait_for_* / call_until synchronization helpers."""
    def tearDown(self):
        safe_rmpath(TESTFN)
    def test_wait_for_pid(self):
        wait_for_pid(os.getpid())
        nopid = max(psutil.pids()) + 99999
        # Collapse the retry loop to a single iteration so the helper
        # gives up immediately instead of really waiting.
        with mock.patch('psutil.tests.retry.__iter__', return_value=iter([0])):
            self.assertRaises(psutil.NoSuchProcess, wait_for_pid, nopid)
    def test_wait_for_file(self):
        with open(TESTFN, 'w') as f:
            f.write('foo')
        wait_for_file(TESTFN)
        # by default wait_for_file() also removes the file afterwards
        assert not os.path.exists(TESTFN)
    def test_wait_for_file_empty(self):
        with open(TESTFN, 'w'):
            pass
        wait_for_file(TESTFN, empty=True)
        assert not os.path.exists(TESTFN)
    def test_wait_for_file_no_file(self):
        with mock.patch('psutil.tests.retry.__iter__', return_value=iter([0])):
            self.assertRaises(IOError, wait_for_file, TESTFN)
    def test_wait_for_file_no_delete(self):
        with open(TESTFN, 'w') as f:
            f.write('foo')
        wait_for_file(TESTFN, delete=False)
        assert os.path.exists(TESTFN)
    def test_call_until(self):
        ret = call_until(lambda: 1, "ret == 1")
        self.assertEqual(ret, 1)
class TestFSTestUtils(unittest.TestCase):
    """Tests for the filesystem helpers (safe_rmpath, chdir)."""
    def setUp(self):
        safe_rmpath(TESTFN)
    tearDown = setUp
    def test_safe_rmpath(self):
        # test file is removed
        open(TESTFN, 'w').close()
        safe_rmpath(TESTFN)
        assert not os.path.exists(TESTFN)
        # test no exception if path does not exist
        safe_rmpath(TESTFN)
        # test dir is removed
        os.mkdir(TESTFN)
        safe_rmpath(TESTFN)
        assert not os.path.exists(TESTFN)
        # test other exceptions are raised
        with mock.patch('psutil.tests.os.stat',
                        side_effect=OSError(errno.EINVAL, "")) as m:
            with self.assertRaises(OSError):
                safe_rmpath(TESTFN)
            assert m.called
    def test_chdir(self):
        # The chdir() context manager must restore the original cwd.
        base = os.getcwd()
        os.mkdir(TESTFN)
        with chdir(TESTFN):
            self.assertEqual(os.getcwd(), os.path.join(base, TESTFN))
        self.assertEqual(os.getcwd(), base)
class TestProcessUtils(unittest.TestCase):
    """Tests for the process-spawning helpers in psutil.tests."""
    def test_reap_children(self):
        subp = get_test_subprocess()
        p = psutil.Process(subp.pid)
        assert p.is_running()
        reap_children()
        assert not p.is_running()
        # the module-level bookkeeping must be emptied as well
        assert not psutil.tests._pids_started
        assert not psutil.tests._subprocesses_started
    def test_create_proc_children_pair(self):
        # Expect a (child, grandchild) chain rooted at this process.
        p1, p2 = create_proc_children_pair()
        self.assertNotEqual(p1.pid, p2.pid)
        assert p1.is_running()
        assert p2.is_running()
        children = psutil.Process().children(recursive=True)
        self.assertEqual(len(children), 2)
        self.assertIn(p1, children)
        self.assertIn(p2, children)
        self.assertEqual(p1.ppid(), os.getpid())
        self.assertEqual(p2.ppid(), p1.pid)
        # make sure both of them are cleaned up
        reap_children()
        assert not p1.is_running()
        assert not p2.is_running()
        assert not psutil.tests._pids_started
        assert not psutil.tests._subprocesses_started
    @unittest.skipIf(not POSIX, "POSIX only")
    def test_create_zombie_proc(self):
        zpid = create_zombie_proc()
        self.addCleanup(reap_children, recursive=True)
        p = psutil.Process(zpid)
        self.assertEqual(p.status(), psutil.STATUS_ZOMBIE)
class TestNetUtils(unittest.TestCase):
    """Tests for the socket helpers in psutil.tests."""

    def test_bind_socket(self):
        # BUGFIX: this method was named "bind_socket" (no "test_" prefix),
        # so unittest discovery silently never ran it.
        port = get_free_port()
        with contextlib.closing(bind_socket(addr=('', port))) as s:
            self.assertEqual(s.getsockname()[1], port)

    @unittest.skipIf(not POSIX, "POSIX only")
    def test_bind_unix_socket(self):
        # TCP (SOCK_STREAM is the default)
        with unix_socket_path() as name:
            sock = bind_unix_socket(name)
            with contextlib.closing(sock):
                self.assertEqual(sock.family, socket.AF_UNIX)
                self.assertEqual(sock.type, socket.SOCK_STREAM)
                self.assertEqual(sock.getsockname(), name)
                assert os.path.exists(name)
                assert stat.S_ISSOCK(os.stat(name).st_mode)
        # UDP
        with unix_socket_path() as name:
            sock = bind_unix_socket(name, type=socket.SOCK_DGRAM)
            with contextlib.closing(sock):
                self.assertEqual(sock.type, socket.SOCK_DGRAM)

    def test_tcp_socketpair(self):
        # BUGFIX: this method was misspelled "tcp_tcp_socketpair", so
        # unittest discovery silently never ran it.
        addr = ("127.0.0.1", get_free_port())
        server, client = tcp_socketpair(socket.AF_INET, addr=addr)
        with contextlib.closing(server):
            with contextlib.closing(client):
                # Ensure they are connected and the positions are
                # correct.
                self.assertEqual(server.getsockname(), addr)
                self.assertEqual(client.getpeername(), addr)
                self.assertNotEqual(client.getsockname(), addr)

    @unittest.skipIf(not POSIX, "POSIX only")
    def test_unix_socketpair(self):
        # unix_socketpair() must create two connected AF_UNIX sockets,
        # visible both on the filesystem and via Process.connections().
        p = psutil.Process()
        num_fds = p.num_fds()
        assert not p.connections(kind='unix')
        with unix_socket_path() as name:
            server, client = unix_socketpair(name)
            try:
                assert os.path.exists(name)
                assert stat.S_ISSOCK(os.stat(name).st_mode)
                self.assertEqual(p.num_fds() - num_fds, 2)
                self.assertEqual(len(p.connections(kind='unix')), 2)
                self.assertEqual(server.getsockname(), name)
                self.assertEqual(client.getpeername(), name)
            finally:
                client.close()
                server.close()

    def test_create_sockets(self):
        # create_sockets() must yield at least 2 sockets per family/type.
        with create_sockets() as socks:
            fams = collections.defaultdict(int)
            types = collections.defaultdict(int)
            for s in socks:
                fams[s.family] += 1
                # work around http://bugs.python.org/issue30204
                types[s.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE)] += 1
            self.assertGreaterEqual(fams[socket.AF_INET], 2)
            self.assertGreaterEqual(fams[socket.AF_INET6], 2)
            if POSIX and HAS_CONNECTIONS_UNIX:
                self.assertGreaterEqual(fams[socket.AF_UNIX], 2)
            self.assertGreaterEqual(types[socket.SOCK_STREAM], 2)
            self.assertGreaterEqual(types[socket.SOCK_DGRAM], 2)
class TestOtherUtils(unittest.TestCase):
    """Tests for miscellaneous psutil.tests helpers."""

    def test_is_namedtuple(self):
        # A real namedtuple instance is recognized...
        point = collections.namedtuple('foo', 'a b c')(1, 2, 3)
        self.assertTrue(is_namedtuple(point))
        # ...while a plain tuple is not.
        self.assertFalse(is_namedtuple(tuple()))
if __name__ == '__main__':
    # Allow running this test module directly: python test_misc.py
    run_test_module_by_name(__file__)
| 36.353905 | 79 | 0.577257 |
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import ast
import collections
import contextlib
import errno
import json
import os
import pickle
import socket
import stat
import sys
from psutil import LINUX
from psutil import POSIX
from psutil import WINDOWS
from psutil._common import memoize
from psutil._common import memoize_when_activated
from psutil._common import supports_ipv6
from psutil._common import wrap_numbers
from psutil._compat import PY3
from psutil.tests import APPVEYOR
from psutil.tests import bind_socket
from psutil.tests import bind_unix_socket
from psutil.tests import call_until
from psutil.tests import chdir
from psutil.tests import create_proc_children_pair
from psutil.tests import create_sockets
from psutil.tests import create_zombie_proc
from psutil.tests import DEVNULL
from psutil.tests import get_free_port
from psutil.tests import get_test_subprocess
from psutil.tests import HAS_BATTERY
from psutil.tests import HAS_CONNECTIONS_UNIX
from psutil.tests import HAS_MEMORY_FULL_INFO
from psutil.tests import HAS_MEMORY_MAPS
from psutil.tests import HAS_SENSORS_BATTERY
from psutil.tests import HAS_SENSORS_FANS
from psutil.tests import HAS_SENSORS_TEMPERATURES
from psutil.tests import import_module_by_path
from psutil.tests import is_namedtuple
from psutil.tests import mock
from psutil.tests import reap_children
from psutil.tests import reload_module
from psutil.tests import retry
from psutil.tests import ROOT_DIR
from psutil.tests import run_test_module_by_name
from psutil.tests import safe_rmpath
from psutil.tests import SCRIPTS_DIR
from psutil.tests import sh
from psutil.tests import tcp_socketpair
from psutil.tests import TESTFN
from psutil.tests import TOX
from psutil.tests import TRAVIS
from psutil.tests import unittest
from psutil.tests import unix_socket_path
from psutil.tests import unix_socketpair
from psutil.tests import wait_for_file
from psutil.tests import wait_for_pid
import psutil
import psutil.tests
# ===================================================================
# --- Misc / generic tests.
# ===================================================================
class TestMisc(unittest.TestCase):
def test_process__repr__(self, func=repr):
p = psutil.Process()
r = func(p)
self.assertIn("psutil.Process", r)
self.assertIn("pid=%s" % p.pid, r)
self.assertIn("name=", r)
self.assertIn(p.name(), r)
with mock.patch.object(psutil.Process, "name",
side_effect=psutil.ZombieProcess(os.getpid())):
p = psutil.Process()
r = func(p)
self.assertIn("pid=%s" % p.pid, r)
self.assertIn("zombie", r)
self.assertNotIn("name=", r)
with mock.patch.object(psutil.Process, "name",
side_effect=psutil.NoSuchProcess(os.getpid())):
p = psutil.Process()
r = func(p)
self.assertIn("pid=%s" % p.pid, r)
self.assertIn("terminated", r)
self.assertNotIn("name=", r)
with mock.patch.object(psutil.Process, "name",
side_effect=psutil.AccessDenied(os.getpid())):
p = psutil.Process()
r = func(p)
self.assertIn("pid=%s" % p.pid, r)
self.assertNotIn("name=", r)
def test_process__str__(self):
self.test_process__repr__(func=str)
def test_no_such_process__repr__(self, func=repr):
self.assertEqual(
repr(psutil.NoSuchProcess(321)),
"psutil.NoSuchProcess process no longer exists (pid=321)")
self.assertEqual(
repr(psutil.NoSuchProcess(321, name='foo')),
"psutil.NoSuchProcess process no longer exists (pid=321, "
"name='foo')")
self.assertEqual(
repr(psutil.NoSuchProcess(321, msg='foo')),
"psutil.NoSuchProcess foo")
def test_zombie_process__repr__(self, func=repr):
self.assertEqual(
repr(psutil.ZombieProcess(321)),
"psutil.ZombieProcess process still exists but it's a zombie "
"(pid=321)")
self.assertEqual(
repr(psutil.ZombieProcess(321, name='foo')),
"psutil.ZombieProcess process still exists but it's a zombie "
"(pid=321, name='foo')")
self.assertEqual(
repr(psutil.ZombieProcess(321, name='foo', ppid=1)),
"psutil.ZombieProcess process still exists but it's a zombie "
"(pid=321, name='foo', ppid=1)")
self.assertEqual(
repr(psutil.ZombieProcess(321, msg='foo')),
"psutil.ZombieProcess foo")
def test_access_denied__repr__(self, func=repr):
self.assertEqual(
repr(psutil.AccessDenied(321)),
"psutil.AccessDenied (pid=321)")
self.assertEqual(
repr(psutil.AccessDenied(321, name='foo')),
"psutil.AccessDenied (pid=321, name='foo')")
self.assertEqual(
repr(psutil.AccessDenied(321, msg='foo')),
"psutil.AccessDenied foo")
def test_timeout_expired__repr__(self, func=repr):
self.assertEqual(
repr(psutil.TimeoutExpired(321)),
"psutil.TimeoutExpired timeout after 321 seconds")
self.assertEqual(
repr(psutil.TimeoutExpired(321, pid=111)),
"psutil.TimeoutExpired timeout after 321 seconds (pid=111)")
self.assertEqual(
repr(psutil.TimeoutExpired(321, pid=111, name='foo')),
"psutil.TimeoutExpired timeout after 321 seconds "
"(pid=111, name='foo')")
def test_process__eq__(self):
p1 = psutil.Process()
p2 = psutil.Process()
self.assertEqual(p1, p2)
p2._ident = (0, 0)
self.assertNotEqual(p1, p2)
self.assertNotEqual(p1, 'foo')
def test_process__hash__(self):
s = set([psutil.Process(), psutil.Process()])
self.assertEqual(len(s), 1)
def test__all__(self):
dir_psutil = dir(psutil)
for name in dir_psutil:
if name in ('callable', 'error', 'namedtuple', 'tests',
'long', 'test', 'NUM_CPUS', 'BOOT_TIME',
'TOTAL_PHYMEM'):
continue
if not name.startswith('_'):
try:
__import__(name)
except ImportError:
if name not in psutil.__all__:
fun = getattr(psutil, name)
if fun is None:
continue
if (fun.__doc__ is not None and
'deprecated' not in fun.__doc__.lower()):
self.fail('%r not in psutil.__all__' % name)
for name in psutil.__all__:
self.assertIn(name, dir_psutil)
def test_version(self):
self.assertEqual('.'.join([str(x) for x in psutil.version_info]),
psutil.__version__)
def test_process_as_dict_no_new_names(self):
p = psutil.Process()
p.foo = '1'
self.assertNotIn('foo', p.as_dict())
def test_memoize(self):
@memoize
def foo(*args, **kwargs):
calls.append(None)
return (args, kwargs)
calls = []
for x in range(2):
ret = foo()
expected = ((), {})
self.assertEqual(ret, expected)
self.assertEqual(len(calls), 1)
for x in range(2):
ret = foo(1)
expected = ((1, ), {})
self.assertEqual(ret, expected)
self.assertEqual(len(calls), 2)
for x in range(2):
ret = foo(1, bar=2)
expected = ((1, ), {'bar': 2})
self.assertEqual(ret, expected)
self.assertEqual(len(calls), 3)
foo.cache_clear()
ret = foo()
expected = ((), {})
self.assertEqual(ret, expected)
self.assertEqual(len(calls), 4)
self.assertEqual(foo.__doc__, "foo docstring")
def test_memoize_when_activated(self):
class Foo:
@memoize_when_activated
def foo(self):
calls.append(None)
f = Foo()
calls = []
f.foo()
f.foo()
self.assertEqual(len(calls), 2)
calls = []
f.foo.cache_activate()
f.foo()
f.foo()
self.assertEqual(len(calls), 1)
calls = []
f.foo.cache_deactivate()
f.foo()
f.foo()
self.assertEqual(len(calls), 2)
def test_parse_environ_block(self):
from psutil._common import parse_environ_block
def k(s):
return s.upper() if WINDOWS else s
self.assertEqual(parse_environ_block("a=1\0"),
{k("a"): "1"})
self.assertEqual(parse_environ_block("a=1\0b=2\0\0"),
{k("a"): "1", k("b"): "2"})
self.assertEqual(parse_environ_block("a=1\0b=\0\0"),
{k("a"): "1", k("b"): ""})
self.assertEqual(parse_environ_block("a=1\0b=2\0\0c=3\0"),
{k("a"): "1", k("b"): "2"})
self.assertEqual(parse_environ_block("xxx\0a=1\0"), {k("a"): "1"})
self.assertEqual(parse_environ_block("a=1\0=b=2\0"), {k("a"): "1"})
self.assertEqual(parse_environ_block("a=1\0b=2"), {k("a"): "1"})
    def test_supports_ipv6(self):
        # supports_ipv6() is cached, so make sure we leave a clean
        # slate when the test is over.
        self.addCleanup(supports_ipv6.cache_clear)
        if supports_ipv6():
            # socket.has_ipv6 == False -> reported as unsupported
            with mock.patch('psutil._common.socket') as s:
                s.has_ipv6 = False
                supports_ipv6.cache_clear()
                assert not supports_ipv6()
            supports_ipv6.cache_clear()
            # socket() raising socket.error -> unsupported
            with mock.patch('psutil._common.socket.socket',
                            side_effect=socket.error) as s:
                assert not supports_ipv6()
                assert s.called
            supports_ipv6.cache_clear()
            # socket() raising socket.gaierror -> unsupported
            with mock.patch('psutil._common.socket.socket',
                            side_effect=socket.gaierror) as s:
                assert not supports_ipv6()
                supports_ipv6.cache_clear()
                assert s.called
            supports_ipv6.cache_clear()
            # bind() raising socket.gaierror -> unsupported
            with mock.patch('psutil._common.socket.socket.bind',
                            side_effect=socket.gaierror) as s:
                assert not supports_ipv6()
                supports_ipv6.cache_clear()
                assert s.called
        else:
            # If the helper says IPv6 is unsupported, actually binding
            # an IPv6 socket is expected to fail for real.
            with self.assertRaises(Exception):
                sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
                sock.bind(("::1", 0))
def test_isfile_strict(self):
from psutil._common import isfile_strict
this_file = os.path.abspath(__file__)
assert isfile_strict(this_file)
assert not isfile_strict(os.path.dirname(this_file))
with mock.patch('psutil._common.os.stat',
side_effect=OSError(errno.EPERM, "foo")):
self.assertRaises(OSError, isfile_strict, this_file)
with mock.patch('psutil._common.os.stat',
side_effect=OSError(errno.EACCES, "foo")):
self.assertRaises(OSError, isfile_strict, this_file)
with mock.patch('psutil._common.os.stat',
side_effect=OSError(errno.EINVAL, "foo")):
assert not isfile_strict(this_file)
with mock.patch('psutil._common.stat.S_ISREG', return_value=False):
assert not isfile_strict(this_file)
def test_serialization(self):
def check(ret):
if json is not None:
json.loads(json.dumps(ret))
a = pickle.dumps(ret)
b = pickle.loads(a)
self.assertEqual(ret, b)
check(psutil.Process().as_dict())
check(psutil.virtual_memory())
check(psutil.swap_memory())
check(psutil.cpu_times())
check(psutil.cpu_times_percent(interval=0))
check(psutil.net_io_counters())
if LINUX and not os.path.exists('/proc/diskstats'):
pass
else:
if not APPVEYOR:
check(psutil.disk_io_counters())
check(psutil.disk_partitions())
check(psutil.disk_usage(os.getcwd()))
check(psutil.users())
def test_setup_script(self):
setup_py = os.path.join(ROOT_DIR, 'setup.py')
module = import_module_by_path(setup_py)
self.assertRaises(SystemExit, module.setup)
self.assertEqual(module.get_version(), psutil.__version__)
def test_ad_on_process_creation(self):
with mock.patch.object(psutil.Process, 'create_time',
side_effect=psutil.AccessDenied) as meth:
psutil.Process()
assert meth.called
with mock.patch.object(psutil.Process, 'create_time',
side_effect=psutil.ZombieProcess(1)) as meth:
psutil.Process()
assert meth.called
with mock.patch.object(psutil.Process, 'create_time',
side_effect=ValueError) as meth:
with self.assertRaises(ValueError):
psutil.Process()
assert meth.called
def test_sanity_version_check(self):
with mock.patch(
"psutil._psplatform.cext.version", return_value="0.0.0"):
with self.assertRaises(ImportError) as cm:
reload_module(psutil)
self.assertIn("version conflict", str(cm.exception).lower())
nt = collections.namedtuple('foo', 'a b c')
class TestWrapNumbers(unittest.TestCase):
    """Tests for wrap_numbers(), which keeps cumulative counters
    monotonic: when a number in the input namedtuples is smaller than
    the one seen on the previous call (a counter "wrap"/restart), the
    previously seen maximum is added back so callers never observe a
    decrease.
    """
    def setUp(self):
        wrap_numbers.cache_clear()
    tearDown = setUp
    def test_first_call(self):
        # nothing cached yet: input passes through untouched
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_input_hasnt_changed(self):
        # identical consecutive inputs are returned as-is
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_increase_but_no_wrap(self):
        # monotonically increasing values are returned unchanged
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(10, 15, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(20, 25, 30)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(20, 25, 30)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_wrap(self):
        # a decreased value is treated as a counter restart and gets
        # the previous maximum added on top
        input = {'disk1': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # first wrap restarts from 10
        input = {'disk1': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 110)})
        # then it remains the same
        input = {'disk1': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 110)})
        # then it goes up
        input = {'disk1': nt(100, 100, 90)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 190)})
        # then it wraps again
        input = {'disk1': nt(100, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 210)})
        # and remains the same
        input = {'disk1': nt(100, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(100, 100, 210)})
        # now wrap another num
        input = {'disk1': nt(50, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(150, 100, 210)})
        # and again
        input = {'disk1': nt(40, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(190, 100, 210)})
        # keep it the same
        input = {'disk1': nt(40, 100, 20)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(190, 100, 210)})
    def test_changing_keys(self):
        # Emulate a case where the second call to disk_io()
        # (or whatever) provides a new disk, then the new disk
        # disappears on the third call.
        input = {'disk1': nt(5, 5, 5)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(5, 5, 5),
                 'disk2': nt(7, 7, 7)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        input = {'disk1': nt(8, 8, 8)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
    def test_changing_keys_w_wrap(self):
        # same as above, but one of the appearing/disappearing disks
        # also wraps in the meantime
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # disk 2 wraps
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(50, 50, 50),
                          'disk2': nt(100, 100, 110)})
        # disk 2 disappears
        input = {'disk1': nt(50, 50, 50)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # then it appears again; the old wrap is supposed to be
        # gone.
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # remains the same
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 100)}
        self.assertEqual(wrap_numbers(input, 'disk_io'), input)
        # and then wraps again
        input = {'disk1': nt(50, 50, 50),
                 'disk2': nt(100, 100, 10)}
        self.assertEqual(wrap_numbers(input, 'disk_io'),
                         {'disk1': nt(50, 50, 50),
                          'disk2': nt(100, 100, 110)})
    def test_real_data(self):
        # realistic disk_io_counters()-shaped data
        d = {'nvme0n1': (300, 508, 640, 1571, 5970, 1987, 2049, 451751, 47048),
             'nvme0n1p1': (1171, 2, 5600256, 1024, 516, 0, 0, 0, 8),
             'nvme0n1p2': (54, 54, 2396160, 5165056, 4, 24, 30, 1207, 28),
             'nvme0n1p3': (2389, 4539, 5154, 150, 4828, 1844, 2019, 398, 348)}
        self.assertEqual(wrap_numbers(d, 'disk_io'), d)
        self.assertEqual(wrap_numbers(d, 'disk_io'), d)
        # decrease this ↓
        d = {'nvme0n1': (100, 508, 640, 1571, 5970, 1987, 2049, 451751, 47048),
             'nvme0n1p1': (1171, 2, 5600256, 1024, 516, 0, 0, 0, 8),
             'nvme0n1p2': (54, 54, 2396160, 5165056, 4, 24, 30, 1207, 28),
             'nvme0n1p3': (2389, 4539, 5154, 150, 4828, 1844, 2019, 398, 348)}
        out = wrap_numbers(d, 'disk_io')
        # 100 restarted after the old max of 300: 300 + 100 == 400
        self.assertEqual(out['nvme0n1'][0], 400)
    # --- cache tests
    def test_cache_first_call(self):
        # cache_info() is a 3-tuple of dicts keyed by name:
        # (last input, per-(key, index) offsets, keys which wrapped)
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(cache[1], {'disk_io': {}})
        self.assertEqual(cache[2], {'disk_io': {}})
    def test_cache_call_twice(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        input = {'disk1': nt(10, 10, 10)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        # no wrap occurred: all offsets are still zero
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 0}})
        self.assertEqual(cache[2], {'disk_io': {}})
    def test_cache_wrap(self):
        # let's say 100 is the threshold
        input = {'disk1': nt(100, 100, 100)}
        wrap_numbers(input, 'disk_io')
        input = {'disk1': nt(100, 100, 10)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        # the wrapped field (index 2) now carries a +100 offset
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 100}})
        self.assertEqual(cache[2], {'disk_io': {'disk1': set([('disk1', 2)])}})
        # helper re-checking that offsets / wrapped-keys are unchanged
        def assert_():
            cache = wrap_numbers.cache_info()
            self.assertEqual(
                cache[1],
                {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0,
                             ('disk1', 2): 100}})
            self.assertEqual(cache[2],
                             {'disk_io': {'disk1': set([('disk1', 2)])}})
        # same input: cache state must not change
        input = {'disk1': nt(100, 100, 10)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        assert_()
        # value goes up, no new wrap: state still unchanged
        input = {'disk1': nt(100, 100, 90)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        assert_()
        # second wrap: the offset is bumped to 190
        input = {'disk1': nt(100, 100, 20)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 190}})
        self.assertEqual(cache[2], {'disk_io': {'disk1': set([('disk1', 2)])}})
    def test_cache_changing_keys(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        input = {'disk1': nt(5, 5, 5),
                 'disk2': nt(7, 7, 7)}
        wrap_numbers(input, 'disk_io')
        cache = wrap_numbers.cache_info()
        self.assertEqual(cache[0], {'disk_io': input})
        self.assertEqual(
            cache[1],
            {'disk_io': {('disk1', 0): 0, ('disk1', 1): 0, ('disk1', 2): 0}})
        self.assertEqual(cache[2], {'disk_io': {}})
    def test_cache_clear(self):
        input = {'disk1': nt(5, 5, 5)}
        wrap_numbers(input, 'disk_io')
        wrap_numbers(input, 'disk_io')
        # removing the only cached name leaves the cache empty
        wrap_numbers.cache_clear('disk_io')
        self.assertEqual(wrap_numbers.cache_info(), ({}, {}, {}))
        # clearing an absent/unknown name must not raise
        wrap_numbers.cache_clear('disk_io')
        wrap_numbers.cache_clear('?!?')
    @unittest.skipIf(
        not psutil.disk_io_counters() or not psutil.net_io_counters(),
        "no disks or NICs available")
    def test_cache_clear_public_apis(self):
        # the public *_io_counters() APIs expose their own cache_clear()
        # which removes only that function's entries
        psutil.disk_io_counters()
        psutil.net_io_counters()
        caches = wrap_numbers.cache_info()
        for cache in caches:
            self.assertIn('psutil.disk_io_counters', cache)
            self.assertIn('psutil.net_io_counters', cache)
        psutil.disk_io_counters.cache_clear()
        caches = wrap_numbers.cache_info()
        for cache in caches:
            self.assertIn('psutil.net_io_counters', cache)
            self.assertNotIn('psutil.disk_io_counters', cache)
        psutil.net_io_counters.cache_clear()
        caches = wrap_numbers.cache_info()
        self.assertEqual(caches, ({}, {}, {}))
@unittest.skipIf(TOX, "can't test on TOX")
# See: https://travis-ci.org/giampaolo/psutil/jobs/295224806
@unittest.skipIf(TRAVIS and not
                 os.path.exists(os.path.join(SCRIPTS_DIR, 'free.py')),
                 "can't locate scripts directory")
class TestScripts(unittest.TestCase):
    """Smoke tests for the example scripts in SCRIPTS_DIR: each one is
    either executed (assert_stdout) or at least syntax-checked
    (assert_syntax), and test_coverage() enforces that every script has
    a test in this class.
    """
    @staticmethod
    def assert_stdout(exe, args=None, **kwds):
        # Run the script in a subprocess and assert it prints
        # something; an AccessDenied error message is tolerated and
        # returned as-is.
        exe = '"%s"' % os.path.join(SCRIPTS_DIR, exe)
        if args:
            exe = exe + ' ' + args
        try:
            out = sh(sys.executable + ' ' + exe, **kwds).strip()
        except RuntimeError as err:
            if 'AccessDenied' in str(err):
                return str(err)
            else:
                raise
        assert out, out
        return out
    @staticmethod
    def assert_syntax(exe, args=None):
        # Scripts we can't safely execute are at least parsed with ast.
        exe = os.path.join(SCRIPTS_DIR, exe)
        if PY3:
            f = open(exe, 'rt', encoding='utf8')
        else:
            f = open(exe, 'rt')
        with f:
            src = f.read()
        ast.parse(src)
    def test_coverage(self):
        # every script must have a matching test_* method in this class
        meths = dir(self)
        for name in os.listdir(SCRIPTS_DIR):
            if name.endswith('.py'):
                if 'test_' + os.path.splitext(name)[0] not in meths:
                    self.fail('no test defined for %r script'
                              % os.path.join(SCRIPTS_DIR, name))
    @unittest.skipIf(not POSIX, "POSIX only")
    def test_executable(self):
        # all scripts must have the executable bit set
        for name in os.listdir(SCRIPTS_DIR):
            if name.endswith('.py'):
                path = os.path.join(SCRIPTS_DIR, name)
                if not stat.S_IXUSR & os.stat(path)[stat.ST_MODE]:
                    self.fail('%r is not executable' % path)
    def test_disk_usage(self):
        self.assert_stdout('disk_usage.py')
    def test_free(self):
        self.assert_stdout('free.py')
    def test_meminfo(self):
        self.assert_stdout('meminfo.py')
    def test_procinfo(self):
        self.assert_stdout('procinfo.py', args=str(os.getpid()))
    @unittest.skipIf(APPVEYOR or TRAVIS and not psutil.users(),
                     "unreliable on APPVEYOR or TRAVIS")
    def test_who(self):
        self.assert_stdout('who.py')
    def test_ps(self):
        self.assert_stdout('ps.py')
    def test_pstree(self):
        self.assert_stdout('pstree.py')
    def test_netstat(self):
        self.assert_stdout('netstat.py')
    # permission denied on travis
    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
    def test_ifconfig(self):
        self.assert_stdout('ifconfig.py')
    @unittest.skipIf(not HAS_MEMORY_MAPS, "not supported")
    def test_pmap(self):
        self.assert_stdout('pmap.py', args=str(os.getpid()))
    @unittest.skipIf(not HAS_MEMORY_FULL_INFO, "not supported")
    def test_procsmem(self):
        self.assert_stdout('procsmem.py', stderr=DEVNULL)
    def test_killall(self):
        self.assert_syntax('killall.py')
    def test_nettop(self):
        self.assert_syntax('nettop.py')
    def test_top(self):
        self.assert_syntax('top.py')
    def test_iotop(self):
        self.assert_syntax('iotop.py')
    def test_pidof(self):
        output = self.assert_stdout('pidof.py', args=psutil.Process().name())
        self.assertIn(str(os.getpid()), output)
    @unittest.skipIf(not WINDOWS, "WINDOWS only")
    def test_winservices(self):
        self.assert_stdout('winservices.py')
    def test_cpu_distribution(self):
        self.assert_syntax('cpu_distribution.py')
    @unittest.skipIf(not HAS_SENSORS_TEMPERATURES, "not supported")
    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
    def test_temperatures(self):
        self.assert_stdout('temperatures.py')
    @unittest.skipIf(not HAS_SENSORS_FANS, "not supported")
    @unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
    def test_fans(self):
        self.assert_stdout('fans.py')
    @unittest.skipIf(not HAS_SENSORS_BATTERY, "not supported")
    @unittest.skipIf(not HAS_BATTERY, "no battery")
    def test_battery(self):
        self.assert_stdout('battery.py')
    def test_sensors(self):
        self.assert_stdout('sensors.py')
# ===================================================================
# --- Unit tests for test utilities.
# ===================================================================
class TestRetryDecorator(unittest.TestCase):
    """Tests for the @retry decorator; time.sleep is mocked out so the
    retry loops run instantly and the sleep call count can be asserted.
    """
    @mock.patch('time.sleep')
    def test_retry_success(self, sleep):
        # Fail 3 times out of 5; make sure the decorated fun returns.
        @retry(retries=5, interval=1, logfun=None)
        def foo():
            while queue:
                queue.pop()
                1 / 0
            return 1
        queue = list(range(3))
        self.assertEqual(foo(), 1)
        self.assertEqual(sleep.call_count, 3)
    @mock.patch('time.sleep')
    def test_retry_failure(self, sleep):
        # Fail 6 times out of 5; the function is supposed to raise exc.
        @retry(retries=5, interval=1, logfun=None)
        def foo():
            while queue:
                queue.pop()
                1 / 0
            return 1
        queue = list(range(6))
        self.assertRaises(ZeroDivisionError, foo)
        self.assertEqual(sleep.call_count, 5)
    @mock.patch('time.sleep')
    def test_exception_arg(self, sleep):
        # a non-matching exception type is not retried
        @retry(exception=ValueError, interval=1)
        def foo():
            raise TypeError
        self.assertRaises(TypeError, foo)
        self.assertEqual(sleep.call_count, 0)
    @mock.patch('time.sleep')
    def test_no_interval_arg(self, sleep):
        # if interval is not specified sleep is not supposed to be called
        @retry(retries=5, interval=None, logfun=None)
        def foo():
            1 / 0
        self.assertRaises(ZeroDivisionError, foo)
        self.assertEqual(sleep.call_count, 0)
    @mock.patch('time.sleep')
    def test_retries_arg(self, sleep):
        # retries=5 -> the function is retried (and sleeps) 5 times
        @retry(retries=5, interval=1, logfun=None)
        def foo():
            1 / 0
        self.assertRaises(ZeroDivisionError, foo)
        self.assertEqual(sleep.call_count, 5)
    @mock.patch('time.sleep')
    def test_retries_and_timeout_args(self, sleep):
        # retries and timeout are mutually exclusive
        self.assertRaises(ValueError, retry, retries=5, timeout=1)
class TestSyncTestUtils(unittest.TestCase):
    """Tests for the wait_for_* synchronization helpers in psutil.tests."""
    def tearDown(self):
        safe_rmpath(TESTFN)
    def test_wait_for_pid(self):
        wait_for_pid(os.getpid())
        nopid = max(psutil.pids()) + 99999
        # patch retry's internal iterator so it gives up immediately
        # instead of actually retrying
        with mock.patch('psutil.tests.retry.__iter__', return_value=iter([0])):
            self.assertRaises(psutil.NoSuchProcess, wait_for_pid, nopid)
    def test_wait_for_file(self):
        with open(TESTFN, 'w') as f:
            f.write('foo')
        wait_for_file(TESTFN)
        # by default the file is deleted once consumed
        assert not os.path.exists(TESTFN)
    def test_wait_for_file_empty(self):
        with open(TESTFN, 'w'):
            pass
        # empty=True accepts a zero-length file
        wait_for_file(TESTFN, empty=True)
        assert not os.path.exists(TESTFN)
    def test_wait_for_file_no_file(self):
        # no file at all -> IOError once the (shortened) retry gives up
        with mock.patch('psutil.tests.retry.__iter__', return_value=iter([0])):
            self.assertRaises(IOError, wait_for_file, TESTFN)
    def test_wait_for_file_no_delete(self):
        with open(TESTFN, 'w') as f:
            f.write('foo')
        wait_for_file(TESTFN, delete=False)
        assert os.path.exists(TESTFN)
    def test_call_until(self):
        ret = call_until(lambda: 1, "ret == 1")
        self.assertEqual(ret, 1)
class TestFSTestUtils(unittest.TestCase):
    """Tests for the filesystem test helpers (safe_rmpath, chdir)."""
    def setUp(self):
        safe_rmpath(TESTFN)
    tearDown = setUp
    def test_safe_rmpath(self):
        # test file is removed
        open(TESTFN, 'w').close()
        safe_rmpath(TESTFN)
        assert not os.path.exists(TESTFN)
        # test no exception if path does not exist
        safe_rmpath(TESTFN)
        # test dir is removed
        os.mkdir(TESTFN)
        safe_rmpath(TESTFN)
        assert not os.path.exists(TESTFN)
        # test other exceptions are raised
        with mock.patch('psutil.tests.os.stat',
                        side_effect=OSError(errno.EINVAL, "")) as m:
            with self.assertRaises(OSError):
                safe_rmpath(TESTFN)
            assert m.called
    def test_chdir(self):
        # chdir() is a context manager restoring the original cwd on exit
        base = os.getcwd()
        os.mkdir(TESTFN)
        with chdir(TESTFN):
            self.assertEqual(os.getcwd(), os.path.join(base, TESTFN))
        self.assertEqual(os.getcwd(), base)
class TestProcessUtils(unittest.TestCase):
    """Tests for the process-spawning helpers in psutil.tests."""
    def test_reap_children(self):
        subp = get_test_subprocess()
        p = psutil.Process(subp.pid)
        assert p.is_running()
        reap_children()
        assert not p.is_running()
        # the module-level bookkeeping must be emptied as well
        assert not psutil.tests._pids_started
        assert not psutil.tests._subprocesses_started
    def test_create_proc_children_pair(self):
        # spawns a (child, grandchild) pair of this process
        p1, p2 = create_proc_children_pair()
        self.assertNotEqual(p1.pid, p2.pid)
        assert p1.is_running()
        assert p2.is_running()
        children = psutil.Process().children(recursive=True)
        self.assertEqual(len(children), 2)
        self.assertIn(p1, children)
        self.assertIn(p2, children)
        self.assertEqual(p1.ppid(), os.getpid())
        self.assertEqual(p2.ppid(), p1.pid)
        # make sure both of them are cleaned up
        reap_children()
        assert not p1.is_running()
        assert not p2.is_running()
        assert not psutil.tests._pids_started
        assert not psutil.tests._subprocesses_started
    @unittest.skipIf(not POSIX, "POSIX only")
    def test_create_zombie_proc(self):
        zpid = create_zombie_proc()
        self.addCleanup(reap_children, recursive=True)
        p = psutil.Process(zpid)
        self.assertEqual(p.status(), psutil.STATUS_ZOMBIE)
class TestNetUtils(unittest.TestCase):
    """Tests for the socket helpers provided by psutil.tests."""
    def test_bind_socket(self):
        # BUGFIX: this method was named bind_socket(), so it lacked the
        # "test_" prefix and was never collected by unittest (it also
        # shadowed the module-level bind_socket() helper it calls).
        port = get_free_port()
        with contextlib.closing(bind_socket(addr=('', port))) as s:
            self.assertEqual(s.getsockname()[1], port)
    @unittest.skipIf(not POSIX, "POSIX only")
    def test_bind_unix_socket(self):
        # bind_unix_socket() creates an AF_UNIX socket file; TCP by
        # default, UDP on request.
        with unix_socket_path() as name:
            sock = bind_unix_socket(name)
            with contextlib.closing(sock):
                self.assertEqual(sock.family, socket.AF_UNIX)
                self.assertEqual(sock.type, socket.SOCK_STREAM)
                self.assertEqual(sock.getsockname(), name)
                assert os.path.exists(name)
                assert stat.S_ISSOCK(os.stat(name).st_mode)
        # UDP
        with unix_socket_path() as name:
            sock = bind_unix_socket(name, type=socket.SOCK_DGRAM)
            with contextlib.closing(sock):
                self.assertEqual(sock.type, socket.SOCK_DGRAM)
    def test_tcp_socketpair(self):
        # BUGFIX: this method was named tcp_tcp_socketpair() (typo), so
        # it lacked the "test_" prefix and was never run by unittest.
        addr = ("127.0.0.1", get_free_port())
        server, client = tcp_socketpair(socket.AF_INET, addr=addr)
        with contextlib.closing(server):
            with contextlib.closing(client):
                # Ensure they are connected and the positions are
                # correct.
                self.assertEqual(server.getsockname(), addr)
                self.assertEqual(client.getpeername(), addr)
                self.assertNotEqual(client.getsockname(), addr)
    @unittest.skipIf(not POSIX, "POSIX only")
    def test_unix_socketpair(self):
        # unix_socketpair() returns a connected AF_UNIX (server,
        # client) pair consuming exactly two file descriptors.
        p = psutil.Process()
        num_fds = p.num_fds()
        assert not p.connections(kind='unix')
        with unix_socket_path() as name:
            server, client = unix_socketpair(name)
            try:
                assert os.path.exists(name)
                assert stat.S_ISSOCK(os.stat(name).st_mode)
                self.assertEqual(p.num_fds() - num_fds, 2)
                self.assertEqual(len(p.connections(kind='unix')), 2)
                self.assertEqual(server.getsockname(), name)
                self.assertEqual(client.getpeername(), name)
            finally:
                client.close()
                server.close()
    def test_create_sockets(self):
        # create_sockets() yields at least 2 sockets per family / type
        with create_sockets() as socks:
            fams = collections.defaultdict(int)
            types = collections.defaultdict(int)
            for s in socks:
                fams[s.family] += 1
                # work around http://bugs.python.org/issue30204
                types[s.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE)] += 1
            self.assertGreaterEqual(fams[socket.AF_INET], 2)
            self.assertGreaterEqual(fams[socket.AF_INET6], 2)
            if POSIX and HAS_CONNECTIONS_UNIX:
                self.assertGreaterEqual(fams[socket.AF_UNIX], 2)
            self.assertGreaterEqual(types[socket.SOCK_STREAM], 2)
            self.assertGreaterEqual(types[socket.SOCK_DGRAM], 2)
class TestOtherUtils(unittest.TestCase):
    """Miscellaneous test-utility helpers."""
    def test_is_namedtuple(self):
        # a real namedtuple instance is recognized...
        point = collections.namedtuple('foo', 'a b c')(1, 2, 3)
        assert is_namedtuple(point)
        # ...while a plain tuple is not
        assert not is_namedtuple(tuple())
if __name__ == '__main__':
    # Allow running this test module directly via psutil's test runner.
    run_test_module_by_name(__file__)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.