function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def get_sort_order_info(
cls, event_details: EventDetails | the-blue-alliance/the-blue-alliance | [
334,
153,
334,
422,
1283632451
] |
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_delete(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
connection_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
connection_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _create_or_update_initial(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
connection_name, # type: str
express_route_circuit_connection_parameters, # type: "_models.ExpressRouteCircuitConnection"
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_create_or_update(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
connection_name, # type: str
express_route_circuit_connection_parameters, # type: "_models.ExpressRouteCircuitConnection"
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
self,
resource_group_name, # type: str
circuit_name, # type: str
peering_name, # type: str
**kwargs # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, user_id=None, token=None, created=None, local_vars_configuration=None): # noqa: E501
"""Session - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._user_id = None
self._token = None
self._created = None
self.discriminator = None
self.user_id = user_id
self.token = token
self.created = created | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def user_id(self):
"""Gets the user_id of this Session. # noqa: E501
The ID of the user of this session # noqa: E501
:return: The user_id of this Session. # noqa: E501
:rtype: int
"""
return self._user_id | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def user_id(self, user_id):
"""Sets the user_id of this Session.
The ID of the user of this session # noqa: E501
:param user_id: The user_id of this Session. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and user_id is None: # noqa: E501
raise ValueError("Invalid value for `user_id`, must not be `None`") # noqa: E501
self._user_id = user_id | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def token(self):
"""Gets the token of this Session. # noqa: E501
An opaque session identifier # noqa: E501
:return: The token of this Session. # noqa: E501
:rtype: str
"""
return self._token | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def token(self, token):
"""Sets the token of this Session.
An opaque session identifier # noqa: E501
:param token: The token of this Session. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and token is None: # noqa: E501
raise ValueError("Invalid value for `token`, must not be `None`") # noqa: E501
self._token = token | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def created(self):
"""Gets the created of this Session. # noqa: E501
Unix timestamp indicating when the session was first created. # noqa: E501
:return: The created of this Session. # noqa: E501
:rtype: datetime
"""
return self._created | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def created(self, created):
"""Sets the created of this Session.
Unix timestamp indicating when the session was first created. # noqa: E501
:param created: The created of this Session. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and created is None: # noqa: E501
raise ValueError("Invalid value for `created`, must not be `None`") # noqa: E501
self._created = created | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict()) | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Session):
return False
return self.to_dict() == other.to_dict() | talon-one/talon_one.py | [
1,
3,
1,
1,
1484929786
] |
def build_export_request(
vault_name: str,
resource_group_name: str,
subscription_id: str,
*,
filter: Optional[str] = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def export(
self,
vault_name: str,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def setUp(self):
self.test_particle = Particle(1, 2) | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def test_attribute_types(self):
for attribute in ['mass', 'charge']:
self.assertIsInstance(getattr(self.test_particle, attribute), float,
msg=f"Particle: {attribute} is not a float") | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def setUp(self):
self.electron = Electron() | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def test_Sand_radiation_constant(self):
# value from S. Lee: Accelerator Physics, 2nd ed., eq (4.5)
# convert from GeV^3 to eV^3
self.assertAlmostEqual(self.electron.C_gamma, 8.846e-5 / (1e9)**3, delta=1e-35,
msg='Electron: wrong radiation constant') | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def setUp(self):
self.proton = Proton() | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def test_Sand_radiation_constant(self):
# value from S. Lee: Accelerator Physics, 2nd ed., eq (4.5)
# convert from GeV^3 to eV^3
self.assertAlmostEqual(self.proton.C_gamma, 7.783e-18 / (1e9)**3, delta=1e-48,
msg='Proton: wrong radiation constant') | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def setUp(self):
# Bunch parameters
# -----------------
N_turn = 200
N_b = 1e9 # Intensity
N_p = int(2e6) # Macro-particles
# Machine parameters
# --------------------
C = 6911.5038 # Machine circumference [m]
p = 450e9 # Synchronous momentum [eV/c]
gamma_t = 17.95142852 # Transition gamma
alpha = 1./gamma_t**2 # First order mom. comp. factor
# Define general parameters
# --------------------------
self.general_params = Ring(C, alpha, p, Proton(), N_turn)
# Define beam
# ------------
self.beam = Beam(self.general_params, N_p, N_b)
# Define RF section
# -----------------
self.rf_params = RFStation(self.general_params, [4620], [7e6], [0.]) | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def tearDown(self):
del self.general_params
del self.beam
del self.rf_params | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def test_beam_statistic(self):
sigma_dt = 1.
sigma_dE = 1.
self.beam.dt = sigma_dt*numpy.random.randn(self.beam.n_macroparticles)
self.beam.dE = sigma_dE*numpy.random.randn(self.beam.n_macroparticles)
self.beam.statistics()
self.assertAlmostEqual(self.beam.sigma_dt, sigma_dt, delta=1e-2,
msg='Beam: Failed statistic sigma_dt')
self.assertAlmostEqual(self.beam.sigma_dE, sigma_dE, delta=1e-2,
msg='Beam: Failed statistic sigma_dE')
self.assertAlmostEqual(self.beam.mean_dt, 0., delta=1e-2,
msg='Beam: Failed statistic mean_dt')
self.assertAlmostEqual(self.beam.mean_dE, 0., delta=1e-2,
msg='Beam: Failed statistic mean_dE') | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def test_losses_longitudinal_cut(self):
longitudinal_tracker = RingAndRFTracker(self.rf_params, self.beam)
full_tracker = FullRingAndRF([longitudinal_tracker])
try:
matched_from_distribution_function(self.beam,
full_tracker,
distribution_exponent=1.5,
distribution_type='binomial',
bunch_length=1.65e-9,
bunch_length_fit='fwhm',
distribution_variable='Hamiltonian')
except TypeError as te:
self.skipTest("Skipped because of known bug in deepcopy. Exception message %s"
% str(te))
self.beam.losses_longitudinal_cut(0., 5e-9)
self.assertEqual(len(self.beam.id[self.beam.id == 0]), 0,
msg='Beam: Failed losses_longitudinal_cut, first')
self.beam.dt += 10e-9
self.beam.losses_longitudinal_cut(0., 5e-9)
self.assertEqual(len(self.beam.id[self.beam.id == 0]),
self.beam.n_macroparticles,
msg='Beam: Failed losses_longitudinal_cut, second') | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def test_addition(self):
np = numpy
testdEs = np.linspace(-1E6, 1E6, 2000000)
testdts = np.linspace(0, 10E-9, 2000000)
self.beam.dE = testdEs
self.beam.dt = testdts
testdEs = np.linspace(-2E6, 2E6, 100000)
testdts = np.linspace(-1E-9, 12E-9, 100000)
self.beam.add_particles([testdts, testdEs])
self.assertEqual(self.beam.n_macroparticles, 2100000,
msg="n_macroparticles not incremented correctly")
testBeam = Beam(self.general_params, 200, 0)
testBeam.id[:100] = 0
self.beam.add_beam(testBeam)
self.assertEqual(self.beam.id[2100000:2100100].tolist(), [0]*100,
msg="particle ids not applied correctly")
self.assertEqual(self.beam.n_macroparticles, 2100200,
msg="Added macroparticles not incremented n_macro correctly")
self.beam += testBeam
self.assertEqual(self.beam.n_macroparticles, 2100400,
msg="Added macroparticles not incremented n_macro correctly")
self.beam += (testdts, testdEs)
self.assertEqual(self.beam.n_macroparticles, 2200400,
msg="Added macroparticles not incremented n_macro correctly")
self.assertEqual(-2E6, np.min(self.beam.dE),
msg="coordinates of added beam not used correctly")
self.assertEqual(2E6, np.max(self.beam.dE),
msg="coordinates of added beam not used correctly")
self.assertEqual(-1E-9, np.min(self.beam.dt),
msg="coordinates of added beam not used correctly")
self.assertEqual(12E-9, np.max(self.beam.dt),
msg="coordinates of added beam not used correctly")
with self.assertRaises(blExcept.ParticleAdditionError,
msg="""Unequal length time and energy should raise exception"""):
self.beam += ([1, 2, 3], [4, 5])
with self.assertRaises(blExcept.ParticleAdditionError,
msg="""Mising time/energy should raise exception"""):
self.beam += ([1, 2, 3])
with self.assertRaises(TypeError, msg='Wrong type should raise exception'):
self.beam.add_beam(([1], [2])) | blond-admin/BLonD | [
10,
23,
10,
17,
1413184470
] |
def __init__(self, target_suite_name: str, source_suite_name: str):
self._lconf = LocalConfig()
self._dak = DakBridge()
# FIXME: Don't hardcode this!
repo_name = 'master'
# the repository of the distribution we import stuff into
self._target_repo = Repository(self._lconf.archive_root_dir,
repo_name)
self._target_repo.set_trusted(True)
self._target_suite_name = target_suite_name
self._source_suite_name = source_suite_name
self._distro_tag = config_get_distro_tag()
self._synced_source_pkgs = []
with session_scope() as session:
sync_source = session.query(SynchrotronSource) \
.filter(SynchrotronSource.suite_name == self._source_suite_name).one()
# FIXME: Synchrotron needs adjustments to work
# better with the new "multiple autosync tasks" model.
# This code will need to be revised for that
# (currently it is just a 1:1 translation from D code)
# the repository of the distribution we use to sync stuff from
self._source_repo = Repository(sync_source.repo_url,
sync_source.os_name,
self._lconf.synchrotron_sourcekeyrings)
# we trust everything by default
self._imports_trusted = True
with session_scope() as session:
self._sync_blacklist = set([value for value, in session.query(SyncBlacklistEntry.pkgname)]) | lkorigin/laniakea | [
20,
8,
20,
1,
1465596960
] |
def _get_repo_source_package_map(self, repo, suite_name: str, component_name: str):
''' Get an associative array of the newest source packages present in a repository. '''
suite = ArchiveSuite(suite_name)
component = ArchiveComponent(component_name)
spkgs = repo.source_packages(suite, component)
return make_newest_packages_dict(spkgs) | lkorigin/laniakea | [
20,
8,
20,
1,
1465596960
] |
def _get_target_source_packages(self, component: str):
''' Get mapping of all sources packages in a suite and its parent suite. '''
with session_scope() as session:
target_suite = session.query(ArchiveSuite) \
.filter(ArchiveSuite.name == self._target_suite_name).one()
suite_pkgmap = self._get_repo_source_package_map(self._target_repo,
target_suite.name,
component)
if target_suite.parent:
# we have a parent suite
parent_map = self._get_repo_source_package_map(self._target_repo,
target_suite.parent.name,
component)
# merge the two arrays, keeping only the latest versions
suite_pkgmap = make_newest_packages_dict(list(parent_map.values()) + list(suite_pkgmap.values()))
return suite_pkgmap | lkorigin/laniakea | [
20,
8,
20,
1,
1465596960
] |
def _import_source_package(self, spkg: SourcePackage, component: str) -> bool:
'''
Import a source package from the source repository into the
target repo.
'''
dscfile = None
for f in spkg.files:
# the source repository might be on a remote location, so we need to
# request each file to be there.
# (dak will fetch the files referenced in the .dsc file from the same directory)
if f.fname.endswith('.dsc'):
dscfile = self._source_repo.get_file(f)
self._source_repo.get_file(f)
if not dscfile:
log.error('Critical consistency error: Source package {} in repository {} has no .dsc file.'
.format(spkg.name, self._source_repo.base_dir))
return False
if self._import_package_files(self._target_suite_name, component, [dscfile]):
self._synced_source_pkgs.append(spkg)
return True
return False | lkorigin/laniakea | [
20,
8,
20,
1,
1465596960
] |
def sync_packages(self, component: str, pkgnames: List[str], force: bool = False):
self._synced_source_pkgs = []
with session_scope() as session:
sync_conf = session.query(SynchrotronConfig) \
.join(SynchrotronConfig.destination_suite) \
.join(SynchrotronConfig.source) \
.filter(ArchiveSuite.name == self._target_suite_name,
SynchrotronSource.suite_name == self._source_suite_name).one_or_none()
if not sync_conf:
log.error('Unable to find a sync config for this source/destination combination.')
return False
if not sync_conf.sync_enabled:
log.error('Can not synchronize package: Synchronization is disabled for this configuration.')
return False
target_suite = session.query(ArchiveSuite) \
.filter(ArchiveSuite.name == self._target_suite_name).one()
dest_pkg_map = self._get_target_source_packages(component)
src_pkg_map = self._get_repo_source_package_map(self._source_repo,
self._source_suite_name,
component)
for pkgname in pkgnames:
spkg = src_pkg_map.get(pkgname)
dpkg = dest_pkg_map.get(pkgname)
if not spkg:
log.info('Can not sync {}: Does not exist in source.'.format(pkgname))
continue
if pkgname in self._sync_blacklist:
log.info('Can not sync {}: The package is blacklisted.'.format(pkgname))
continue
if dpkg:
if version_compare(dpkg.version, spkg.version) >= 0:
if force:
log.warning('{}: Target version \'{}\' is newer/equal than source version \'{}\'.'
.format(pkgname, dpkg.version, spkg.version))
else:
log.info('Can not sync {}: Target version \'{}\' is newer/equal than source version \'{}\'.'
.format(pkgname, dpkg.version, spkg.version))
continue
if not force:
if self._distro_tag in version_revision(dpkg.version):
log.error('Not syncing {}/{}: Destination has modifications (found {}).'
.format(spkg.name, spkg.version, dpkg.version))
continue
# sync source package
# the source package must always be known to dak first
ret = self._import_source_package(spkg, component)
if not ret:
return False
ret = self._import_binaries_for_source(sync_conf, target_suite, component, self._synced_source_pkgs, force)
# TODO: Analyze the input, fetch the packages from the source distribution and
# import them into the target in their correct order.
# Then apply the correct, synced override from the source distro.
self._publish_synced_spkg_events(sync_conf.source.os_name,
sync_conf.source.suite_name,
sync_conf.destination_suite.name,
force)
return ret | lkorigin/laniakea | [
20,
8,
20,
1,
1465596960
] |
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
"""
Create all the user accounts that are needed for this test script to run independently of other tests.
Previously created accounts will not be re-created.
"""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
test_user_1_private_role = self.test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.test_user_2_email, username=common.test_user_2_name )
test_user_2 = self.test_db_util.get_user( common.test_user_1_email )
assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
test_user_2_private_role = self.test_db_util.get_private_role( test_user_2 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = self.test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = self.test_db_util.get_private_role( admin_user ) | mikel-egana-aranguren/SADI-Galaxy-Docker | [
1,
3,
1,
1,
1417087373
] |
def test_0010_verify_datatypes_repository( self ):
'''Verify the blast_datatypes_0120 repository.'''
'''
We are at step 1a.
Check for appropriate strings, most importantly BlastXml, BlastNucDb, and BlastProtDb,
the datatypes that are defined in datatypes_conf.xml.
'''
global repository_datatypes_count
repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
strings_displayed = [ 'BlastXml', 'BlastNucDb', 'BlastProtDb', 'application/xml', 'text/html', 'blastxml', 'blastdbn', 'blastdbp']
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
repository_datatypes_count = int( self.get_repository_datatypes_count( repository ) ) | mikel-egana-aranguren/SADI-Galaxy-Docker | [
1,
3,
1,
1,
1417087373
] |
def test_0015_create_tool_repository( self ):
"""Create and populate the blastxml_to_top_descr_0120 repository"""
"""
We are at step 2.
Create and populate blastxml_to_top_descr_0120.
"""
category = self.create_category( name='Test 0120', description='Description of test 0120' )
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
strings_displayed = [ 'Repository %s' % "'%s'" % tool_repository_name,
'Repository %s has been created' % "<b>%s</b>" % tool_repository_name ]
repository = self.get_or_create_repository( name=tool_repository_name,
description=tool_repository_description,
long_description=tool_repository_long_description,
owner=common.test_user_1_name,
category_id=self.security.encode_id( category.id ),
strings_displayed=strings_displayed )
self.upload_file( repository,
filename='blast/blastxml_to_top_descr.tar',
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded blastxml_to_top_descr tarball.',
strings_displayed=[],
strings_not_displayed=[] ) | mikel-egana-aranguren/SADI-Galaxy-Docker | [
1,
3,
1,
1,
1417087373
] |
def test_0020_verify_tool_repository( self ):
'''Verify the blastxml_to_top_descr_0120 repository.'''
'''
We are at step 2a.
Check for appropriate strings, such as tool name, description, and version.
'''
repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
strings_displayed = [ 'blastxml_to_top_descr_0120', 'BLAST top hit descriptions', 'Make a table from BLAST XML' ]
strings_displayed.extend( [ '0.0.1', 'Valid tools'] )
self.display_manage_repository_page( repository, strings_displayed=strings_displayed ) | mikel-egana-aranguren/SADI-Galaxy-Docker | [
1,
3,
1,
1,
1417087373
] |
def test_0025_create_repository_dependency( self ):
'''Create a repository dependency on blast_datatypes_0120.'''
'''
We are at step 3.
Create a simple repository dependency for blastxml_to_top_descr_0120 that defines a dependency on blast_datatypes_0120.
'''
datatypes_repository = self.test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_2_name )
tool_repository = self.test_db_util.get_repository_by_name_and_owner( tool_repository_name, common.test_user_1_name )
dependency_xml_path = self.generate_temp_path( 'test_0120', additional_paths=[ 'dependencies' ] )
datatypes_tuple = ( self.url, datatypes_repository.name, datatypes_repository.user.username, self.get_repository_tip( datatypes_repository ) )
self.create_repository_dependency( repository=tool_repository, repository_tuples=[ datatypes_tuple ], filepath=dependency_xml_path ) | mikel-egana-aranguren/SADI-Galaxy-Docker | [
1,
3,
1,
1,
1417087373
] |
def before_request():
"""
This is executed before the request
"""
ensure_no_config_object()
request.all_data = get_all_params(request)
request.User = get_user_from_param(request.all_data)
privacyidea_server = current_app.config.get("PI_AUDIT_SERVERNAME") or \
request.host
# Create a policy_object, that reads the database audit settings
# and contains the complete policy definition during the request.
# This audit_object can be used in the postpolicy and prepolicy and it
# can be passed to the innerpolicies.
g.policy_object = PolicyClass()
g.audit_object = getAudit(current_app.config, g.startdate)
g.event_config = EventConfiguration()
# access_route contains the ip addresses of all clients, hops and proxies.
g.client_ip = get_client_ip(request, get_from_config(SYSCONF.OVERRIDECLIENT))
# Save the HTTP header in the localproxy object
g.request_headers = request.headers
g.serial = getParam(request.all_data, "serial", default=None)
g.audit_object.log({"success": False,
"action_detail": "",
"client": g.client_ip,
"client_user_agent": request.user_agent.browser,
"privacyidea_server": privacyidea_server,
"action": "{0!s} {1!s}".format(request.method, request.url_rule),
"info": ""}) | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def offlinerefill():
"""
This endpoint allows to fetch new offline OTP values for a token,
that is already offline.
According to the definition it will send the missing OTP values, so that
the client will have as much otp values as defined.
:param serial: The serial number of the token, that should be refilled.
:param refilltoken: The authorization token, that allows refilling.
:param pass: the last password (maybe password+OTP) entered by the user
:return:
"""
serial = getParam(request.all_data, "serial", required)
refilltoken = getParam(request.all_data, "refilltoken", required)
password = getParam(request.all_data, "pass", required)
tokenobj_list = get_tokens(serial=serial)
if len(tokenobj_list) != 1:
raise ParameterError("The token does not exist")
else:
tokenobj = tokenobj_list[0]
tokenattachments = list_machine_tokens(serial=serial, application="offline")
if tokenattachments:
# TODO: Currently we do not distinguish, if a token had more than one offline attachment
# We need the options to pass the count and the rounds for the next offline OTP values,
# which could have changed in the meantime.
options = tokenattachments[0].get("options")
# check refill token:
if tokenobj.get_tokeninfo("refilltoken") == refilltoken:
# refill
otps = MachineApplication.get_refill(tokenobj, password, options)
refilltoken = MachineApplication.generate_new_refilltoken(tokenobj)
response = send_result(True)
content = response.json
content["auth_items"] = {"offline": [{"refilltoken": refilltoken,
"response": otps}]}
response.set_data(json.dumps(content))
return response
raise ParameterError("Token is not an offline token or refill token is incorrect") | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def check():
"""
check the authentication for a user or a serial number.
Either a ``serial`` or a ``user`` is required to authenticate.
The PIN and OTP value is sent in the parameter ``pass``.
In case of successful authentication it returns ``result->value: true``.
In case of a challenge response authentication a parameter ``exception=1``
can be passed. This would result in a HTTP 500 Server Error response if
an error occurred during sending of SMS or Email.
In case ``/validate/radiuscheck`` is requested, the responses are
modified as follows: A successful authentication returns an empty ``HTTP
204`` response. An unsuccessful authentication returns an empty ``HTTP
400`` response. Error responses are the same responses as for the
``/validate/check`` endpoint.
:param serial: The serial number of the token, that tries to authenticate.
:param user: The loginname/username of the user, who tries to authenticate.
:param realm: The realm of the user, who tries to authenticate. If the
realm is omitted, the user is looked up in the default realm.
:param type: The tokentype of the tokens, that are taken into account during
authentication. Requires the *authz* policy :ref:`application_tokentype_policy`.
It is ignored when a distinct serial is given.
:param pass: The password, that consists of the OTP PIN and the OTP value.
:param otponly: If set to 1, only the OTP value is verified. This is used
in the management UI. Only used with the parameter serial.
:param transaction_id: The transaction ID for a response to a challenge
request
:param state: The state ID for a response to a challenge request
:return: a json result with a boolean "result": true
**Example Validation Request**:
.. sourcecode:: http
POST /validate/check HTTP/1.1
Host: example.com
Accept: application/json
user=user
realm=realm1
pass=s3cret123456
**Example response** for a successful authentication:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"detail": {
"message": "matching 1 tokens",
"serial": "PISP0000AB00",
"type": "spass"
},
"id": 1,
"jsonrpc": "2.0",
"result": {
"status": true,
"value": true
},
"version": "privacyIDEA unknown"
}
**Example response** for this first part of a challenge response authentication:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"detail": {
"serial": "PIEM0000AB00",
"type": "email",
"transaction_id": "12345678901234567890",
"multi_challenge: [ {"serial": "PIEM0000AB00",
"transaction_id": "12345678901234567890",
"message": "Please enter otp from your email",
"client_mode": "interactive"},
{"serial": "PISM12345678",
"transaction_id": "12345678901234567890",
"message": "Please enter otp from your SMS",
"client_mode": "interactive"}
]
},
"id": 2,
"jsonrpc": "2.0",
"result": {
"status": true,
"value": false
},
"version": "privacyIDEA unknown"
}
In this example two challenges are triggered, one with an email and one
with an SMS. The application and thus the user has to decide, which one
to use. They can use either.
The challenges also contain the information of the "client_mode". This
tells the plugin, whether it should display an input field to ask for the
OTP value or e.g. to poll for an answered authentication.
Read more at :ref:`client_modes`.
.. note:: All challenge response tokens have the same ``transaction_id`` in
this case.
**Example response** for a successful authentication with ``/samlcheck``:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"detail": {
"message": "matching 1 tokens",
"serial": "PISP0000AB00",
"type": "spass"
},
"id": 1,
"jsonrpc": "2.0",
"result": {
"status": true,
"value": {"attributes": {
"username": "koelbel",
"realm": "themis",
"mobile": null,
"phone": null,
"myOwn": "/data/file/home/koelbel",
"resolver": "themis",
"surname": "Kölbel",
"givenname": "Cornelius",
"email": null},
"auth": true}
},
"version": "privacyIDEA unknown"
}
The response in ``value->attributes`` can contain additional attributes
(like "myOwn") which you can define in the LDAP resolver in the attribute
mapping.
"""
user = request.User
serial = getParam(request.all_data, "serial")
password = getParam(request.all_data, "pass", required)
otp_only = getParam(request.all_data, "otponly")
token_type = getParam(request.all_data, "type")
options = {"g": g,
"clientip": g.client_ip,
"user": user}
# Add all params to the options
for key, value in request.all_data.items():
if value and key not in ["g", "clientip", "user"]:
options[key] = value
g.audit_object.log({"user": user.login,
"resolver": user.resolver,
"realm": user.realm})
if serial:
if user:
# check if the given token belongs to the user
if not get_tokens(user=user, serial=serial, count=True):
raise ParameterError('Given serial does not belong to given user!')
if not otp_only:
success, details = check_serial_pass(serial, password, options=options)
else:
success, details = check_otp(serial, password)
result = success
else:
options["token_type"] = token_type
success, details = check_user_pass(user, password, options=options)
result = success
if request.path.endswith("samlcheck"):
ui = user.info
result = {"auth": success,
"attributes": {}}
if return_saml_attributes():
if success or return_saml_attributes_on_fail():
# privacyIDEA's own attribute map
result["attributes"] = {"username": ui.get("username"),
"realm": user.realm,
"resolver": user.resolver,
"email": ui.get("email"),
"surname": ui.get("surname"),
"givenname": ui.get("givenname"),
"mobile": ui.get("mobile"),
"phone": ui.get("phone")}
# additional attributes
for k, v in ui.items():
result["attributes"][k] = v
g.audit_object.log({"info": log_used_user(user, details.get("message")),
"success": success,
"serial": serial or details.get("serial"),
"token_type": details.get("type")})
return send_result(result, rid=2, details=details) | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def trigger_challenge():
    """
    An administrator can call this endpoint if he has the right of
    ``triggerchallenge`` (scope: admin).
    He can pass a ``user`` name and or a ``serial`` number.
    privacyIDEA will trigger challenges for all native challenges response
    tokens, possessed by this user or only for the given serial number.
    The request needs to contain a valid PI-Authorization header.
    :param user: The loginname/username of the user, who tries to authenticate.
    :param realm: The realm of the user, who tries to authenticate. If the
        realm is omitted, the user is looked up in the default realm.
    :param serial: The serial number of the token.
    :param type: The tokentype of the tokens, that are taken into account during
        authentication. Requires authz policy application_tokentype.
        Is ignored when a distinct serial is given.
    :return: a json result with a "result" of the number of matching
        challenge response tokens
    **Example response** for a successful triggering of challenge:
    .. sourcecode:: http
       HTTP/1.1 200 OK
       Content-Type: application/json
       {
           "detail": {
               "client_mode": "interactive",
               "message": "please enter otp: , please enter otp: ",
               "messages": [
                   "please enter otp: ",
                   "please enter otp: "
               ],
               "multi_challenge": [
                   {
                       "client_mode": "interactive",
                       "message": "please enter otp: ",
                       "serial": "TOTP000026CB",
                       "transaction_id": "11451135673179897001",
                       "type": "totp"
                   },
                   {
                       "client_mode": "interactive",
                       "message": "please enter otp: ",
                       "serial": "OATH0062752C",
                       "transaction_id": "11451135673179897001",
                       "type": "hotp"
                   }
               ],
               "serial": "OATH0062752C",
               "threadid": 140329819764480,
               "transaction_id": "11451135673179897001",
               "transaction_ids": [
                   "11451135673179897001",
                   "11451135673179897001"
               ],
               "type": "hotp"
           },
           "id": 2,
           "jsonrpc": "2.0",
           "result": {
               "status": true,
               "value": 2
           }
    **Example response** for response, if the user has no challenge token:
    .. sourcecode:: http
       HTTP/1.1 200 OK
       Content-Type: application/json
       {
           "detail": {"messages": [],
                      "threadid": 140031212377856,
                      "transaction_ids": []},
           "id": 1,
           "jsonrpc": "2.0",
           "result": {"status": true,
                      "value": 0},
           "signature": "205530282...54508",
           "time": 1484303812.346576,
           "version": "privacyIDEA 2.17",
           "versionnumber": "2.17"
       }
    **Example response** for a failed triggering of a challenge. In this case
    the ``status`` will be ``false``.
    .. sourcecode:: http
       HTTP/1.1 200 OK
       Content-Type: application/json
       {
           "detail": null,
           "id": 1,
           "jsonrpc": "2.0",
           "result": {"error": {"code": 905,
                                "message": "ERR905: The user can not be
                                found in any resolver in this realm!"},
                      "status": false},
           "signature": "14468...081555",
           "time": 1484303933.72481,
           "version": "privacyIDEA 2.17"
       }
    """
    user = request.User
    serial = getParam(request.all_data, "serial")
    token_type = getParam(request.all_data, "type")
    details = {"messages": [],
               "transaction_ids": []}
    options = {"g": g,
               "clientip": g.client_ip,
               "user": user}
    # Add all params to the options
    for key, value in request.all_data.items():
        if value and key not in ["g", "clientip", "user"]:
            options[key] = value
    token_objs = get_tokens(serial=serial, user=user, active=True, revoked=False, locked=False, tokentype=token_type)
    # Only use the tokens, that are allowed to do challenge response
    chal_resp_tokens = [token_obj for token_obj in token_objs if "challenge" in token_obj.mode]
    create_challenges_from_tokens(chal_resp_tokens, details, options)
    # Be defensive: if no challenge could be created, "multi_challenge" may
    # be missing from details; default to an empty list instead of failing
    # with TypeError/KeyError.
    multi_challenge = details.get("multi_challenge", [])
    result_obj = len(multi_challenge)
    challenge_serials = [challenge_info["serial"] for challenge_info in multi_challenge]
    g.audit_object.log({
        "user": user.login,
        "resolver": user.resolver,
        "realm": user.realm,
        "success": result_obj > 0,
        "info": log_used_user(user, "triggered {0!s} challenges".format(result_obj)),
        "serial": ",".join(challenge_serials),
    })
    return send_result(result_obj, rid=2, details=details)
1321,
287,
1321,
217,
1401806822
] |
def get_items(cls):
    """Map each mode constant of this class to its translated label."""
    labels = {}
    labels[cls.anonymity] = _('Anonymity')
    labels[cls.pseudonymity] = _('Pseudonymity')
    return labels
22,
6,
22,
9,
1417421268
] |
def get_title(cls, item):
    """Return the display label for *item*, or None when unknown."""
    return cls.get_items().get(item)
22,
6,
22,
9,
1417421268
] |
def get_searchable_content(request=None):
    """Return the mapping of searchable contents attached to *request*.

    Falls back to the current request when none is given, and to an
    empty dict when the request carries no ``searchable_contents``.
    """
    req = get_current_request() if request is None else request
    return getattr(req, 'searchable_contents', {})
22,
6,
22,
9,
1417421268
] |
def __init__(self, name='', context=None, renderer=None, attr=None):
    """Store the registration settings for later use by the scanner."""
    self.name, self.context = name, context
    self.renderer, self.attr = renderer, attr
22,
6,
22,
9,
1417421268
] |
def callback(context, name, ob):
    """Venusian callback: register the panel and index its metadata."""
    cfg = context.config.with_package(info.module)
    cfg.add_panel(panel=ob, **settings)
    ADVERTISING_CONTAINERS[self.name] = {
        'title': ob.title,
        'description': ob.description,
        'order': ob.order,
        'validator': ob.validator,
        'tags': ob.tags,
        #TODO add validator ob.validator
    }
22,
6,
22,
9,
1417421268
] |
def __init__(self, access_key=None):
    """Remember the key under which the access action is registered."""
    self.access_key = access_key
22,
6,
22,
9,
1417421268
] |
def callback(scanner, name, ob):
    """Venusian callback: index *ob* in ACCESS_ACTIONS by its context.

    Each entry records the action together with the access key it was
    registered under.
    """
    # setdefault collapses the original if/else branches into one append.
    ACCESS_ACTIONS.setdefault(ob.context, []).append(
        {'action': ob, 'access_key': self.access_key})
22,
6,
22,
9,
1417421268
] |
def can_access(user, context, request=None, root=None):
    """ Return 'True' if the user can access to the context"""
    # The first declared interface of the context selects the candidate
    # access actions; any passing security validation grants access.
    provides = getattr(context, '__provides__', None)
    declared = getattr(provides, 'declared', [None])[0]
    return any(entry['action'].processsecurity_validation(None, context)
               for entry in ACCESS_ACTIONS.get(declared, []))
22,
6,
22,
9,
1417421268
] |
def serialize_roles(roles, root=None):
    """Flatten *roles* into lowercase '<role>_<oid>' strings.

    Tuple roles ``(name, obj)`` are scoped to that object's oid; plain
    role names are scoped to *root* (defaulting to the principal site
    root). Superior roles from DACE_ROLES are expanded too, and an
    'Admin' superior is additionally registered against the principal
    root. Returns a deduplicated list, or [] when no site is available.
    """
    result = []
    principal_root = getSite()
    if principal_root is None:
        return []
    if root is None:
        root = principal_root
    root_oid = str(get_oid(root, ''))
    principal_root_oid = str(get_oid(principal_root, ''))
    for role in roles:
        if isinstance(role, tuple):
            # (role_name, scoped_object): scope the role to that object.
            obj_oid = str(get_oid(role[1], ''))
            result.append((role[0]+'_'+obj_oid).lower())
            superiors = getattr(DACE_ROLES.get(role[0], _marker),
                                'all_superiors', [])
            result.extend([(r.name+'_'+obj_oid).lower()
                           for r in superiors])
        else:
            result.append(role.lower()+'_'+root_oid)
            superiors = getattr(DACE_ROLES.get(role, _marker),
                                'all_superiors', [])
            result.extend([(r.name+'_'+root_oid).lower() for r in
                           superiors])
            # An Admin superior also grants rights at the principal root.
            for superior in superiors:
                if superior.name == 'Admin':
                    result.append('admin_'+principal_root_oid)
                    break
    return list(set(result))
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise base entity state and the cached comment counter."""
    super(Commentable, self).__init__(**kwargs)
    # Maintained incrementally by addtoproperty('comments', ...).
    self.len_comments = 0
22,
6,
22,
9,
1417421268
] |
def addtoproperty(self, name, value, moving=None):
    """Add *value* to property *name*, keeping comment counters in sync."""
    super(Commentable, self).addtoproperty(name, value, moving)
    if name != 'comments':
        return
    # Count on the owning channel; entities that are their own channel
    # are only counted once.
    channel = getattr(self, 'channel', self)
    channel.len_comments += 1
    if channel is not self:
        self.len_comments += 1
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Delegate initialisation to the base entity."""
    super(Debatable, self).__init__(**kwargs)
22,
6,
22,
9,
1417421268
] |
def channel(self):
    """Return the first attached channel, or None when there is none."""
    attached = getattr(self, 'channels', None)
    if attached:
        return attached[0]
    return None
22,
6,
22,
9,
1417421268
] |
def get_title(self, user=None):
    """Return this entity's title, or '' when it has none."""
    return getattr(self, 'title', '')
22,
6,
22,
9,
1417421268
] |
def add_new_channel(self):
    """Attach a freshly created Channel to this entity."""
    new_channel = Channel()
    self.addtoproperty('channels', new_channel)
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Create the channel and its time-indexed comment BTree."""
    super(Channel, self).__init__(**kwargs)
    self.set_data(kwargs)
    # Comments keyed by creation timestamp for ordered traversal.
    self._comments_at = OOBTree()
22,
6,
22,
9,
1417421268
] |
def remove_comment(self, comment):
    """Drop *comment*'s entry from the timestamp index (KeyError if absent)."""
    timestamp = comment.created_at
    self._comments_at.pop(timestamp)
22,
6,
22,
9,
1417421268
] |
def get_subject(self, user=None):
    """Return the channel's subject, falling back to the parent container."""
    current = self.subject
    if current:
        return current
    return getattr(self, '__parent__', None)
22,
6,
22,
9,
1417421268
] |
def is_discuss(self):
    """True when the subject's class is named 'person' (case-insensitive)."""
    subject_cls = self.subject.__class__
    return subject_cls.__name__.lower() == 'person'
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise the per-emoji voter index and the per-user emoji index."""
    super(Emojiable, self).__init__(**kwargs)
    self.emojis = OOBTree()
    self.users_emoji = OOBTree()
22,
6,
22,
9,
1417421268
] |
def remove_emoji(self, emoji, user):
    """Withdraw *user*'s *emoji* reaction, if one was recorded."""
    user_oid = get_oid(user)
    voters = self.emojis.get(emoji) if emoji in self.emojis else None
    if voters is not None and user_oid in voters:
        voters.remove(user_oid)
        self.users_emoji.pop(user_oid)
22,
6,
22,
9,
1417421268
] |
def can_add_reaction(self, user, process):
    """Reactions are never allowed on this content type."""
    return False
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise the private channel and apply the given data."""
    super(PrivateChannel, self).__init__(**kwargs)
    self.set_data(kwargs)
22,
6,
22,
9,
1417421268
] |
def get_title(self, user=None):
    """Return the channel title, deriving it from the subject when unset."""
    own_title = getattr(self, 'title', '')
    if own_title:
        return own_title
    return getattr(self.get_subject(user), 'title', None)
22,
6,
22,
9,
1417421268
] |
def current_version(self):
    """ Return the current version"""
    # Follow the nextversion chain until its end.
    if self.nextversion is not None:
        return self.nextversion.current_version
    return self
22,
6,
22,
9,
1417421268
] |
def history(self):
    """ Return all versions"""
    # This version first, then the older ones, recursively.
    if self.version is None:
        return [self]
    return [self] + self.version.history
22,
6,
22,
9,
1417421268
] |
def keywords_choice(node, kw):
    """Build the Select2 widget proposing the site's keywords."""
    site = getSite()
    choices = [(keyword, keyword) for keyword in sorted(site.keywords)]
    allow_create = getattr(site, 'can_add_keywords', True)
    return Select2Widget(max_len=5,
                         values=choices,
                         create=allow_create,
                         multiple=True)
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise the entity with an empty persistent keyword list."""
    super(SearchableEntity, self).__init__(**kwargs)
    self.keywords = PersistentList()
22,
6,
22,
9,
1417421268
] |
def is_published(self):
    """True when the entity carries the 'published' workflow state."""
    return 'published' in self.state
22,
6,
22,
9,
1417421268
] |
def is_workable(self):
    """Workability simply mirrors publication status."""
    return self.is_published
22,
6,
22,
9,
1417421268
] |
def relevant_data(self):
    """Return the text fields fed into the search index."""
    keywords = getattr(self, 'keywords', [])
    return [getattr(self, 'title', ''),
            getattr(self, 'description', ''),
            ', '.join(keywords)]
22,
6,
22,
9,
1417421268
] |
def get_source_data(self, app_id):
    """Return source/import metadata for *app_id*.

    NOTE(review): only the guard for a missing ``source_data``
    attribute is visible here; the lookup by *app_id* presumably
    follows in the full source - confirm against the repository.
    """
    # Entities created before source tracking existed carry no
    # 'source_data' attribute; treat them as having no source info.
    if not hasattr(self, 'source_data'):
        return {}
22,
6,
22,
9,
1417421268
] |
def is_managed(self, root):
    """Entities of this type are always considered managed."""
    return True
22,
6,
22,
9,
1417421268
] |
def _init_presentation_text(self):
    # No-op by default; presumably overridden where presentation text
    # needs precomputing - confirm against subclasses.
    pass
22,
6,
22,
9,
1417421268
] |
def presentation_text(self, nb_characters=400):
    """Return a teaser: the first *nb_characters* of the description + '...'."""
    teaser = getattr(self, 'description', "")[:nb_characters]
    return teaser + '...'
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise the entity and the persistent list of contacted e-mails."""
    super(PresentableEntity, self).__init__(**kwargs)
    self._email_persons_contacted = PersistentList()
22,
6,
22,
9,
1417421268
] |
def len_contacted(self):
    """Number of e-mail addresses already contacted."""
    return len(self._email_persons_contacted)
22,
6,
22,
9,
1417421268
] |
def persons_contacted(self):
    """ Return all contacted persons"""
    # Resolve each contacted e-mail back to a registered Person via the
    # catalog indexes. Addresses with no matching user fall back to the
    # local part of the address (before '@', '+tag' suffix stripped).
    # Note: the result is a set, so duplicates are collapsed.
    dace_catalog = find_catalog('dace')
    novaideo_catalog = find_catalog('novaideo')
    identifier_index = novaideo_catalog['identifier']
    object_provides_index = dace_catalog['object_provides']
    result = []
    for email in self._email_persons_contacted:
        query = object_provides_index.any([IPerson.__identifier__]) &\
            identifier_index.any([email])
        users = list(query.execute().all())
        user = users[0] if users else None
        if user is not None:
            result.append(user)
        else:
            result.append(email.split('@')[0].split('+')[0])
    return set(result)
22,
6,
22,
9,
1417421268
] |
def correlations(self):
    """Return all source correlations and target correlations"""
    linked = [correlation.target for correlation in self.source_correlations]
    linked += [correlation.source for correlation in self.target_correlations]
    return list(set(linked))
22,
6,
22,
9,
1417421268
] |
def all_source_related_contents(self):
    """Return (target, correlation) pairs for every outgoing correlation."""
    return [(target, correlation)
            for correlation in self.source_correlations
            for target in correlation.targets]
22,
6,
22,
9,
1417421268
] |
def all_target_related_contents(self):
    """Return (source, correlation) pairs for every incoming correlation."""
    return [(correlation.source, correlation)
            for correlation in self.target_correlations]
22,
6,
22,
9,
1417421268
] |
def all_related_contents(self):
    """Return outgoing related contents followed by incoming ones."""
    contents = self.all_source_related_contents
    contents.extend(self.all_target_related_contents)
    return contents
22,
6,
22,
9,
1417421268
] |
def contextualized_contents(self):
    """Return (content, correlation) pairs for contextualized correlations.

    Target pairs come first, then source pairs, matching the original
    traversal order.
    """
    pairs = [(target, correlation)
             for correlation in self.contextualized_correlations
             for target in correlation.targets]
    pairs.extend((correlation.source, correlation)
                 for correlation in self.contextualized_correlations)
    return pairs
22,
6,
22,
9,
1417421268
] |
def opinion_value(self):
    """Resolve the recorded opinion code against the opinions base."""
    opinion_code = getattr(self, 'opinion', {}).get('opinion', '')
    return self.opinions_base.get(opinion_code, None)
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise the node with an empty persistent relation graph."""
    super(Node, self).__init__(**kwargs)
    self.graph = PersistentDict()
22,
6,
22,
9,
1417421268
] |
def get_node_descriminator(self):
    """Return the generic node discriminator; subclasses may override."""
    return 'node'
22,
6,
22,
9,
1417421268
] |
def get_nodes_data(self, calculated=[]):
    """Collect graph data for this node and, transitively, its relations.

    Returns ``(mapping, visited)`` where *mapping* maps node ids to
    {'oid', 'title', 'descriminator', 'targets'} dicts and *visited* is
    the updated list of already-processed node ids.

    NOTE(review): ``calculated=[]`` is a mutable default; it is never
    mutated here (a copy is taken), but confirm no caller relies on it.
    """
    oid = self.get_node_id()
    newcalculated = list(calculated)
    if oid in calculated:
        # Already visited: contribute nothing, stop the recursion.
        return {}, newcalculated
    all_target_contents = [r for r in self.all_target_related_contents
                           if isinstance(r[0], Node)]
    targets = [{'id': t.get_node_id(),
                'type': c.type_name,
                'oid': getattr(t, '__oid__', 0)}
               for (t, c) in all_target_contents]
    # NOTE(review): 'r[0] not in all_target_contents' compares a content
    # object against (content, correlation) tuples, so the membership
    # test can never match and nothing is filtered out; presumably
    # 'r not in all_target_contents' was intended to avoid duplicate
    # edges - confirm before changing.
    all_source_contents = [r for r in self.all_source_related_contents
                           if r[0] not in all_target_contents
                           and isinstance(r[0], Node)]
    targets.extend([{'id': t.get_node_id(),
                     'type': c.type_name,
                     'oid': getattr(t, '__oid__', 0)}
                    for (t, c) in all_source_contents])
    result = {oid: {
        'oid': self.__oid__,
        'title': self.title,
        'descriminator': self.get_node_descriminator(),
        'targets': targets
    }}
    all_source_contents.extend(all_target_contents)
    newcalculated.append(oid)
    # Recurse into every related node, accumulating its sub-graph.
    for r_content in all_source_contents:
        sub_result, newcalculated = r_content[0].get_nodes_data(newcalculated)
        result.update(sub_result)
    return result, newcalculated
22,
6,
22,
9,
1417421268
] |
def get_sub_nodes(self):
    """Resolve this node's graph targets back into objects."""
    node_id = self.get_node_id()
    target_entries = self.graph[node_id]['targets']
    return {get_obj(entry['oid']) for entry in target_entries}
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise the report counters for this signalable entity."""
    super(SignalableEntity, self).__init__(**kwargs)
    self.len_reports = 0
    self.init_len_current_reports()
22,
6,
22,
9,
1417421268
] |
def subject(self):
    """The signaled content is this entity's parent container."""
    return self.__parent__
22,
6,
22,
9,
1417421268
] |
def addtoproperty(self, name, value, moving=None):
    """Add *value*; when a report is added, bump both report counters."""
    super(SignalableEntity, self).addtoproperty(name, value, moving)
    if name != 'reports':
        return
    # Older instances may predate the counters; default them first.
    self.len_current_reports = getattr(self, 'len_current_reports', 0) + 1
    self.len_reports = getattr(self, 'len_reports', 0) + 1
22,
6,
22,
9,
1417421268
] |
def __init__(self, **kwargs):
    """Initialise support-vote storage for this entity."""
    super(Sustainable, self).__init__(**kwargs)
    self.set_data(kwargs)
    # Separate BTrees for positive and negative votes.
    self.votes_positive = OOBTree()
    self.votes_negative = OOBTree()
22,
6,
22,
9,
1417421268
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.