hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
281d7407a5c3dbd72c6d44bef7211680fea8bcb6 | 248 | py | Python | Task/Runtime-evaluation-In-an-environment/Python/runtime-evaluation-in-an-environment-2.py | LaudateCorpus1/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:38.000Z | 2018-11-09T22:08:38.000Z | Task/Runtime-evaluation-In-an-environment/Python/runtime-evaluation-in-an-environment-2.py | seanwallawalla-forks/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | null | null | null | Task/Runtime-evaluation-In-an-environment/Python/runtime-evaluation-in-an-environment-2.py | seanwallawalla-forks/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:40.000Z | 2018-11-09T22:08:40.000Z | >>> def eval_with_args(code, **kwordargs):
return eval(code, kwordargs)
>>> code = '2 ** x'
>>> eval_with_args(code, x=5) - eval_with_args(code, x=3)
24
>>> code = '3 * x + y'
>>> eval_with_args(code, x=5, y=2) - eval_with_args(code, x=3, y=1)
7
| 24.8 | 67 | 0.616935 |
b4f3938c5ade309d469ab6829aad476c6440d0ce | 1,083 | py | Python | server/Category/views.py | adamA113/servize | 89933c3864d997188ec79ad690b37f51bca54aa3 | [
"MIT"
] | null | null | null | server/Category/views.py | adamA113/servize | 89933c3864d997188ec79ad690b37f51bca54aa3 | [
"MIT"
] | null | null | null | server/Category/views.py | adamA113/servize | 89933c3864d997188ec79ad690b37f51bca54aa3 | [
"MIT"
] | 2 | 2020-12-26T09:50:17.000Z | 2020-12-26T09:52:45.000Z | from django.shortcuts import render
from rest_framework import generics # for post and get
from Category.models import Category
from Category.serialize import CategorySerializer,JustCategorySerializer
from Category.filter import CategoryFilter
from rest_framework.decorators import api_view
from rest_framework.response import Response
class CategoryList(generics.ListCreateAPIView):
    """GET: list all categories (full representation via CategorySerializer).
    POST: create a new category."""
    queryset=Category.objects.all()
    serializer_class=CategorySerializer
class JustCategoryList(generics.ListCreateAPIView):
    """Same endpoint shape as CategoryList but serialized with
    JustCategorySerializer (presumably a slimmer representation —
    confirm against Category/serialize.py)."""
    queryset=Category.objects.all()
    serializer_class=JustCategorySerializer
@api_view(['POST'])
def ProvCat(request):
    """Return the categories whose catName matches the POSTed name.

    Expects a body containing a ``catName`` key, e.g.
    ``{"name": "Nablus", "catName": "Electricians"}``; responds with a JSON
    list of matching categories (empty list when nothing matches).
    Raises KeyError -> 500 if ``catName`` is absent, as the original did.
    """
    # filter() on the manager already yields a queryset; .all() was redundant
    categories = Category.objects.filter(catName=request.data['catName'])
    serializer = CategorySerializer(categories, many=True)
    return Response(serializer.data)
# {"name":"Nablus","catName":"Electricians"} | 40.111111 | 78 | 0.77747 |
2d077a46643277e2c3147b930505514f811abcbf | 11,757 | py | Python | client/verta/tests/test_permissions/test_visibility_e2e.py | stefan-petrov-toptal/modeldb | a8a9b9da6ed964c91351230b2f0d2703c75794de | [
"Apache-2.0"
] | null | null | null | client/verta/tests/test_permissions/test_visibility_e2e.py | stefan-petrov-toptal/modeldb | a8a9b9da6ed964c91351230b2f0d2703c75794de | [
"Apache-2.0"
] | null | null | null | client/verta/tests/test_permissions/test_visibility_e2e.py | stefan-petrov-toptal/modeldb | a8a9b9da6ed964c91351230b2f0d2703c75794de | [
"Apache-2.0"
] | null | null | null | """
End-to-end tests for org permissions access and actions.
"""
import pytest
import requests
from verta._internal_utils import _utils
from verta.visibility import (
OrgCustom,
Private,
)
from verta.dataset import Path
from verta.environment import Python
pytestmark = pytest.mark.not_oss
class TestAccess:
    """Checks that org visibility settings (Private / OrgCustom) gate what a
    second org member can read and delete."""

    @pytest.mark.parametrize(
        "entity_name",
        ["dataset", "endpoint", "project", "registered_model"],#, "repository"],
    )
    def test_private(self, client, client_2, organization, created_entities, entity_name):
        """Org member cannot get."""
        # make client_2 a member of the org and point both clients at it
        organization.add_member(client_2._conn.email)
        client.set_workspace(organization.name)
        client_2.set_workspace(organization.name)

        name = _utils.generate_default_name()
        visibility = Private()
        entity = getattr(client, "create_{}".format(entity_name))(name, visibility=visibility)
        created_entities.append(entity)

        # a Private entity must be invisible to the other org member
        with pytest.raises(Exception, match="not found|Denied"):
            getattr(client_2, "get_{}".format(entity_name))(name)

    @pytest.mark.parametrize(
        "entity_name",
        ["dataset", "endpoint", "project", "registered_model"],#, "repository"],
    )
    def test_read(self, client, client_2, organization, created_entities, entity_name):
        """Org member can get, but not delete."""
        organization.add_member(client_2._conn.email)
        client.set_workspace(organization.name)
        client_2.set_workspace(organization.name)

        name = _utils.generate_default_name()
        visibility = OrgCustom(write=False)
        entity = getattr(client, "create_{}".format(entity_name))(name, visibility=visibility)
        created_entities.append(entity)

        retrieved_entity = getattr(client_2, "get_{}".format(entity_name))(name)
        assert retrieved_entity.id == entity.id
        # read-only visibility: deletion by the other member must be rejected
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            retrieved_entity.delete()

    def test_read_registry(self, client, client_2, organization, created_entities):
        """Registry entities erroneously masked 403s in _update()."""
        organization.add_member(client_2._conn.email)
        client.set_workspace(organization.name)
        client_2.set_workspace(organization.name)

        visibility = OrgCustom(write=False)
        reg_model = client.create_registered_model(visibility=visibility)
        retrieved_reg_model = client_2.get_registered_model(reg_model.name)
        # a write attempt (add_label) through the registry API must surface 403
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            retrieved_reg_model.add_label("foo")

        model_ver = reg_model.create_version()
        retrieved_model_ver = retrieved_reg_model.get_version(model_ver.name)
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            retrieved_model_ver.add_label("foo")

    @pytest.mark.parametrize(
        "entity_name",
        ["dataset", "endpoint", "project", "registered_model"],#, "repository"],
    )
    def test_read_write(self, client, client_2, organization, created_entities, entity_name):
        """Org member can get, and delete."""
        organization.add_member(client_2._conn.email)
        client.set_workspace(organization.name)
        client_2.set_workspace(organization.name)

        name = _utils.generate_default_name()
        visibility = OrgCustom(write=True)
        entity = getattr(client, "create_{}".format(entity_name))(name, visibility=visibility)
        try:
            retrieved_entity = getattr(client_2, "get_{}".format(entity_name))(name)
            retrieved_entity.delete()
        except:
            # deletion failed; hand the entity to the fixture for cleanup
            created_entities.append(entity)

    def test_repository(self, client, client_2, organization, created_entities):
        """
        The above, but for repository.

        Because there is no client.create_repository() or client.get_repository().
        """
        organization.add_member(client_2._conn.email)
        client.set_workspace(organization.name)
        client_2.set_workspace(organization.name)

        # private
        private_repo = client.set_repository(_utils.generate_default_name(), visibility=Private())
        created_entities.append(private_repo)
        with pytest.raises(Exception, match="unable to get Repository"):
            client_2.set_repository(private_repo.name)

        # read-only
        read_repo = client.set_repository(_utils.generate_default_name(), visibility=OrgCustom(write=False))
        created_entities.append(read_repo)
        retrieved_repo = client_2.set_repository(read_repo.name)
        assert retrieved_repo.id == read_repo.id
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            retrieved_repo.delete()

        # read-write
        write_repo = client.set_repository(_utils.generate_default_name(), visibility=OrgCustom(write=True))
        try:
            retrieved_repo = client_2.set_repository(write_repo.name)
            retrieved_repo.delete()
        except:
            created_entities.append(write_repo)
class TestLink:
    """Checks cross-user linking: attaching one org member's entities
    (commits, dataset versions, runs, model versions) to another member's
    runs, registered models, and endpoints."""

    def test_run_log_commit(self, client_2, client_3, organization, created_entities):
        """Log someone else's commit to my run."""
        organization.add_member(client_2._conn.email)
        organization.add_member(client_3._conn.email)
        client_2.set_workspace(organization.name)
        client_3.set_workspace(organization.name)

        created_entities.append(client_2.create_project())
        run = client_2.create_experiment_run()

        # private commit: client_2 may not log it
        repo = client_3.set_repository(_utils.generate_default_name(), visibility=Private())
        created_entities.append(repo)
        commit = repo.get_commit()
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            run.log_commit(commit)

        # org commit: allowed
        repo = client_3.set_repository(_utils.generate_default_name())
        created_entities.append(repo)
        commit = repo.get_commit()
        run.log_commit(commit)
        assert run.get_commit()[0].id == commit.id

    def test_run_log_dataset_version(self, client_2, client_3, organization, created_entities):
        """Log someone else's dataset version to my run."""
        organization.add_member(client_2._conn.email)
        organization.add_member(client_3._conn.email)
        client_2.set_workspace(organization.name)
        client_3.set_workspace(organization.name)

        created_entities.append(client_2.create_project())
        run = client_2.create_experiment_run()

        # private dataset version (this test file itself is the payload)
        dataset = client_3.create_dataset(visibility=Private())
        created_entities.append(dataset)
        dataver = dataset.create_version(Path(__file__))
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            run.log_dataset_version("train", dataver)

        # org dataset version
        dataset = client_3.create_dataset()
        created_entities.append(dataset)
        dataver = dataset.create_version(Path(__file__))
        run.log_dataset_version("train", dataver)
        assert run.get_dataset_version("train").id == dataver.id

    def test_model_version_from_run(self, client_2, client_3, organization, created_entities):
        """Create model version from someone else's run."""
        organization.add_member(client_2._conn.email)
        organization.add_member(client_3._conn.email)
        client_2.set_workspace(organization.name)
        client_3.set_workspace(organization.name)

        reg_model = client_2.create_registered_model()
        created_entities.append(reg_model)

        # private run: invisible, so the server reports 404 rather than 403
        created_entities.append(client_3.create_project(visibility=Private()))
        run = client_3.create_experiment_run()
        with pytest.raises(requests.HTTPError, match="^404.*not found"):
            reg_model.create_version_from_run(run.id)

        # org run
        created_entities.append(client_3.create_project())
        run = client_3.create_experiment_run()
        model_ver = reg_model.create_version_from_run(run.id)
        assert model_ver._msg.experiment_run_id == run.id

    def test_endpoint_update_run(self, client_2, client_3, organization, created_entities):
        """Update endpoint from someone else's run."""
        # sklearn is optional; skip the test rather than fail when absent
        LogisticRegression = pytest.importorskip("sklearn.linear_model").LogisticRegression
        organization.add_member(client_2._conn.email)
        organization.add_member(client_3._conn.email)
        client_2.set_workspace(organization.name)
        client_3.set_workspace(organization.name)

        endpoint = client_2.create_endpoint(_utils.generate_default_name())
        created_entities.append(endpoint)

        # private run: invisible -> 404
        created_entities.append(client_3.create_project(visibility=Private()))
        run = client_3.create_experiment_run()
        run.log_model(LogisticRegression(), custom_modules=[])
        run.log_environment(Python(["scikit-learn"]))
        with pytest.raises(requests.HTTPError, match="^404.*not found"):
            endpoint.update(run)

        # org run, deploy=False: visible but not deployable -> 403
        created_entities.append(client_3.create_project(visibility=OrgCustom(deploy=False)))
        run = client_3.create_experiment_run()
        run.log_model(LogisticRegression(), custom_modules=[])
        run.log_environment(Python(["scikit-learn"]))
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            endpoint.update(run)

        # org run, deploy=True: allowed
        created_entities.append(client_3.create_project(visibility=OrgCustom(deploy=True)))
        run = client_3.create_experiment_run()
        run.log_model(LogisticRegression(), custom_modules=[])
        run.log_environment(Python(["scikit-learn"]))
        assert endpoint.update(run)

    def test_endpoint_update_model_version(self, client_2, client_3, organization, created_entities):
        """Update endpoint from someone else's model version."""
        LogisticRegression = pytest.importorskip("sklearn.linear_model").LogisticRegression
        organization.add_member(client_2._conn.email)
        organization.add_member(client_3._conn.email)
        client_2.set_workspace(organization.name)
        client_3.set_workspace(organization.name)

        endpoint = client_2.create_endpoint(_utils.generate_default_name())
        created_entities.append(endpoint)

        # private model version -> 403
        reg_model = client_3.create_registered_model(visibility=Private())
        created_entities.append(reg_model)
        model_ver = reg_model.create_version()
        model_ver.log_model(LogisticRegression(), custom_modules=[])
        model_ver.log_environment(Python(["scikit-learn"]))
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            endpoint.update(model_ver)

        # org model version, deploy=False -> 403
        reg_model = client_3.create_registered_model(visibility=OrgCustom(deploy=False))
        created_entities.append(reg_model)
        model_ver = reg_model.create_version()
        model_ver.log_model(LogisticRegression(), custom_modules=[])
        model_ver.log_environment(Python(["scikit-learn"]))
        with pytest.raises(requests.HTTPError, match="Access Denied|Forbidden"):
            endpoint.update(model_ver)

        # org model version, deploy=True: allowed
        reg_model = client_3.create_registered_model(visibility=OrgCustom(deploy=True))
        created_entities.append(reg_model)
        model_ver = reg_model.create_version()
        model_ver.log_model(LogisticRegression(), custom_modules=[])
        model_ver.log_environment(Python(["scikit-learn"]))
        assert endpoint.update(model_ver)
| 42.908759 | 108 | 0.698307 |
75d232bcaab153a0e4b83d93e150a4e5a38b754a | 1,932 | py | Python | backend/atlas/mutations/delete_team.py | getsentry/atlas | 9bf4a236b99a24a7a17700591a0ff94feecf7ce7 | [
"Apache-2.0"
] | 18 | 2019-09-24T23:49:41.000Z | 2020-11-14T17:30:27.000Z | backend/atlas/mutations/delete_team.py | getsentry/atlas | 9bf4a236b99a24a7a17700591a0ff94feecf7ce7 | [
"Apache-2.0"
] | 53 | 2019-09-24T18:50:25.000Z | 2022-02-27T11:44:55.000Z | backend/atlas/mutations/delete_team.py | getsentry/atlas | 9bf4a236b99a24a7a17700591a0ff94feecf7ce7 | [
"Apache-2.0"
] | 2 | 2020-02-03T08:22:36.000Z | 2021-02-28T12:55:48.000Z | import graphene
from django.db import transaction
from atlas.models import Profile, Team
from atlas.tasks import update_profile
class DeleteTeam(graphene.Mutation):
    """Delete a team, optionally reassigning its members to another team.

    Only superusers (human resources) may delete teams. Members' profiles
    are moved to ``new_team`` — or detached when no replacement is given —
    and a profile-sync task is queued for each affected user.
    """

    class Arguments:
        team = graphene.UUID(required=True)
        new_team = graphene.UUID(required=False)

    ok = graphene.Boolean()
    errors = graphene.List(graphene.String)

    def mutate(self, info, team: str, new_team: str = None):
        current_user = info.context.user
        if not current_user.is_authenticated:
            return DeleteTeam(ok=False, errors=["Authentication required"])

        if team == new_team:
            return DeleteTeam(ok=False, errors=["Must select a unique new team"])

        try:
            team = Team.objects.get(id=team)
        except Team.DoesNotExist:
            return DeleteTeam(ok=False, errors=["Invalid resource"])

        if new_team:
            try:
                new_team = Team.objects.get(id=new_team)
            except Team.DoesNotExist:
                return DeleteTeam(ok=False, errors=["Invalid resource"])

        # only superuser (human resources) can edit teams
        if not current_user.is_superuser:
            return DeleteTeam(ok=False, errors=["Cannot edit this resource"])

        with transaction.atomic():
            team_id = team.id
            # snapshot the members first, then move them all with a single
            # bulk UPDATE instead of one query per user
            affected_users = list(
                Profile.objects.filter(team=team_id).values_list("user", flat=True)
            )
            Profile.objects.filter(team=team_id).update(team=new_team)
            team.delete()

        # queue the sync tasks only after the transaction commits, so a
        # rolled-back delete can't leave stale tasks in flight
        for user_id in affected_users:
            update_profile.delay(
                user_id=user_id,
                updates={"team": str(new_team.id) if new_team else None},
            )
        return DeleteTeam(ok=True)
6551476f5d10eacc070b9c99c88308811de0717d | 29,617 | py | Python | pysnmp/CISCO-WIRELESS-P2MP-RF-METRICS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-WIRELESS-P2MP-RF-METRICS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-WIRELESS-P2MP-RF-METRICS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-WIRELESS-P2MP-RF-METRICS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-WIRELESS-P2MP-RF-METRICS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:05:15 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
CwrCollectionAction, CwrFixedPointValue, CwrThreshLimitType, P2mpRadioSignalAttribute, CwrFixedPointScale, CwrFixedPointPrecision, CwrUpdateTime, CwrCollectionStatus, P2mpSnapshotAttribute = mibBuilder.importSymbols("CISCO-WIRELESS-TC-MIB", "CwrCollectionAction", "CwrFixedPointValue", "CwrThreshLimitType", "P2mpRadioSignalAttribute", "CwrFixedPointScale", "CwrFixedPointPrecision", "CwrUpdateTime", "CwrCollectionStatus", "P2mpSnapshotAttribute")
OwnerString, ifIndex = mibBuilder.importSymbols("IF-MIB", "OwnerString", "ifIndex")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Counter64, ObjectIdentity, IpAddress, Bits, Unsigned32, Gauge32, Counter32, TimeTicks, NotificationType, MibIdentifier, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Counter64", "ObjectIdentity", "IpAddress", "Bits", "Unsigned32", "Gauge32", "Counter32", "TimeTicks", "NotificationType", "MibIdentifier", "Integer32")
RowStatus, TimeInterval, DisplayString, MacAddress, TextualConvention, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TimeInterval", "DisplayString", "MacAddress", "TextualConvention", "TruthValue")
ciscoWirelessRfMetricsMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 180))
if mibBuilder.loadTexts: ciscoWirelessRfMetricsMIB.setLastUpdated('200004191910Z')
if mibBuilder.loadTexts: ciscoWirelessRfMetricsMIB.setOrganization('Cisco Systems Inc.')
p2mpRadioHistoryGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 1))
p2mpRadioTimelineGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 2))
p2mpRadioThresholdGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 3))
p2mpRadioSnapshotGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 4))
p2mpHistCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1), )
if mibBuilder.loadTexts: p2mpHistCtrlTable.setStatus('current')
p2mpHistCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistSuMacAddress"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistClass"))
if mibBuilder.loadTexts: p2mpHistCtrlEntry.setStatus('current')
p2mpHistSuMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 1), MacAddress())
if mibBuilder.loadTexts: p2mpHistSuMacAddress.setStatus('current')
p2mpHistClass = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 2), P2mpRadioSignalAttribute())
if mibBuilder.loadTexts: p2mpHistClass.setStatus('current')
p2mpHistSize = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fine", 1), ("coarse", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpHistSize.setStatus('current')
p2mpHistSumScale = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 4), CwrFixedPointScale()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistSumScale.setStatus('current')
p2mpHistSumPrecision = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 5), CwrFixedPointPrecision()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistSumPrecision.setStatus('current')
p2mpStartBinValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-2147483647, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpStartBinValue.setStatus('current')
p2mpEndBinValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-2147483647, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpEndBinValue.setStatus('current')
p2mpCollDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 8), CwrUpdateTime()).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpCollDuration.setStatus('current')
p2mpUpdateRate = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 9), CwrUpdateTime()).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpUpdateRate.setStatus('current')
p2mpPeriodicSum = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 10), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpPeriodicSum.setStatus('current')
p2mpHistOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 11), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpHistOwner.setStatus('current')
p2mpHistAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 12), CwrCollectionAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpHistAction.setStatus('current')
p2mpHistStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 13), CwrCollectionStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistStatus.setStatus('current')
p2mpHistRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 1, 1, 14), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpHistRowStatus.setStatus('current')
p2mpHistSummaryTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 2), )
if mibBuilder.loadTexts: p2mpHistSummaryTable.setStatus('current')
p2mpHistSummaryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistSuMacAddress"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistClass"))
if mibBuilder.loadTexts: p2mpHistSummaryEntry.setStatus('current')
p2mpHistUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 2, 1, 1), CwrUpdateTime()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistUpdateTime.setStatus('current')
p2mpHistMin = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 2, 1, 2), CwrFixedPointValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistMin.setStatus('current')
p2mpHistMax = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 2, 1, 3), CwrFixedPointValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistMax.setStatus('current')
p2mpHistMean = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 2, 1, 4), CwrFixedPointValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpHistMean.setStatus('current')
p2mpHistDataTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 3), )
if mibBuilder.loadTexts: p2mpHistDataTable.setStatus('current')
p2mpHistDataEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistSuMacAddress"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistClass"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistBinIndex"))
if mibBuilder.loadTexts: p2mpHistDataEntry.setStatus('current')
p2mpHistBinIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 50)))
if mibBuilder.loadTexts: p2mpHistBinIndex.setStatus('current')
p2mpValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpValue.setStatus('current')
p2mpTlCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1), )
if mibBuilder.loadTexts: p2mpTlCtrlTable.setStatus('current')
p2mpTlCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlSuMacAddress"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlClass"))
if mibBuilder.loadTexts: p2mpTlCtrlEntry.setStatus('current')
p2mpTlSuMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 1), MacAddress())
if mibBuilder.loadTexts: p2mpTlSuMacAddress.setStatus('current')
p2mpTlClass = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 2), P2mpRadioSignalAttribute())
if mibBuilder.loadTexts: p2mpTlClass.setStatus('current')
p2mpTlThreshAttribute = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 3), P2mpRadioSignalAttribute()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlThreshAttribute.setStatus('current')
p2mpTlThreshType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 4), CwrThreshLimitType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlThreshType.setStatus('current')
p2mpTlNumDataValues = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setUnits('number of data values').setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlNumDataValues.setStatus('current')
p2mpTlDataScale = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 6), CwrFixedPointScale()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlDataScale.setStatus('current')
p2mpTlDataPrecision = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 7), CwrFixedPointPrecision()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlDataPrecision.setStatus('current')
p2mpTlSamplePeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setUnits('milliseconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlSamplePeriod.setStatus('current')
p2mpTlAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 9), CwrCollectionAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlAction.setStatus('current')
p2mpTlPostTrigBufMgmt = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("preTrigger", 1), ("postTrigger", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlPostTrigBufMgmt.setStatus('current')
p2mpTlOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 11), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlOwner.setStatus('current')
p2mpTlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 12), CwrCollectionStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlStatus.setStatus('current')
p2mpTlRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 1, 1, 13), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpTlRowStatus.setStatus('current')
p2mpTlSummaryTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 2), )
if mibBuilder.loadTexts: p2mpTlSummaryTable.setStatus('current')
p2mpTlSummaryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlSuMacAddress"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlClass"))
if mibBuilder.loadTexts: p2mpTlSummaryEntry.setStatus('current')
p2mpTlUpdateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 2, 1, 1), CwrUpdateTime()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlUpdateTime.setStatus('current')
p2mpTlNumValues = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlNumValues.setStatus('current')
p2mpTlTriggerLoc = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlTriggerLoc.setStatus('current')
p2mpTlDataTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 3), )
if mibBuilder.loadTexts: p2mpTlDataTable.setStatus('current')
p2mpTlDataEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlSuMacAddress"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlClass"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlValueIndex"))
if mibBuilder.loadTexts: p2mpTlDataEntry.setStatus('current')
p2mpTlValueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: p2mpTlValueIndex.setStatus('current')
p2mpTlValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 2, 3, 1, 2), CwrFixedPointValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpTlValue.setStatus('current')
p2mpThresholdTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1), )
if mibBuilder.loadTexts: p2mpThresholdTable.setStatus('current')
p2mpThresholdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshSuMacAddr"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshAttribute"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshType"))
if mibBuilder.loadTexts: p2mpThresholdEntry.setStatus('current')
p2mpThreshSuMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 1), MacAddress())
if mibBuilder.loadTexts: p2mpThreshSuMacAddr.setStatus('current')
p2mpThreshAttribute = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 2), P2mpRadioSignalAttribute())
if mibBuilder.loadTexts: p2mpThreshAttribute.setStatus('current')
p2mpThreshType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 3), CwrThreshLimitType())
if mibBuilder.loadTexts: p2mpThreshType.setStatus('current')
p2mpThreshValue = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-2147483647, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpThreshValue.setStatus('current')
p2mpThreshHysteresisTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 5), TimeInterval()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpThreshHysteresisTime.setStatus('current')
p2mpThreshLimitTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 6), TimeInterval()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpThreshLimitTime.setStatus('current')
p2mpThreshRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 1, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpThreshRowStatus.setStatus('current')
p2mpSnapshotCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1), )
if mibBuilder.loadTexts: p2mpSnapshotCtrlTable.setStatus('current')
p2mpSnapshotCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotDspNum"))
if mibBuilder.loadTexts: p2mpSnapshotCtrlEntry.setStatus('current')
p2mpSnapshotDspNum = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: p2mpSnapshotDspNum.setStatus('current')
p2mpSnapshotType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 2), P2mpSnapshotAttribute()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpSnapshotType.setStatus('current')
p2mpSnapshotDataScale = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 3), CwrFixedPointScale()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapshotDataScale.setStatus('current')
p2mpSnapshotDataPrecision = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 4), CwrFixedPointPrecision()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapshotDataPrecision.setStatus('current')
p2mpSnapshotOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 5), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpSnapshotOwner.setStatus('current')
p2mpSnapshotAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 6), CwrCollectionAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpSnapshotAction.setStatus('current')
p2mpSnapshotStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 7), CwrCollectionStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapshotStatus.setStatus('current')
p2mpSnapshotRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 1, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: p2mpSnapshotRowStatus.setStatus('current')
p2mpSnapSummaryTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2), )
if mibBuilder.loadTexts: p2mpSnapSummaryTable.setStatus('current')
p2mpSnapSummaryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotDspNum"))
if mibBuilder.loadTexts: p2mpSnapSummaryEntry.setStatus('current')
p2mpSnapAttr1Id = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr1Id.setStatus('current')
p2mpSnapAttr1Size = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr1Size.setStatus('current')
p2mpSnapAttr2Id = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr2Id.setStatus('current')
p2mpSnapAttr2Size = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr2Size.setStatus('current')
p2mpSnapAttr3Id = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr3Id.setStatus('current')
p2mpSnapAttr3Size = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr3Size.setStatus('current')
p2mpSnapAttr4Id = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr4Id.setStatus('current')
p2mpSnapAttr4Size = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpSnapAttr4Size.setStatus('current')
p2mpSnapDataTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 3), )
if mibBuilder.loadTexts: p2mpSnapDataTable.setStatus('current')
p2mpSnapDataEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotDspNum"), (0, "CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapValueIndex"))
if mibBuilder.loadTexts: p2mpSnapDataEntry.setStatus('current')
p2mpSnapValueIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4096)))
if mibBuilder.loadTexts: p2mpSnapValueIndex.setStatus('current')
p2mpRealPart = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 3, 1, 2), CwrFixedPointValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpRealPart.setStatus('current')
p2mpImaginaryPart = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 180, 4, 3, 1, 3), CwrFixedPointValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: p2mpImaginaryPart.setStatus('current')
p2mpRfMetricsMIBNotificationPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 2))
p2mpRfMetricsMIBNotification = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 2, 0))
p2mpTrapThresh = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 180, 3, 2, 0, 1)).setObjects(("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshValue"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshHysteresisTime"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshLimitTime"))
if mibBuilder.loadTexts: p2mpTrapThresh.setStatus('current')
p2mpRadioRfConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 5))
p2mpRadioRfCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 1))
p2mpRadioRfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 2))
p2mpRadioRfCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 1, 1)).setObjects(("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpComplianceHistogramGroup"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpComplianceThresholdGroup"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpComplianceTimelineGroup"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpComplianceSnapshotGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
p2mpRadioRfCompliance = p2mpRadioRfCompliance.setStatus('current')
p2mpComplianceHistogramGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 2, 1)).setObjects(("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistSize"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistSumScale"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistSumPrecision"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpStartBinValue"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpEndBinValue"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpUpdateRate"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpCollDuration"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpPeriodicSum"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistOwner"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistAction"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistStatus"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistRowStatus"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistUpdateTime"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistMin"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistMax"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpHistMean"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
p2mpComplianceHistogramGroup = p2mpComplianceHistogramGroup.setStatus('current')
p2mpComplianceThresholdGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 2, 2)).setObjects(("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshValue"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshHysteresisTime"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshLimitTime"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpThreshRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
p2mpComplianceThresholdGroup = p2mpComplianceThresholdGroup.setStatus('current')
p2mpComplianceTimelineGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 2, 3)).setObjects(("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlThreshAttribute"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlThreshType"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlNumDataValues"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlDataScale"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlDataPrecision"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlSamplePeriod"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlAction"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlPostTrigBufMgmt"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlOwner"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlStatus"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlRowStatus"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlUpdateTime"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlNumValues"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlTriggerLoc"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpTlValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
p2mpComplianceTimelineGroup = p2mpComplianceTimelineGroup.setStatus('current')
p2mpComplianceSnapshotGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 180, 5, 2, 4)).setObjects(("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotType"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotDataScale"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotDataPrecision"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotOwner"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotAction"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotStatus"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapshotRowStatus"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr1Id"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr1Size"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr2Id"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr2Size"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr3Id"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr3Size"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr4Id"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpSnapAttr4Size"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpRealPart"), ("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", "p2mpImaginaryPart"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
p2mpComplianceSnapshotGroup = p2mpComplianceSnapshotGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-WIRELESS-P2MP-RF-METRICS-MIB", p2mpRadioSnapshotGroup=p2mpRadioSnapshotGroup, p2mpHistSummaryEntry=p2mpHistSummaryEntry, p2mpSnapAttr4Size=p2mpSnapAttr4Size, p2mpUpdateRate=p2mpUpdateRate, p2mpSnapAttr2Size=p2mpSnapAttr2Size, p2mpSnapshotDataPrecision=p2mpSnapshotDataPrecision, p2mpSnapDataEntry=p2mpSnapDataEntry, p2mpTlSamplePeriod=p2mpTlSamplePeriod, p2mpRfMetricsMIBNotification=p2mpRfMetricsMIBNotification, p2mpTlAction=p2mpTlAction, p2mpThreshAttribute=p2mpThreshAttribute, p2mpTlNumValues=p2mpTlNumValues, p2mpThreshType=p2mpThreshType, p2mpRadioRfCompliances=p2mpRadioRfCompliances, p2mpTlThreshType=p2mpTlThreshType, p2mpSnapshotDataScale=p2mpSnapshotDataScale, p2mpTlPostTrigBufMgmt=p2mpTlPostTrigBufMgmt, p2mpHistSuMacAddress=p2mpHistSuMacAddress, p2mpComplianceThresholdGroup=p2mpComplianceThresholdGroup, p2mpCollDuration=p2mpCollDuration, p2mpHistCtrlEntry=p2mpHistCtrlEntry, p2mpTlNumDataValues=p2mpTlNumDataValues, p2mpSnapSummaryEntry=p2mpSnapSummaryEntry, p2mpRadioTimelineGroup=p2mpRadioTimelineGroup, p2mpHistOwner=p2mpHistOwner, p2mpHistRowStatus=p2mpHistRowStatus, p2mpRadioRfGroups=p2mpRadioRfGroups, p2mpTlRowStatus=p2mpTlRowStatus, p2mpHistBinIndex=p2mpHistBinIndex, p2mpStartBinValue=p2mpStartBinValue, p2mpTlDataPrecision=p2mpTlDataPrecision, p2mpTlSuMacAddress=p2mpTlSuMacAddress, p2mpSnapshotCtrlEntry=p2mpSnapshotCtrlEntry, p2mpComplianceHistogramGroup=p2mpComplianceHistogramGroup, p2mpSnapAttr1Id=p2mpSnapAttr1Id, p2mpHistCtrlTable=p2mpHistCtrlTable, p2mpRadioRfCompliance=p2mpRadioRfCompliance, p2mpHistSize=p2mpHistSize, p2mpHistAction=p2mpHistAction, p2mpThreshLimitTime=p2mpThreshLimitTime, p2mpValue=p2mpValue, p2mpTlSummaryEntry=p2mpTlSummaryEntry, p2mpSnapAttr4Id=p2mpSnapAttr4Id, p2mpPeriodicSum=p2mpPeriodicSum, p2mpHistDataEntry=p2mpHistDataEntry, p2mpTlThreshAttribute=p2mpTlThreshAttribute, p2mpSnapAttr3Id=p2mpSnapAttr3Id, p2mpHistDataTable=p2mpHistDataTable, p2mpThreshHysteresisTime=p2mpThreshHysteresisTime, 
p2mpSnapDataTable=p2mpSnapDataTable, p2mpSnapshotStatus=p2mpSnapshotStatus, p2mpTlValueIndex=p2mpTlValueIndex, p2mpHistMax=p2mpHistMax, p2mpTlValue=p2mpTlValue, p2mpSnapValueIndex=p2mpSnapValueIndex, p2mpSnapAttr3Size=p2mpSnapAttr3Size, p2mpTlCtrlTable=p2mpTlCtrlTable, p2mpThreshSuMacAddr=p2mpThreshSuMacAddr, p2mpThresholdEntry=p2mpThresholdEntry, p2mpSnapshotCtrlTable=p2mpSnapshotCtrlTable, p2mpThreshValue=p2mpThreshValue, p2mpHistMean=p2mpHistMean, p2mpHistStatus=p2mpHistStatus, p2mpTlDataEntry=p2mpTlDataEntry, p2mpRadioRfConformance=p2mpRadioRfConformance, p2mpSnapAttr2Id=p2mpSnapAttr2Id, p2mpHistSumPrecision=p2mpHistSumPrecision, p2mpTlTriggerLoc=p2mpTlTriggerLoc, p2mpSnapshotAction=p2mpSnapshotAction, p2mpHistSumScale=p2mpHistSumScale, p2mpSnapshotRowStatus=p2mpSnapshotRowStatus, p2mpRadioHistoryGroup=p2mpRadioHistoryGroup, p2mpSnapSummaryTable=p2mpSnapSummaryTable, p2mpTlDataTable=p2mpTlDataTable, p2mpTlStatus=p2mpTlStatus, p2mpImaginaryPart=p2mpImaginaryPart, ciscoWirelessRfMetricsMIB=ciscoWirelessRfMetricsMIB, p2mpSnapAttr1Size=p2mpSnapAttr1Size, p2mpSnapshotOwner=p2mpSnapshotOwner, p2mpThreshRowStatus=p2mpThreshRowStatus, p2mpTlUpdateTime=p2mpTlUpdateTime, p2mpRfMetricsMIBNotificationPrefix=p2mpRfMetricsMIBNotificationPrefix, p2mpComplianceTimelineGroup=p2mpComplianceTimelineGroup, p2mpTrapThresh=p2mpTrapThresh, p2mpSnapshotType=p2mpSnapshotType, PYSNMP_MODULE_ID=ciscoWirelessRfMetricsMIB, p2mpHistUpdateTime=p2mpHistUpdateTime, p2mpTlSummaryTable=p2mpTlSummaryTable, p2mpRealPart=p2mpRealPart, p2mpTlCtrlEntry=p2mpTlCtrlEntry, p2mpTlDataScale=p2mpTlDataScale, p2mpHistMin=p2mpHistMin, p2mpHistClass=p2mpHistClass, p2mpEndBinValue=p2mpEndBinValue, p2mpThresholdTable=p2mpThresholdTable, p2mpTlClass=p2mpTlClass, p2mpTlOwner=p2mpTlOwner, p2mpComplianceSnapshotGroup=p2mpComplianceSnapshotGroup, p2mpRadioThresholdGroup=p2mpRadioThresholdGroup, p2mpSnapshotDspNum=p2mpSnapshotDspNum, p2mpHistSummaryTable=p2mpHistSummaryTable)
| 137.115741 | 3,946 | 0.759091 |
97c1b71b3e56bfe4d5cd92f4af99022e21fc48fa | 3,321 | py | Python | MarkovChain/distribution.py | kierke-gaard/markov-chains-py | 19ec0e1e9dbc9db6eac59c9906c732fe3a0396ac | [
"MIT"
] | null | null | null | MarkovChain/distribution.py | kierke-gaard/markov-chains-py | 19ec0e1e9dbc9db6eac59c9906c732fe3a0396ac | [
"MIT"
] | null | null | null | MarkovChain/distribution.py | kierke-gaard/markov-chains-py | 19ec0e1e9dbc9db6eac59c9906c732fe3a0396ac | [
"MIT"
] | null | null | null | """
distribution - basic functionality for distribution handling like
histograms and inverse distribution look-ups.
Assuming that the observations are integers starting from 0.
"""
#%% Dependencies and Configuration
import numpy as np
from collections import Counter
from itertools import groupby
state_type = np.uint8
#%% Discrete cdf tables from samples
def histogram(samples):
    """Return a structured array of observed realizations and their
    relative frequencies.

    Fields: 'x' holds the realization (as ``state_type``), 'count' its
    normalized frequency; rows are sorted ascending by 'x'.
    """
    pairs = list(Counter(samples).items())
    hist = np.array(pairs, dtype=[('x', state_type), ('count', float)])
    hist.sort(order='x')
    total = hist['count'].sum()
    # Normalize occurrence counts into relative frequencies.
    hist['count'] = hist['count'] / total
    return hist
def complete_histogram(samples, number_of_states):
    """Return a dense frequency vector over the full state space.

    States never observed in ``samples`` get a count of zero; the array
    index itself is the observation (state) value.
    """
    frequencies = np.zeros(number_of_states, float)
    observed = histogram(samples)
    # Scatter the observed relative frequencies into their state slots;
    # everything else stays zero.
    frequencies[observed['x']] = observed['count']
    return frequencies
def inverse_cdf_lookup_table(complete_hist, granularity):
    '''Return a lookup table for state space indices as array.

    The index of the array represents a point in [0,1], namely the
    midpoint of an even partition into #granularity intervals; the stored
    value is the state-space index whose CDF interval contains that
    midpoint (i.e. a discretized inverse CDF).

    ``complete_hist`` is expected to be a dense probability vector that
    sums to 1, or all zeros when there are no observations.
    '''
    upper_bounds = np.cumsum(complete_hist)  # CDF value per state
    mid_points = (np.arange(granularity) + 0.5)/granularity
    arr = np.zeros(granularity, dtype=state_type)
    # An empty histogram (total mass 0) maps to all zeros.  Without this
    # guard, the scan below would advance j past the end of upper_bounds
    # and raise an IndexError instead of returning the documented zeros.
    if upper_bounds.size == 0 or upper_bounds[-1] == 0:
        return arr
    j, i = 0, 0
    # Merge-style scan: mid_points and upper_bounds are both ascending, so
    # advance j until mid_points[i] falls inside state j's CDF interval.
    while i < granularity:
        if mid_points[i] > upper_bounds[j]:
            j += 1
        else:  # mid_points[i] <= upper_bounds[j]
            arr[i] = j
            i += 1
    return arr
def inverse_cdf_lookup(samples, number_of_states, granularity):
    """Build an inverse-CDF lookup table directly from raw samples.

    Convenience wrapper: densify the sample histogram over the full state
    space, then discretize its inverse CDF onto a uniform grid.
    """
    dense_hist = complete_histogram(samples, number_of_states)
    return inverse_cdf_lookup_table(dense_hist, granularity)
def realization(inverse_cdf_lookup_tbl, index):
    """Return the state realized for the given uniform-grid index."""
    state = inverse_cdf_lookup_tbl[index]
    return state
#%% Retrieve samples from time series
# Assumptions: 1d time series as 1d array with values in state space, no missing values, equitemporal
def sliding_window(arr, window_len=2):
    """Return every contiguous window of ``arr`` of length ``window_len``
    as one row of a 2-d array (len(arr) - window_len + 1 rows)."""
    n = len(arr)
    # One shifted slice per position inside the window; stacking them and
    # transposing turns each column position into a window row.
    shifted = [arr[offset:n - window_len + offset + 1]
               for offset in range(window_len)]
    return np.vstack(shifted).transpose()
def groupby_butlast(arr):
    '''Groups an array with keys as its entries apart from the last dimension
    and value as list of occurences in the last dimension.'''
    def head(row):
        # Everything except the final element serves as the grouping key.
        return row[:-1]
    ordered = sorted(arr, key=head)  # groupby needs runs sorted on the key
    grouped = []
    for key, rows in groupby(ordered, head):
        grouped.append((key, [row[-1] for row in rows]))
    return grouped
def dist_tensor(samples, number_of_states, order, granularity):
    '''Returns a tensor with an index of the state space in each dimension,
    one dimension for each step back in time. The last dimension is preserved
    for the distribution of future states in the form of an inverse cdf
    lookup table (see ``inverse_cdf_lookup_table``).'''
    dimension = order * [number_of_states] + [granularity]
    dist = np.zeros(dimension, dtype=state_type)
    # Each window of length order + 1 is one observed transition:
    # `order` conditioning states followed by the successor state.
    sliding_win = sliding_window(samples, window_len=order + 1)
    # NOTE(review): for order >= 2 the grouping key here is a numpy row
    # slice; confirm that sorting on such keys behaves as intended.
    cnts = groupby_butlast(sliding_win)
    for conditioning_states, successors in cnts:
        index = tuple(conditioning_states)
        hist = complete_histogram(successors, number_of_states)
        # Store the conditional next-state distribution as a lookup table.
        dist[index] = inverse_cdf_lookup_table(hist, granularity)
    return dist
7df53458075e336ee378c6fd6d2bab13a3bd2efd | 1,541 | py | Python | qzails/utils/common/serialize_variable.py | pyxsqbs/qzails | 30f5bca3e8dd0c3b783a14bbbc22a6767a0bfa10 | [
"MIT"
] | null | null | null | qzails/utils/common/serialize_variable.py | pyxsqbs/qzails | 30f5bca3e8dd0c3b783a14bbbc22a6767a0bfa10 | [
"MIT"
] | 1 | 2019-12-13T10:14:05.000Z | 2019-12-13T10:14:26.000Z | qzails/utils/common/serialize_variable.py | pyxsqbs/qzails | 30f5bca3e8dd0c3b783a14bbbc22a6767a0bfa10 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@ Author: qinbaoshuai
@ Date: 2019-07-17 10:17:41
@ Email: qinbaoshuai@cloudwalk.cn
@ LastEditors: qinbaoshuai
@ LastEditTime: 2019-08-16 14:39:50
@ Description: 公共变量的保存、载入,所有算子都可以访问到公共变量
"""
import json
import os
import dill as pickle
# Parent directory for the pickled binary files of shared variables
DATA_PK_PATH = 'data/serialize_variable'
def save_varible(_self, key, value):
    """
    Save or update a shared variable by pickling it to disk.

    The file name combines ``key`` with the model and version ids, so
    variables are namespaced per (model_id, version_id) pair.

    @ param _self {dict} API parameter dict; must contain 'model_id'
        and 'version_id'
    @ param key {str} name under which the variable is stored
    @ param value {any} value to persist (anything dill can pickle)
    @ return: None
    """
    assert 'model_id' in _self.keys()
    assert 'version_id' in _self.keys()
    model_id = _self['model_id']
    version_id = _self['version_id']
    # File layout: DATA_PK_PATH/<key>_<model_id>_<version_id>.pk
    with open(os.path.join(DATA_PK_PATH, '{}_{}_{}.pk'.format(key, model_id, version_id)), 'wb') as pk_f:
        pickle.dump(value, pk_f, protocol=pickle.HIGHEST_PROTOCOL)
def load_varible(_self, key):
    """
    Load a previously saved shared variable from its pickle file.

    @ param _self {dict} API parameter dict; must contain 'model_id'
        and 'version_id'
    @ param key {str} name the variable was stored under
    @ return: {any} the unpickled shared variable
    """
    assert 'model_id' in _self.keys()
    assert 'version_id' in _self.keys()
    model_id = _self['model_id']
    version_id = _self['version_id']
    # Must match the file layout used by save_varible.
    with open(os.path.join(DATA_PK_PATH, '{}_{}_{}.pk'.format(key, model_id, version_id)), 'rb') as pk_f:
        value = pickle.load(pk_f)
    return value
if __name__ == "__main__":
    # Smoke test: round-trip a string value through the pickle store.
    _self = {}
    _self['model_id'] = '12345'
    _self['version_id'] = '1.0.0'
    save_varible(_self, 'test_str', '什么鬼')
    print(load_varible(_self, 'test_str'))
| 27.517857 | 105 | 0.656716 |
2d71b9447d3069590e5440f58a288471e7946d17 | 13,640 | py | Python | nrcan_etl/tests/test_dwelling.py | cds-snc/nrcan_api | 795e001d24b67230cf92ba4cb409b37452c0d4a8 | [
"MIT"
] | 7 | 2017-12-15T03:58:29.000Z | 2018-04-23T22:48:53.000Z | nrcan_etl/tests/test_dwelling.py | NRCan/energuide_api | 795e001d24b67230cf92ba4cb409b37452c0d4a8 | [
"MIT"
] | 137 | 2018-01-24T16:44:57.000Z | 2018-06-25T14:02:10.000Z | nrcan_etl/tests/test_dwelling.py | cds-snc/nrcan_api | 795e001d24b67230cf92ba4cb409b37452c0d4a8 | [
"MIT"
] | 4 | 2018-02-15T12:40:52.000Z | 2018-07-03T14:22:51.000Z | import copy
import datetime
import typing
import pytest
from energuide import dwelling
from energuide.embedded import upgrade
from energuide.embedded import measurement
from energuide.embedded import composite
from energuide.embedded import walls
from energuide.embedded import region
from energuide.embedded import evaluation_type
from energuide.exceptions import InvalidInputDataError
from energuide.exceptions import InvalidGroupSizeError
# pylint: disable=no-self-use
@pytest.fixture
def upgrades_input() -> typing.List[str]:
    """Raw upgrade XML snippets as they appear in an extract row."""
    return [
        '<Ceilings cost="0" priority="12" />',
        '<MainWalls cost="1" priority="2" />',
        '<Foundation cost="2" priority="3" />',
    ]
@pytest.fixture
def sample_input_d(upgrades_input: typing.List[str]) -> typing.Dict[str, typing.Any]:
    """A complete pre-retrofit ('D') evaluation row, keyed like the raw extract."""
    return {
        'HOUSE_ID': '456',
        'EVAL_ID': '123',
        'EVAL_TYPE': 'D',
        'ENTRYDATE': '2018-01-01',
        'CREATIONDATE': '2018-01-08 09:00:00',
        'MODIFICATIONDATE': '2018-06-01 09:00:00',
        'CLIENTCITY': 'Ottawa',
        'forwardSortationArea': 'K1P',
        'HOUSEREGION': 'Ontario',
        'YEARBUILT': '2000',
        'BUILDER': '4K13D01404',
        'HEATEDFLOORAREA': '12.34',
        'TYPEOFHOUSE': 'Single detached',
        'ERSRATING': '567',
        'UGRERSRATING': '565',
        'ERSGHG': '12.5',
        'UGRERSGHG': '12.34',
        'upgrades': upgrades_input,
        'ERSENERGYINTENSITY': '0.82',
        'UGRERSENERGYINTENSITY': '0.80',
        'EGHRATING': '50.5',
        'UGRRATING': '49.0',
        'WALLDEF': '45.3;12;50;12;4.7;12',
        'UGRWALLDEF': '45.3;12;50;12;4.7;10',
        'EGHHLWALLS': '27799.9',
        'UGRHLWALLS': '27799.9',
        'EGHDESHTLOSS': '11242.1',
        'UGRDESHTLOSS': '10757.3',
    }
@pytest.fixture
def sample_input_e(sample_input_d: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
    """The same dwelling as sample_input_d, as a post-retrofit ('E') row."""
    output = copy.deepcopy(sample_input_d)
    output['EVAL_TYPE'] = 'E'
    output['ENTRYDATE'] = '2018-01-02'
    return output
@pytest.fixture
def sample_input_missing(sample_input_d: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
    """A 'D' row whose nullable fields are set to None."""
    output = copy.deepcopy(sample_input_d)
    output['MODIFICATIONDATE'] = None
    output['ERSRATING'] = None
    output['UGRERSRATING'] = None
    return output
@pytest.fixture
def sample_parsed_d(sample_input_d: typing.Dict[str, typing.Any]) -> dwelling.ParsedDwellingDataRow:
    """The 'D' row parsed into a ParsedDwellingDataRow."""
    return dwelling.ParsedDwellingDataRow.from_row(sample_input_d)
@pytest.fixture
def sample_parsed_e(sample_input_e: typing.Dict[str, typing.Any]) -> dwelling.ParsedDwellingDataRow:
    """The 'E' row parsed into a ParsedDwellingDataRow."""
    return dwelling.ParsedDwellingDataRow.from_row(sample_input_e)
class TestParsedDwellingDataRow:
    """Tests for ParsedDwellingDataRow.from_row: full parse, nullable
    fields, and input validation errors."""
    def test_from_row(self, sample_input_d: typing.Dict[str, typing.Any]) -> None:
        """A full 'D' row parses into the expected typed record."""
        output = dwelling.ParsedDwellingDataRow.from_row(sample_input_d)
        assert output == dwelling.ParsedDwellingDataRow(
            house_id=456,
            eval_id=123,
            file_id='4K13D01404',
            eval_type=evaluation_type.EvaluationType.PRE_RETROFIT,
            entry_date=datetime.date(2018, 1, 1),
            creation_date=datetime.datetime(2018, 1, 8, 9),
            modification_date=datetime.datetime(2018, 6, 1, 9),
            year_built=2000,
            city='Ottawa',
            region=region.Region.ONTARIO,
            forward_sortation_area='K1P',
            energy_upgrades=[
                upgrade.Upgrade(
                    upgrade_type='Ceilings',
                    cost=0,
                    priority=12,
                ),
                upgrade.Upgrade(
                    upgrade_type='MainWalls',
                    cost=1,
                    priority=2,
                ),
                upgrade.Upgrade(
                    upgrade_type='Foundation',
                    cost=2,
                    priority=3,
                ),
            ],
            house_type='Single detached',
            heated_floor_area=12.34,
            egh_rating=measurement.Measurement(
                measurement=50.5,
                upgrade=49.0,
            ),
            ers_rating=measurement.Measurement(
                measurement=567,
                upgrade=565,
            ),
            greenhouse_gas_emissions=measurement.Measurement(
                measurement=12.5,
                upgrade=12.34,
            ),
            energy_intensity=measurement.Measurement(
                measurement=0.82,
                upgrade=0.80,
            ),
            # WALLDEF/UGRWALLDEF strings parse into percentage/rValue pairs.
            walls=measurement.Measurement(
                measurement=walls.Wall(
                    insulation=[
                        composite.CompositeValue(
                            percentage=45.3,
                            value=12.0,
                            value_name='rValue'
                        ),
                        composite.CompositeValue(
                            percentage=50.0,
                            value=12.0,
                            value_name='rValue'
                        ),
                        composite.CompositeValue(
                            percentage=4.7,
                            value=12.0,
                            value_name='rValue'
                        ),
                    ],
                    heat_lost=27799.9
                ),
                upgrade=walls.Wall(
                    insulation=[
                        composite.CompositeValue(
                            percentage=45.3,
                            value=12.0,
                            value_name='rValue'
                        ),
                        composite.CompositeValue(
                            percentage=50.0,
                            value=12.0,
                            value_name='rValue'
                        ),
                        composite.CompositeValue(
                            percentage=4.7,
                            value=10.0,
                            value_name='rValue'
                        ),
                    ],
                    heat_lost=27799.9
                )
            ),
            design_heat_loss=measurement.Measurement(
                measurement=11242.1,
                upgrade=10757.3,
            ),
        )
    def test_null_fields_are_accepted(self, sample_input_missing: typing.Dict[str, typing.Any]) -> None:
        """Nullable extract fields become None / Measurement(None, None)."""
        output = dwelling.ParsedDwellingDataRow.from_row(sample_input_missing)
        assert output.modification_date is None
        assert output.ers_rating == measurement.Measurement(None, None)
    def test_bad_postal_code(self, sample_input_d: typing.Dict[str, typing.Any]) -> None:
        """A malformed forward sortation area is rejected."""
        sample_input_d['forwardSortationArea'] = 'K16'
        with pytest.raises(InvalidInputDataError):
            dwelling.ParsedDwellingDataRow.from_row(sample_input_d)
    def test_from_bad_row(self) -> None:
        """The error message names missing fields, not ones that were present."""
        input_data = {
            'EVAL_ID': 123
        }
        with pytest.raises(InvalidInputDataError) as ex:
            dwelling.ParsedDwellingDataRow.from_row(input_data)
        assert 'EVAL_TYPE' in ex.exconly()
        assert 'EVAL_ID' not in ex.exconly()
class TestDwellingEvaluation:
    """Tests for Evaluation.from_data and its to_dict serialization."""
    def test_eval_type(self, sample_parsed_d: dwelling.ParsedDwellingDataRow) -> None:
        output = dwelling.Evaluation.from_data(sample_parsed_d)
        assert output.evaluation_type == evaluation_type.EvaluationType.PRE_RETROFIT
    def test_entry_date(self, sample_parsed_d: dwelling.ParsedDwellingDataRow) -> None:
        output = dwelling.Evaluation.from_data(sample_parsed_d)
        assert output.entry_date == datetime.date(2018, 1, 1)
    def test_creation_date(self, sample_parsed_d: dwelling.ParsedDwellingDataRow) -> None:
        output = dwelling.Evaluation.from_data(sample_parsed_d)
        assert output.creation_date == datetime.datetime(2018, 1, 8, 9)
    def test_modification_date(self, sample_parsed_d: dwelling.ParsedDwellingDataRow) -> None:
        output = dwelling.Evaluation.from_data(sample_parsed_d)
        assert output.modification_date == datetime.datetime(2018, 6, 1, 9)
    def test_to_dict(self, sample_parsed_d: dwelling.ParsedDwellingDataRow) -> None:
        """Serialization uses camelCase keys and ISO-formatted dates."""
        output = dwelling.Evaluation.from_data(sample_parsed_d).to_dict()
        assert output == {
            'fileId': '4K13D01404',
            'evaluationId': 123,
            'houseType': 'Single detached',
            'evaluationType': evaluation_type.EvaluationType.PRE_RETROFIT.value,
            'entryDate': '2018-01-01',
            'creationDate': '2018-01-08T09:00:00',
            'modificationDate': '2018-06-01T09:00:00',
            'energyUpgrades': [
                {
                    'upgradeType': 'Ceilings',
                    'cost': 0,
                    'priority': 12,
                },
                {
                    'upgradeType': 'MainWalls',
                    'cost': 1,
                    'priority': 2,
                },
                {
                    'upgradeType': 'Foundation',
                    'cost': 2,
                    'priority': 3,
                },
            ],
            'heatedFloorArea': 12.34,
            'eghRating': {
                'measurement': 50.5,
                'upgrade': 49.0,
            },
            'ersRating': {
                'measurement': 567,
                'upgrade': 565,
            },
            'greenhouseGasEmissions': {
                'measurement': 12.5,
                'upgrade': 12.34,
            },
            'energyIntensity': {
                'measurement': 0.82,
                'upgrade': 0.80,
            },
            'walls': {
                'measurement': {
                    'insulation': [
                        {
                            'percentage': 45.3,
                            'rValue': 12.0,
                        },
                        {
                            'percentage': 50.0,
                            'rValue': 12.0,
                        },
                        {
                            'percentage': 4.7,
                            'rValue': 12.0,
                        },
                    ],
                    'heatLost': 27799.9
                },
                'upgrade': {
                    'insulation': [
                        {
                            'percentage': 45.3,
                            'rValue': 12.0,
                        },
                        {
                            'percentage': 50.0,
                            'rValue': 12.0,
                        },
                        {
                            'percentage': 4.7,
                            'rValue': 10.0,
                        },
                    ],
                    'heatLost': 27799.9
                }
            },
            'designHeatLoss': {
                'measurement': 11242.1,
                'upgrade': 10757.3,
            }
        }
class TestDwelling:
    """Tests for Dwelling.from_group: grouping evaluations, dummy
    filtering, and serialization."""
    @pytest.fixture
    def sample(self,
               sample_input_d: typing.Dict[str, typing.Any],
               sample_input_e: typing.Dict[str, typing.Any],
               ) -> typing.List[typing.Dict[str, typing.Any]]:
        """One dwelling with a matched D/E evaluation pair."""
        return [sample_input_d, sample_input_e].copy()
    @pytest.fixture
    def dummy_sample(self,
                     sample_input_d: typing.Dict[str, typing.Any],
                     sample_input_e: typing.Dict[str, typing.Any],
                     ) -> typing.List[typing.Dict[str, typing.Any]]:
        """A group containing a duplicate 'D' (dummy) row plus extra E/F rows."""
        dummy_d = sample_input_e.copy()
        dummy_d['EVAL_TYPE'] = 'D'
        new_e = sample_input_e.copy()
        new_e['ENTRYDATE'] = '2018-06-01'
        new_f = sample_input_e.copy()
        new_f['EVAL_TYPE'] = 'F'
        new_f['ENTRYDATE'] = '2018-08-01'
        return [sample_input_d, sample_input_e, dummy_d, new_e, new_f].copy()
    def test_house_id(self, sample: typing.List[typing.Dict[str, typing.Any]]) -> None:
        output = dwelling.Dwelling.from_group(sample)
        assert output.house_id == 456
    def test_year_built(self, sample: typing.List[typing.Dict[str, typing.Any]]) -> None:
        output = dwelling.Dwelling.from_group(sample)
        assert output.year_built == 2000
    def test_address_data(self, sample: typing.List[typing.Dict[str, typing.Any]]) -> None:
        output = dwelling.Dwelling.from_group(sample)
        assert output.city == 'Ottawa'
        assert output.region == region.Region.ONTARIO
        assert output.forward_sortation_area == 'K1P'
    def test_evaluations(self, sample: typing.List[typing.Dict[str, typing.Any]]) -> None:
        output = dwelling.Dwelling.from_group(sample)
        assert len(output.evaluations) == 2
    def test_no_data(self) -> None:
        """An empty group is rejected with InvalidGroupSizeError."""
        data: typing.List[typing.Any] = []
        with pytest.raises(InvalidGroupSizeError):
            dwelling.Dwelling.from_group(data)
    def test_to_dict(self, sample: typing.List[typing.Dict[str, typing.Any]]) -> None:
        """Dwelling serialization exposes camelCase keys and no raw postal code."""
        output = dwelling.Dwelling.from_group(sample).to_dict()
        evaluations = output.pop('evaluations')
        assert output == {
            'houseId': 456,
            'yearBuilt': 2000,
            'city': 'Ottawa',
            'region': region.Region.ONTARIO.value,
            'forwardSortationArea': 'K1P',
        }
        assert 'postalCode' not in output
        assert len(evaluations) == 2
    def test_filter_dummies(self, dummy_sample: typing.List[typing.Dict[str, typing.Any]]) -> None:
        """One of the five rows is a dummy duplicate and gets filtered out."""
        output = dwelling.Dwelling.from_group(dummy_sample)
        assert len(output.evaluations) == 4
| 35.428571 | 104 | 0.517595 |
93735b6e4d415232f3caeeae47ac7cc4aec58016 | 75,197 | py | Python | cinder-14.0.0/cinder/volume/drivers/ibm/ibm_storage/ds8k_proxy.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 2 | 2019-05-24T14:13:50.000Z | 2019-05-24T14:21:13.000Z | cinder-14.0.0/cinder/volume/drivers/ibm/ibm_storage/ds8k_proxy.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | cinder-14.0.0/cinder/volume/drivers/ibm/ibm_storage/ds8k_proxy.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # Copyright (c) 2016 IBM Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
This is the driver that allows openstack to talk to DS8K.
All volumes are thin provisioned by default, if the machine is licensed for it.
This can be overridden by creating a volume type and specifying a key like so:
.. code:: console
#> cinder type-create my_type
#> cinder type-key my_type set drivers:thin_provision=False
#> cinder create --volume-type my_type 123
Sample settings for cinder.conf:
.. code:: ini
enabled_backends = ibm_ds8k_1, ibm_ds8k_2
[ibm_ds8k_1]
proxy = cinder.volume.drivers.ibm.ibm_storage.ds8k_proxy.DS8KProxy
volume_backend_name = ibm_ds8k_1
san_clustername = P2,P3
san_password = actual_password
san_login = actual_username
san_ip = foo.com
volume_driver =
cinder.volume.drivers.ibm.ibm_storage.ibm_storage.IBMStorageDriver
chap = disabled
connection_type = fibre_channel
replication_device = connection_type: fibre_channel, backend_id: bar,
san_ip: bar.com, san_login: actual_username,
san_password: actual_password, san_clustername: P4,
port_pairs: I0236-I0306; I0237-I0307
[ibm_ds8k_2]
proxy = cinder.volume.drivers.ibm.ibm_storage.ds8k_proxy.DS8KProxy
volume_backend_name = ibm_ds8k_2
san_clustername = P4,P5
san_password = actual_password
san_login = actual_username
san_ip = bar.com
volume_driver =
cinder.volume.drivers.ibm.ibm_storage.ibm_storage.IBMStorageDriver
chap = disabled
connection_type = fibre_channel
"""
import ast
import json
import six
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
from cinder import context
from cinder import coordination
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder.objects import fields
from cinder.volume import configuration
import cinder.volume.drivers.ibm.ibm_storage as storage
from cinder.volume.drivers.ibm.ibm_storage import (
ds8k_replication as replication)
from cinder.volume.drivers.ibm.ibm_storage import ds8k_helper as helper
from cinder.volume.drivers.ibm.ibm_storage import ds8k_restclient as restclient
from cinder.volume.drivers.ibm.ibm_storage import proxy
from cinder.volume.drivers.ibm.ibm_storage import strings
from cinder.volume import utils
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
# IBM i (OS/400) volume type codes mapped to their fixed capacity in GiB.
# Types '050' and '099' are variable-sized, hence the empty-string value
# (Lun.__init__ only overrides self.size for the fixed-size codes).
VALID_OS400_VOLUME_TYPES = {
    'A01': 8, 'A02': 17, 'A04': 66,
    'A05': 33, 'A06': 132, 'A07': 263,
    'A81': 8, 'A82': 17, 'A84': 66,
    'A85': 33, 'A86': 132, 'A87': 263,
    '050': '', '099': ''
}

# Defaults applied when a volume type does not carry the corresponding
# extra spec (see Lun.__init__).
EXTRA_SPECS_DEFAULTS = {
    'thin': True,
    'replication_enabled': False,
    'consistency': False,
    'os400': '',
    'storage_pool_ids': '',
    'storage_lss_ids': '',
    'async_clone': False,
    'multiattach': False
}
# DS8K-specific configuration options, registered below under the shared
# backend configuration group.
ds8k_opts = [
    cfg.StrOpt(
        'ds8k_devadd_unitadd_mapping',
        default='',
        help='Mapping between IODevice address and unit address.'),
    cfg.StrOpt(
        'ds8k_ssid_prefix',
        default='FF',
        help='Set the first two digits of SSID.'),
    cfg.StrOpt(
        'lss_range_for_cg',
        default='',
        help='Reserve LSSs for consistency group.'),
    cfg.StrOpt(
        'ds8k_host_type',
        default='auto',
        help='Set to zLinux if your OpenStack version is prior to '
             'Liberty and you\'re connecting to zLinux systems. '
             'Otherwise set to auto. Valid values for this parameter '
             'are: %s.' % six.text_type(helper.VALID_HOST_TYPES)[1:-1])
]

CONF = cfg.CONF
CONF.register_opts(ds8k_opts, group=configuration.SHARED_CONF_GROUP)
class Lun(object):
    """provide volume information for driver from volume db object.
    Version history:
    .. code-block:: none
        1.0.0 - initial revision.
        2.1.0 - Added support for specify pool and lss, also improve the code.
        2.1.1 - Added support for replication consistency group.
        2.1.2 - Added support for cloning volume asynchronously.
        2.3.0 - Added support for reporting backend state.
    """
    VERSION = "2.3.0"

    class FakeLun(object):
        # Detached snapshot of a Lun's state; used e.g. by migrate_volume
        # as the target descriptor so the original Lun stays untouched.

        def __init__(self, lun, **overrides):
            # NOTE(review): **overrides is accepted but never applied in
            # this body — confirm callers rely only on the plain copy.
            self.size = lun.size
            self.os_id = lun.os_id
            self.cinder_name = lun.cinder_name
            self.is_snapshot = lun.is_snapshot
            self.ds_name = lun.ds_name
            self.ds_id = lun.ds_id
            self.type_thin = lun.type_thin
            self.type_os400 = lun.type_os400
            self.data_type = lun.data_type
            self.type_replication = lun.type_replication
            self.group = lun.group
            self.specified_pool = lun.specified_pool
            self.specified_lss = lun.specified_lss
            self.async_clone = lun.async_clone
            self.multiattach = lun.multiattach
            self.status = lun.status
            # Replication attributes only exist on volume Luns,
            # not snapshot Luns (see Lun.__init__).
            if not self.is_snapshot:
                self.replica_ds_name = lun.replica_ds_name
                self.replication_driver_data = (
                    lun.replication_driver_data.copy())
                self.replication_status = lun.replication_status
            self.pool_lss_pair = lun.pool_lss_pair

        def update_volume(self, lun):
            # Fold this fake LUN's identity (new DS id, replication data)
            # back into the real Lun's Cinder volume-update dict.
            lun.data_type = self.data_type
            volume_update = lun.get_volume_update()
            volume_update['provider_location'] = six.text_type({
                'vol_hex_id': self.ds_id})
            if self.type_replication:
                volume_update['replication_driver_data'] = json.dumps(
                    self.replication_driver_data)
                volume_update['metadata']['replication'] = six.text_type(
                    self.replication_driver_data)
            else:
                # Scrub any stale replication info.
                volume_update.pop('replication_driver_data', None)
                volume_update['metadata'].pop('replication', None)
            volume_update['metadata']['vol_hex_id'] = self.ds_id
            volume_update['multiattach'] = self.multiattach
            return volume_update

    def __init__(self, volume, is_snapshot=False):
        """Extract driver-relevant state from a volume or snapshot object.

        :param volume: Cinder volume (or snapshot, with is_snapshot=True)
                       DB object.
        :param is_snapshot: treat *volume* as a snapshot object.
        """
        volume_type_id = volume.get('volume_type_id')
        self.specs = volume_types.get_volume_type_extra_specs(
            volume_type_id) if volume_type_id else {}
        # Extra-spec driven capabilities; string specs compared
        # case-insensitively against 'TRUE' / '<is> True'.
        os400 = self.specs.get(
            'drivers:os400', EXTRA_SPECS_DEFAULTS['os400']
        ).strip().upper()
        self.type_thin = self.specs.get(
            'drivers:thin_provision', '%s' % EXTRA_SPECS_DEFAULTS['thin']
        ).upper() == 'TRUE'
        self.type_replication = self.specs.get(
            'replication_enabled',
            '<is> %s' % EXTRA_SPECS_DEFAULTS['replication_enabled']
        ).upper() == strings.METADATA_IS_TRUE
        self.specified_pool = self.specs.get(
            'drivers:storage_pool_ids',
            EXTRA_SPECS_DEFAULTS['storage_pool_ids']
        )
        self.specified_lss = self.specs.get(
            'drivers:storage_lss_ids',
            EXTRA_SPECS_DEFAULTS['storage_lss_ids']
        )
        self.multiattach = self.specs.get(
            'multiattach', '<is> %s' % EXTRA_SPECS_DEFAULTS['multiattach']
        ).upper() == strings.METADATA_IS_TRUE
        # provider_location stores the backend volume hex id as a
        # stringified dict; absent until the LUN is actually created.
        if volume.provider_location:
            provider_location = ast.literal_eval(volume.provider_location)
            self.ds_id = provider_location['vol_hex_id']
        else:
            self.ds_id = None
        self.cinder_name = volume.display_name
        self.pool_lss_pair = {}
        self.is_snapshot = is_snapshot
        if self.is_snapshot:
            self.group = (Group(volume.group_snapshot, True)
                          if volume.group_snapshot else None)
            self.size = volume.volume_size
            # ds8k supports at most 16 chars
            self.ds_name = (
                "OS%s:%s" % ('snap', helper.filter_alnum(self.cinder_name))
            )[:16]
            self.metadata = self._get_snapshot_metadata(volume)
            self.source_volid = volume.volume_id
        else:
            self.group = Group(volume.group) if volume.group else None
            self.size = volume.size
            self.ds_name = (
                "OS%s:%s" % ('vol', helper.filter_alnum(self.cinder_name))
            )[:16]
            self.replica_ds_name = (
                "OS%s:%s" % ('Replica', helper.filter_alnum(self.cinder_name))
            )[:16]
            self.previous_status = volume.previous_status
            self.replication_status = volume.replication_status
            self.replication_driver_data = (
                json.loads(volume.replication_driver_data)
                if volume.replication_driver_data else {})
            if self.replication_driver_data:
                # now only support one replication target.
                replication_target = sorted(
                    self.replication_driver_data.values())[0]
                self.replica_ds_id = replication_target['vol_hex_id']
                # First two hex digits of a DS8K volume id are its LSS.
                self.pool_lss_pair = {
                    'source': (None, self.ds_id[0:2]),
                    'target': (None, self.replica_ds_id[0:2])
                }
                # Don't use self.replication_status to judge if volume has
                # been failed over or not, because when user fail over a
                # group, replication_status of each volume in group is
                # failing over.
                self.failed_over = (True if 'default' in
                                    self.replication_driver_data.keys()
                                    else False)
            else:
                self.failed_over = False
            self.metadata = self._get_volume_metadata(volume)
            self.source_volid = volume.source_volid
        # async_clone is driven by volume/snapshot metadata, defaulting to
        # False when the key is absent.
        self.async_clone = self.metadata.get(
            'async_clone',
            '%s' % EXTRA_SPECS_DEFAULTS['async_clone']
        ).upper() == 'TRUE'
        if os400:
            if os400 not in VALID_OS400_VOLUME_TYPES.keys():
                raise restclient.APIException(
                    data=(_("The OS400 volume type provided, %s, is not "
                            "a valid volume type.") % os400))
            self.type_os400 = os400
            # Fixed-size OS400 types force the volume size.
            if os400 not in ['050', '099']:
                self.size = VALID_OS400_VOLUME_TYPES[os400]
        else:
            self.type_os400 = EXTRA_SPECS_DEFAULTS['os400']
        self.data_type = self._create_datatype(self.type_os400)
        self.os_id = volume.id
        self.status = volume.status
        self.volume = volume

    def _get_volume_metadata(self, volume):
        # Accept either the ORM relationship (volume_metadata, a list of
        # key/value rows) or a plain metadata dict.
        if 'volume_metadata' in volume:
            metadata = volume.volume_metadata
            return {m['key']: m['value'] for m in metadata}
        if 'metadata' in volume:
            return volume.metadata
        return {}

    def _get_snapshot_metadata(self, snapshot):
        # Same normalization as _get_volume_metadata, for snapshots.
        if 'snapshot_metadata' in snapshot:
            metadata = snapshot.snapshot_metadata
            return {m['key']: m['value'] for m in metadata}
        if 'metadata' in snapshot:
            return snapshot.metadata
        return {}

    def shallow_copy(self, **overrides):
        # Return a detached FakeLun mirror of this Lun.
        return Lun.FakeLun(self, **overrides)

    def _create_datatype(self, t):
        # Map the OS400 type code to the DS8K data-type string; None for
        # plain (non-OS400) volumes.
        if t[0:2] == 'A0':
            datatype = t + ' FB 520P'
        elif t[0:2] == 'A8':
            datatype = t + ' FB 520U'
        elif t == '050':
            datatype = t + ' FB 520UV'
        elif t == '099':
            datatype = t + ' FB 520PV'
        else:
            datatype = None
        return datatype

    # Note: updating metadata in vol related funcs deletes all prior metadata
    def get_volume_update(self):
        """Build the model-update dict Cinder expects after an operation."""
        volume_update = {}
        volume_update['provider_location'] = six.text_type(
            {'vol_hex_id': self.ds_id})
        # update metadata
        if not self.is_snapshot:
            if self.type_replication:
                self.metadata['replication'] = six.text_type(
                    self.replication_driver_data)
            else:
                self.metadata.pop('replication', None)
            volume_update['replication_driver_data'] = json.dumps(
                self.replication_driver_data)
            volume_update['replication_status'] = (
                self.replication_status or
                fields.ReplicationStatus.NOT_CAPABLE)
            volume_update['multiattach'] = self.multiattach
        self.metadata['data_type'] = (self.data_type or
                                      self.metadata['data_type'])
        self.metadata['vol_hex_id'] = self.ds_id
        volume_update['metadata'] = self.metadata
        # need to update volume size for OS400
        if self.type_os400:
            volume_update['size'] = self.size
        return volume_update
class Group(object):
    """Adapt a Cinder group (or group snapshot) DB object for driver use."""

    def __init__(self, group, is_snapshot=False):
        self.id = group.id
        self.host = group.host
        # Capability flags derived from the group type's specs.
        self.consisgroup_snapshot_enabled = (
            utils.is_group_a_cg_snapshot_type(group))
        self.group_replication_enabled = utils.is_group_a_type(
            group, "group_replication_enabled")
        self.consisgroup_replication_enabled = utils.is_group_a_type(
            group, "consistent_group_replication_enabled")
        if is_snapshot:
            # Group snapshots only carry their member snapshots.
            self.snapshots = group.snapshots
            return
        # Plain groups expose failover state and member volumes.
        self.failed_over = (
            group.replication_status ==
            fields.ReplicationStatus.FAILED_OVER)
        # create_volume needs to check volumes in the group,
        # so get it from volume.group object.
        self.volumes = group.volumes
class DS8KProxy(proxy.IBMStorageProxy):
prefix = "[IBM DS8K STORAGE]:"
    def __init__(self, storage_info, logger, exception, driver,
                 active_backend_id=None, HTTPConnectorObject=None, host=None):
        """Initialize the proxy; actual backend connection happens in setup().

        :param active_backend_id: non-None when this backend has been
                                  failed over to its replication target.
        """
        proxy.IBMStorageProxy.__init__(
            self, storage_info, logger, exception, driver, active_backend_id)
        self._helper = None
        self._replication = None
        self._connector_obj = HTTPConnectorObject
        self._host = host
        self._replication_enabled = False
        self._active_backend_id = active_backend_id
        self.configuration = driver.configuration
        self.configuration.append_config_values(ds8k_opts)
        # TODO(jiamin): this cache is used to handle concurrency issue, but it
        # hurts HA, we will find whether is it possible to store it in storage.
        self.consisgroup_cache = {}
    @proxy._trace_time
    def setup(self, ctxt):
        """Connect to the DS8K backend and optionally set up replication.

        Picks the helper implementation from the configured connection
        type (FC vs ECKD) and presence of replication devices; ctxt is
        part of the proxy interface and unused here.
        """
        LOG.info("Initiating connection to IBM DS8K storage system.")
        connection_type = self.configuration.safe_get('connection_type')
        replication_devices = self.configuration.safe_get('replication_device')
        if connection_type == storage.XIV_CONNECTION_TYPE_FC:
            if not replication_devices:
                self._helper = helper.DS8KCommonHelper(self.configuration,
                                                       self._connector_obj)
            else:
                self._helper = (
                    helper.DS8KReplicationSourceHelper(self.configuration,
                                                       self._connector_obj))
        elif connection_type == storage.XIV_CONNECTION_TYPE_FC_ECKD:
            self._helper = helper.DS8KECKDHelper(self.configuration,
                                                 self._connector_obj)
        else:
            raise exception.InvalidParameterValue(
                err=(_("Param [connection_type] %s is invalid.")
                     % connection_type))
        if replication_devices:
            self._do_replication_setup(replication_devices, self._helper)
        # checking volumes which are still in clone process.
        self._check_async_cloned_volumes()
    @proxy.logger
    def _check_async_cloned_volumes(self):
        """Resume watching FlashCopy clones left in-flight across restarts.

        Scans all volumes on this host; any whose metadata marks the
        FlashCopy as 'started' gets a background watcher greenthread.
        """
        ctxt = context.get_admin_context()
        volumes = objects.VolumeList.get_all_by_host(ctxt, self._host)
        src_luns = []
        tgt_luns = []
        for volume in volumes:
            tgt_lun = Lun(volume)
            if tgt_lun.metadata.get('flashcopy') == 'started':
                try:
                    src_vol = objects.Volume.get_by_id(
                        ctxt, tgt_lun.source_volid)
                except exception.VolumeNotFound:
                    # Source gone: nothing to monitor; log and move on.
                    LOG.error("Failed to get source volume %(src)s for "
                              "target volume %(tgt)s",
                              {'src': tgt_lun.source_volid,
                               'tgt': tgt_lun.ds_id})
                else:
                    # Lists stay index-aligned: src_luns[i] copies to
                    # tgt_luns[i] (required by _wait_flashcopy).
                    src_luns.append(Lun(src_vol))
                    tgt_luns.append(tgt_lun)
        if src_luns and tgt_luns:
            eventlet.spawn(self._wait_flashcopy, src_luns, tgt_luns)
    @proxy.logger
    def _do_replication_setup(self, devices, src_helper):
        """Validate replication config and build the Replication manager.

        :param devices: parsed replication_device entries; only one
                        target is supported.
        :raises InvalidParameterValue: on more than one target.
        """
        if len(devices) >= 2:
            raise exception.InvalidParameterValue(
                err=_("Param [replication_device] is invalid, Driver "
                      "support only one replication target."))
        self._replication = replication.Replication(src_helper, devices[0])
        self._replication.check_physical_links()
        self._replication.check_connection_type()
        if self._active_backend_id:
            # Already failed over: address the target site as primary.
            self._replication.switch_source_and_target_client()
        self._replication_enabled = True
@staticmethod
def _b2gb(b):
return b // (2 ** 30)
@proxy._trace_time
def _update_stats(self):
if self._helper:
storage_pools = self._helper.get_pools()
else:
raise exception.VolumeDriverException(
message=(_('Backend %s is not initialized.')
% self.configuration.volume_backend_name))
stats = {
"volume_backend_name":
self.configuration.volume_backend_name,
"serial_number": self._helper.backend['storage_unit'],
"reserved_percentage":
self.configuration.reserved_percentage,
"consistent_group_snapshot_enabled": True,
"group_replication_enabled": True,
"consistent_group_replication_enabled": True,
"multiattach": True,
"vendor_name": 'IBM',
"driver_version": self.full_version,
"storage_protocol": self._helper.get_connection_type(),
"extent_pools": 'None',
"total_capacity_gb": 0,
"free_capacity_gb": 0,
"backend_state": 'up'
}
if not len(storage_pools):
msg = _('No pools found - make sure san_clustername '
'is defined in the config file and that the '
'pools exist on the storage.')
LOG.error(msg)
stats.update({
"extent_pools": 'None',
"total_capacity_gb": 0,
"free_capacity_gb": 0,
"backend_state": 'down'
})
else:
self._helper.update_storage_pools(storage_pools)
stats.update({
"extent_pools": ','.join(p for p in storage_pools.keys()),
"total_capacity_gb": self._b2gb(
sum(p['cap'] for p in storage_pools.values())),
"free_capacity_gb": self._b2gb(
sum(p['capavail'] for p in storage_pools.values())),
"backend_state": 'up'
})
if self._replication_enabled:
stats['replication_enabled'] = self._replication_enabled
self.meta['stat'] = stats
def _assert(self, assert_condition, exception_message=''):
if not assert_condition:
LOG.error(exception_message)
raise exception.VolumeDriverException(message=exception_message)
    @proxy.logger
    def _create_lun_helper(self, lun, pool=None, find_new_pid=True):
        """Place *lun* on a (pool, LSS) and create it, retrying on full LSS.

        Placement priority: explicit pool/lss extra specs, then
        consistency-group reservation, then replication pairing, then the
        helper's free choice.
        """
        connection_type = self._helper.get_connection_type()
        if connection_type == storage.XIV_CONNECTION_TYPE_FC_ECKD:
            if lun.type_thin:
                if self._helper.get_thin_provision():
                    msg = (_("Backend %s can not support ECKD ESE volume.")
                           % self._helper.backend['storage_unit'])
                    LOG.error(msg)
                    raise exception.VolumeDriverException(message=msg)
                if lun.type_replication:
                    target_helper = self._replication.get_target_helper()
                    # PPRC can not copy from ESE volume to standard volume
                    # or vice versa.
                    if target_helper.get_thin_provision():
                        msg = (_("Secondary storage %s can not support ECKD "
                                 "ESE volume.")
                               % target_helper.backend['storage_unit'])
                        LOG.error(msg)
                        raise exception.VolumeDriverException(message=msg)
        # There is a time gap between find available LSS slot and
        # lun actually occupies it.
        excluded_lss = set()
        while True:
            try:
                if lun.specified_pool or lun.specified_lss:
                    lun.pool_lss_pair = {
                        'source': self._find_pool_lss_pair_from_spec(
                            lun, excluded_lss)}
                elif lun.group and (lun.group.consisgroup_snapshot_enabled or
                                    lun.group.consisgroup_replication_enabled):
                    lun.pool_lss_pair = (
                        self._find_pool_lss_pair_for_cg(lun, excluded_lss))
                else:
                    if lun.type_replication and not lun.is_snapshot:
                        lun.pool_lss_pair = (
                            self._replication.find_pool_lss_pair(
                                excluded_lss))
                    else:
                        lun.pool_lss_pair = {
                            'source': self._helper.find_pool_lss_pair(
                                pool, find_new_pid, excluded_lss)}
                return self._helper.create_lun(lun)
            except restclient.LssFullException:
                # Remember the full LSS and retry placement elsewhere;
                # CG placement cannot move, so it fails hard instead.
                excluded_lss.add(lun.pool_lss_pair['source'][1])
                if lun.group and (lun.group.consisgroup_snapshot_enabled or
                                  lun.group.consisgroup_replication_enabled):
                    msg = _("The reserve LSS for CG is full. "
                            "Volume can not be created on it.")
                    LOG.error(msg)
                    raise exception.VolumeDriverException(message=msg)
                else:
                    LOG.warning("LSS %s is full, find another one.",
                                lun.pool_lss_pair['source'][1])
def _find_pool_lss_pair_from_spec(self, lun, excluded_lss):
if lun.group and (lun.group.consisgroup_snapshot_enabled or
lun.group.consisgroup_replication_enabled):
msg = _("No support for specifying pool or lss for "
"volumes that belong to consistency group.")
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
else:
pool, lss = self._helper.find_biggest_pool_and_lss(
excluded_lss, (lun.specified_pool, lun.specified_lss))
return (pool, lss)
@coordination.synchronized('{self.prefix}-consistency-group')
def _find_pool_lss_pair_for_cg(self, lun, excluded_lss):
# NOTE: a group may have multiple LSSs.
lss_pairs_in_cache = self.consisgroup_cache.get(lun.group.id, set())
if not lss_pairs_in_cache:
lss_pairs_in_group = self._get_lss_pairs_in_group(lun.group,
lun.is_snapshot)
LOG.debug("LSSs used by group %(grp)s are %(lss_pair)s.",
{'grp': lun.group.id, 'lss_pair': lss_pairs_in_group})
available_lss_pairs = set(pair for pair in lss_pairs_in_group
if pair[0] != excluded_lss)
else:
available_lss_pairs = set(pair for pair in lss_pairs_in_cache
if pair[0] != excluded_lss)
if not available_lss_pairs:
available_lss_pairs = self._find_lss_pair_for_cg(lun.group,
excluded_lss,
lun.is_snapshot)
pool_lss_pair, lss_pair = self._find_pool_for_lss(available_lss_pairs)
if pool_lss_pair:
lss_pairs_in_cache.add(lss_pair)
self.consisgroup_cache[lun.group.id] = lss_pairs_in_cache
else:
raise exception.VolumeDriverException(
message=(_('There are still some available LSSs %s for CG, '
'but they are not in the same node as pool.')
% available_lss_pairs))
return pool_lss_pair
def _get_lss_pairs_in_group(self, group, is_snapshot=False):
lss_pairs_in_group = set()
if is_snapshot:
luns = [Lun(snapshot, is_snapshot=True)
for snapshot in group.snapshots]
else:
luns = [Lun(volume) for volume in group.volumes]
if group.consisgroup_replication_enabled and not is_snapshot:
lss_pairs_in_group = set((lun.ds_id[:2], lun.replica_ds_id[:2])
for lun in luns if lun.ds_id and
lun.replica_ds_id)
else:
lss_pairs_in_group = set((lun.ds_id[:2], None)
for lun in luns if lun.ds_id)
return lss_pairs_in_group
    def _find_lss_pair_for_cg(self, group, excluded_lss, is_snapshot):
        """Find fresh LSS pairs for a CG from the reserved LSS range.

        Walks all available groups/group-snapshots to learn which reserved
        LSSs are already taken, then returns pairs not yet in use.
        """
        lss_pairs_used = set()
        ctxt = context.get_admin_context()
        filters_groups = {'host': group.host, 'status': 'available'}
        groups = objects.GroupList.get_all(ctxt, filters=filters_groups)
        for grp in groups:
            grp = Group(grp)
            if (grp.consisgroup_snapshot_enabled or
                    grp.consisgroup_replication_enabled):
                lss_pairs_used |= self._get_lss_pairs_in_group(grp)
                filters_group_snapshots = {'status': 'available'}
                group_snapshots = objects.GroupSnapshotList.get_all_by_group(
                    ctxt, grp.id, filters=filters_group_snapshots)
                for sgrp in group_snapshots:
                    sgrp = Group(sgrp, True)
                    if (sgrp.consisgroup_snapshot_enabled or
                            sgrp.consisgroup_replication_enabled):
                        lss_pairs_used |= self._get_lss_pairs_in_group(sgrp,
                                                                       True)
        # in order to keep one-to-one pprc mapping relationship, zip LSSs
        # which reserved by user.
        if not is_snapshot:
            if group.consisgroup_replication_enabled:
                target_helper = self._replication.get_target_helper()
                source_lss_for_cg = self._helper.backend['lss_ids_for_cg']
                target_lss_for_cg = target_helper.backend['lss_ids_for_cg']
                available_lss_pairs = zip(source_lss_for_cg, target_lss_for_cg)
            else:
                available_lss_pairs = [(lss, None) for lss in
                                       self._helper.backend['lss_ids_for_cg']]
            source_lss_used = set()
            for lss_pair in lss_pairs_used:
                source_lss_used.add(lss_pair[0])
            # in concurrency case, lss may be reversed in cache but the group
            # has not been committed into DB.
            for lss_pairs_set in self.consisgroup_cache.values():
                source_lss_used |= set(
                    lss_pair[0] for lss_pair in lss_pairs_set)
            available_lss_pairs = [lss_pair for lss_pair in available_lss_pairs
                                   if lss_pair[0] not in source_lss_used]
            self._assert(available_lss_pairs,
                         "All LSSs reserved for CG have been used out, "
                         "please reserve more LSS for CG if there are still "
                         "some empty LSSs left.")
        else:
            available_lss_pairs = set()
            # NOTE(review): lss_pairs_used holds (src, tgt) tuples while
            # excluded_lss holds bare LSS id strings; the union mixes the
            # two types, so _find_lss's exclusion check may never match
            # the tuple entries — confirm against helper._find_lss.
            excluded_lss |= lss_pairs_used
            for node in (0, 1):
                available_lss_pairs |= {(self._helper._find_lss(
                    node, excluded_lss), None)}
            if not available_lss_pairs:
                raise restclient.LssIDExhaustError(
                    message=_('All LSS/LCU IDs for configured pools '
                              'on storage are exhausted.'))
        LOG.debug('_find_lss_pair_for_cg: available LSSs for consistency '
                  'group are %s', available_lss_pairs)
        return available_lss_pairs
    @proxy.logger
    def _find_pool_for_lss(self, available_lss_pairs):
        """Return (pool_lss_pair dict, chosen lss_pair) for the first pair
        whose pool(s) can be resolved.

        :raises VolumeDriverException: if no pair maps to a pool.
        """
        # all LSS pairs have target LSS or do not have.
        for src_lss, tgt_lss in available_lss_pairs:
            src_pid = self._helper.get_pool(src_lss)
            if not src_pid:
                continue
            if tgt_lss:
                target_helper = self._replication.get_target_helper()
                tgt_pid = target_helper.get_pool(tgt_lss)
                if tgt_pid:
                    return ({'source': (src_pid, src_lss),
                             'target': (tgt_pid, tgt_lss)},
                            (src_lss, tgt_lss))
            else:
                return {'source': (src_pid, src_lss)}, (src_lss, tgt_lss)
        raise exception.VolumeDriverException(
            message=(_("Can not find pool for LSSs %s.")
                     % available_lss_pairs))
    @proxy.logger
    def _clone_lun(self, src_lun, tgt_lun):
        """FlashCopy *src_lun* onto *tgt_lun*, creating the target if needed.

        Synchronous by default; with async_clone the copy is watched by a
        background greenthread and tgt metadata marks it 'started'.
        """
        self._assert(src_lun.size <= tgt_lun.size,
                     _('Target volume should be bigger or equal '
                       'to the Source volume in size.'))
        self._ensure_vol_not_fc_target(src_lun.ds_id)
        # image volume cache brings two cases for clone lun:
        # 1. volume ID of src_lun and tgt_lun will be the same one because
        #    _clone_image_volume does not pop the provider_location.
        # 2. if creating image volume failed at the first time, tgt_lun will
        #    be deleted, so when it is sent to driver again, it will not
        #    exist.
        if (tgt_lun.ds_id is None or
                src_lun.ds_id == tgt_lun.ds_id or
                not self._helper.lun_exists(tgt_lun.ds_id)):
            # It is a preferred practice to locate the FlashCopy target
            # volume on the same DS8000 server as the FlashCopy source
            # volume.
            pool = self._helper.get_pool(src_lun.ds_id[0:2])
            # flashcopy to larger target only works with thick vols, so we
            # emulate for thin by extending after copy
            if tgt_lun.type_thin and tgt_lun.size > src_lun.size:
                tmp_size = tgt_lun.size
                tgt_lun.size = src_lun.size
                self._create_lun_helper(tgt_lun, pool)
                tgt_lun.size = tmp_size
            else:
                self._create_lun_helper(tgt_lun, pool)
        else:
            self._assert(
                src_lun.size == tgt_lun.size,
                _('When target volume is pre-created, it must be equal '
                  'in size to source volume.'))
        vol_pairs = [{
            "source_volume": src_lun.ds_id,
            "target_volume": tgt_lun.ds_id
        }]
        try:
            self._helper.start_flashcopy(vol_pairs)
            # Thin-with-extend must be synchronous (we resize after the
            # copy); everything else honors async_clone.
            if ((tgt_lun.type_thin and tgt_lun.size > src_lun.size) or
                    (not tgt_lun.async_clone)):
                self._helper.wait_flashcopy_finished([src_lun], [tgt_lun])
                if (tgt_lun.status == 'available' and
                        tgt_lun.type_thin and
                        tgt_lun.size > src_lun.size):
                    param = {
                        'cap': self._helper._gb2b(tgt_lun.size),
                        'captype': 'bytes'
                    }
                    self._helper.change_lun(tgt_lun.ds_id, param)
            else:
                LOG.info("Clone volume %(tgt)s from volume %(src)s "
                         "in the background.",
                         {'src': src_lun.ds_id, 'tgt': tgt_lun.ds_id})
                tgt_lun.metadata['flashcopy'] = "started"
                eventlet.spawn(self._wait_flashcopy, [src_lun], [tgt_lun])
        finally:
            # Synchronous failures clean up the half-built target.
            if not tgt_lun.async_clone and tgt_lun.status == 'error':
                self._helper.delete_lun(tgt_lun)
        return tgt_lun
    def _wait_flashcopy(self, src_luns, tgt_luns):
        """Background watcher: block until FlashCopies finish, then record
        success/error in each target volume's metadata and save it.

        src_luns[i] must correspond to tgt_luns[i].
        """
        # please note that the order of volumes should be fixed.
        self._helper.wait_flashcopy_finished(src_luns, tgt_luns)
        for src_lun, tgt_lun in zip(src_luns, tgt_luns):
            if tgt_lun.status == 'available':
                tgt_lun.volume.metadata['flashcopy'] = 'success'
            elif tgt_lun.status == 'error':
                tgt_lun.volume.metadata['flashcopy'] = "error"
                tgt_lun.volume.metadata['error_msg'] = (
                    "FlashCopy from source volume %(src)s to target volume "
                    "%(tgt)s fails, the state of target volume %(id)s is set "
                    "to error." % {'src': src_lun.ds_id,
                                   'tgt': tgt_lun.ds_id,
                                   'id': tgt_lun.os_id})
                tgt_lun.volume.status = 'error'
                self._helper.delete_lun(tgt_lun)
            else:
                # Unexpected state: drop the target and abort; note that
                # volumes later in the list are not saved in this case.
                self._helper.delete_lun(tgt_lun)
                raise exception.VolumeDriverException(
                    message=_("Volume %(id)s is in unexpected state "
                              "%(state)s.") % {'id': tgt_lun.ds_id,
                                               'state': tgt_lun.status})
            tgt_lun.volume.save()
def _ensure_vol_not_fc_target(self, vol_hex_id):
for cp in self._helper.get_flashcopy(vol_hex_id):
if cp['targetvolume']['id'] == vol_hex_id:
raise restclient.APIException(
data=(_('Volume %s is currently a target of another '
'FlashCopy operation') % vol_hex_id))
def _create_replica_helper(self, lun):
if not lun.pool_lss_pair.get('target'):
lun = self._replication.establish_replication(lun, True)
else:
lun = self._replication.create_replica(lun)
return lun
@proxy._trace_time
def create_volume(self, volume):
lun = self._create_lun_helper(Lun(volume))
if lun.type_replication:
lun = self._create_replica_helper(lun)
return lun.get_volume_update()
@proxy._trace_time
def create_cloned_volume(self, target_vol, source_vol):
lun = self._clone_lun(Lun(source_vol), Lun(target_vol))
if lun.type_replication:
lun = self._create_replica_helper(lun)
return lun.get_volume_update()
@proxy._trace_time
def create_volume_from_snapshot(self, volume, snapshot):
lun = self._clone_lun(Lun(snapshot, is_snapshot=True), Lun(volume))
if lun.type_replication:
lun = self._create_replica_helper(lun)
return lun.get_volume_update()
    @proxy._trace_time
    def extend_volume(self, volume, new_size):
        """Grow the backend LUN to *new_size* GiB.

        Replicated volumes must tear down and rebuild the PPRC pair
        around the resize of both sites; extending a failed-over volume
        is rejected.
        """
        lun = Lun(volume)
        param = {
            'cap': self._helper._gb2b(new_size),
            'captype': 'bytes'
        }
        if lun.type_replication:
            if not self._active_backend_id:
                # Order matters: pair must be removed before resizing
                # either side, then re-created.
                self._replication.delete_pprc_pairs(lun)
                self._helper.change_lun(lun.ds_id, param)
                self._replication.extend_replica(lun, param)
                self._replication.create_pprc_pairs(lun)
            else:
                raise exception.VolumeDriverException(
                    message=(_("The volume %s has been failed over, it is "
                               "not suggested to extend it.") % lun.ds_id))
        else:
            self._helper.change_lun(lun.ds_id, param)
@proxy._trace_time
def volume_exists(self, volume):
return self._helper.lun_exists(Lun(volume).ds_id)
@proxy._trace_time
def delete_volume(self, volume):
lun = Lun(volume)
if lun.type_replication:
lun = self._replication.delete_replica(lun)
self._helper.delete_lun(lun)
@proxy._trace_time
def create_snapshot(self, snapshot):
return self._clone_lun(Lun(snapshot['volume']), Lun(
snapshot, is_snapshot=True)).get_volume_update()
@proxy._trace_time
def delete_snapshot(self, snapshot):
self._helper.delete_lun(Lun(snapshot, is_snapshot=True))
    @proxy._trace_time
    def migrate_volume(self, ctxt, volume, backend):
        """Move a volume between pools of the same physical DS8K.

        Tries an in-place pool change on the same node first, then a
        create+FlashCopy+delete cycle on the opposite node.

        :returns: (moved, model_update) per the Cinder driver contract.
        """
        # this and retype is a complete mess, pending cinder changes for fix.
        # currently this is only for migrating between pools on the same
        # physical machine but different cinder.conf backends.
        # volume not allowed to get here if cg or repl
        # should probably check volume['status'] in ['available', 'in-use'],
        # especially for flashcopy
        lun = Lun(volume)
        if lun.type_replication:
            raise exception.VolumeDriverException(
                message=_('Driver does not support migrate replicated '
                          'volume, it can be done via retype.'))
        stats = self.meta['stat']
        if backend['capabilities']['vendor_name'] != stats['vendor_name']:
            raise exception.VolumeDriverException(_(
                'source and destination vendors differ.'))
        if backend['capabilities']['serial_number'] != stats['serial_number']:
            raise exception.VolumeDriverException(_(
                'source and destination serial numbers differ.'))
        new_pools = self._helper.get_pools(
            backend['capabilities']['extent_pools'])
        cur_pool_id = self._helper.get_lun_pool(lun.ds_id)['id']
        cur_node = self._helper.get_storage_pools()[cur_pool_id]['node']
        # try pools in same rank
        for pid, pool in new_pools.items():
            if pool['node'] == cur_node:
                try:
                    self._helper.change_lun(lun.ds_id, {'pool': pid})
                    return (True, None)
                except Exception:
                    # Best-effort: fall through to the next candidate pool.
                    pass
        # try pools in opposite rank
        for pid, pool in new_pools.items():
            if pool['node'] != cur_node:
                try:
                    new_lun = lun.shallow_copy()
                    self._create_lun_helper(new_lun, pid, False)
                    self._clone_lun(lun, new_lun)
                    volume_update = new_lun.update_volume(lun)
                    try:
                        self._helper.delete_lun(lun)
                    except Exception:
                        # Old LUN left behind is tolerable; migration
                        # itself succeeded.
                        pass
                    return (True, volume_update)
                except Exception:
                    # will ignore missing ds_id if failed create volume
                    self._helper.delete_lun(new_lun)
        return (False, None)
    @proxy._trace_time
    def retype(self, ctxt, volume, new_type, diff, host):
        """retype the volume.
        :param ctxt: Context
        :param volume: A dictionary describing the volume to migrate
        :param new_type: A dictionary describing the volume type to convert to
        :param diff: A dictionary with the difference between the two types
        :param host: A dictionary describing the host to migrate to, where
                     host['host'] is its name, and host['capabilities'] is a
                     dictionary of its reported capabilities.
        """
        def _check_extra_specs(key, value=None):
            # Returns the (old, new) values of one extra-spec from the type
            # diff.  When ``value`` is given, both are normalized to booleans
            # (True iff the spec equals ``value`` case-insensitively).
            extra_specs = diff.get('extra_specs')
            specific_type = extra_specs.get(key) if extra_specs else None
            old_type = None
            new_type = None
            if specific_type:
                old_type, new_type = specific_type
                if value:
                    old_type = (True if old_type and old_type.upper() == value
                                else False)
                    new_type = (True if new_type and new_type.upper() == value
                                else False)
            return old_type, new_type
        lun = Lun(volume)
        # check user specify pool or lss or not
        old_specified_pool, new_specified_pool = _check_extra_specs(
            'drivers:storage_pool_ids')
        old_specified_lss, new_specified_lss = _check_extra_specs(
            'drivers:storage_lss_ids')
        # check thin or thick
        old_type_thick, new_type_thick = _check_extra_specs(
            'drivers:thin_provision', 'FALSE')
        # check replication capability
        old_type_replication, new_type_replication = _check_extra_specs(
            'replication_enabled', strings.METADATA_IS_TRUE)
        # check multiattach capability
        old_multiattach, new_multiattach = _check_extra_specs(
            'multiattach', strings.METADATA_IS_TRUE)
        # start retype, please note that the order here is important
        # because of rollback problem once failed to retype.
        new_props = {}
        if old_type_thick != new_type_thick:
            # Thin<->thick conversion always requires a new LUN.
            new_props['type_thin'] = not new_type_thick
        if (old_specified_pool == new_specified_pool and
                old_specified_lss == new_specified_lss):
            LOG.info("Same pool and lss.")
        elif ((old_specified_pool or old_specified_lss) and
              (new_specified_pool or new_specified_lss)):
            raise exception.VolumeDriverException(
                message=_("Retype does not support to move volume from "
                          "specified pool or lss to another specified "
                          "pool or lss."))
        elif ((old_specified_pool is None and new_specified_pool) or
              (old_specified_lss is None and new_specified_lss)):
            storage_pools = self._helper.get_pools(new_specified_pool)
            self._helper.verify_pools(storage_pools)
            storage_lss = self._helper.verify_lss_ids(new_specified_lss)
            vol_pool = self._helper.get_lun_pool(lun.ds_id)['id']
            # First two hex characters of the DS8K volume id are its LSS.
            vol_lss = lun.ds_id[:2].upper()
            # if old volume is in the specified LSS, but it is needed
            # to be changed from thin to thick or vice versa, driver
            # needs to make sure the new volume will be created in the
            # specified LSS.
            if ((storage_lss and vol_lss not in storage_lss) or
                    new_props.get('type_thin')):
                new_props['specified_pool'] = new_specified_pool
                new_props['specified_lss'] = new_specified_lss
            elif vol_pool not in storage_pools.keys():
                # Keep the volume on the same rank (even/odd LSS -> node).
                vol_node = int(vol_lss, 16) % 2
                new_pool_id = None
                for pool_id, pool in storage_pools.items():
                    if vol_node == pool['node']:
                        new_pool_id = pool_id
                        break
                if new_pool_id:
                    self._helper.change_lun(lun.ds_id, {'pool': new_pool_id})
                else:
                    raise exception.VolumeDriverException(
                        message=_("Can not change the pool volume allocated."))
        new_lun = None
        if new_props:
            # A property change requiring a new LUN: clone into a copy that
            # carries the new properties.
            new_lun = lun.shallow_copy()
            for key, value in new_props.items():
                setattr(new_lun, key, value)
            self._clone_lun(lun, new_lun)
        volume_update = None
        if new_lun:
            # if new lun meets all requirements of retype successfully,
            # exception happens during clean up can be ignored.
            if new_type_replication:
                new_lun.type_replication = True
                new_lun = self._replication.establish_replication(new_lun,
                                                                  True)
            elif old_type_replication:
                new_lun.type_replication = False
                try:
                    self._replication.delete_replica(lun)
                except Exception:
                    pass
            if new_multiattach:
                new_lun.multiattach = True
            elif old_multiattach:
                new_lun.multiattach = False
            try:
                self._helper.delete_lun(lun)
            except Exception:
                pass
            volume_update = new_lun.update_volume(lun)
        else:
            # if driver does not create new lun, don't delete source
            # lun when failed to enable replication or delete replica.
            if not old_type_replication and new_type_replication:
                lun.type_replication = True
                lun = self._replication.establish_replication(lun)
            elif old_type_replication and not new_type_replication:
                lun = self._replication.delete_replica(lun)
                lun.type_replication = False
            if not old_multiattach and new_multiattach:
                lun.multiattach = True
            elif old_multiattach and not new_multiattach:
                lun.multiattach = False
            volume_update = lun.get_volume_update()
        return True, volume_update
@proxy._trace_time
@proxy.logger
def initialize_connection(self, volume, connector, **kwargs):
"""Attach a volume to the host."""
lun = Lun(volume)
LOG.info('Attach the volume %s.', lun.ds_id)
if lun.group and lun.failed_over:
backend_helper = self._replication.get_target_helper()
else:
backend_helper = self._helper
return backend_helper.initialize_connection(lun.ds_id, connector,
**kwargs)
    @proxy._trace_time
    @proxy.logger
    def terminate_connection(self, volume, connector, force=False, **kwargs):
        """Detach a volume from a host."""
        # Default empty FC response returned when there is nothing to unmap.
        ret_info = {
            'driver_volume_type': 'fibre_channel',
            'data': {}
        }
        lun = Lun(volume)
        # Failed-over group member while this backend is still primary:
        # detach on the replication target instead.
        if (lun.group and lun.failed_over) and not self._active_backend_id:
            backend_helper = self._replication.get_target_helper()
        else:
            backend_helper = self._helper
        if isinstance(backend_helper, helper.DS8KECKDHelper):
            # ECKD helper has its own terminate signature (takes force).
            LOG.info('Detach the volume %s.', lun.ds_id)
            return backend_helper.terminate_connection(lun.ds_id, connector,
                                                       force, **kwargs)
        else:
            vol_mapped, host_id, map_info = (
                backend_helper.check_vol_mapped_to_host(connector, lun.ds_id))
            if host_id is None or not vol_mapped:
                # Nothing mapped on this site; for non-replicated volumes
                # there is nothing left to do.
                if host_id is None and not lun.type_replication:
                    LOG.warning('Failed to find the Host information.')
                    return ret_info
                if host_id and not lun.type_replication and not vol_mapped:
                    LOG.warning("Volume %(vol)s is already not mapped to "
                                "host %(host)s.",
                                {'vol': lun.ds_id, 'host': host_id})
                    return ret_info
                if lun.type_replication:
                    # Replicated volume: retry the detach on the other site.
                    if backend_helper == self._replication.get_target_helper():
                        backend_helper = self._replication.get_source_helper()
                    else:
                        backend_helper = self._replication.get_target_helper()
                    try:
                        if backend_helper.lun_exists(lun.replica_ds_id):
                            LOG.info('Detaching volume %s from the '
                                     'Secondary site.', lun.replica_ds_id)
                            mapped, host_id, map_info = (
                                backend_helper.check_vol_mapped_to_host(
                                    connector, lun.replica_ds_id))
                        else:
                            msg = (_('Failed to find the attached '
                                     'Volume %s.') % lun.ds_id)
                            LOG.error(msg)
                            raise exception.VolumeDriverException(message=msg)
                    except Exception as ex:
                        # Best effort on the secondary: log and report an
                        # empty mapping rather than failing the detach.
                        LOG.warning('Failed to get host mapping for volume '
                                    '%(volume)s in the secondary site. '
                                    'Exception: %(err)s.',
                                    {'volume': lun.replica_ds_id, 'err': ex})
                        return ret_info
                    if not mapped:
                        return ret_info
                    else:
                        LOG.info('Detach the volume %s.', lun.replica_ds_id)
                        return backend_helper.terminate_connection(
                            lun.replica_ds_id, host_id, connector, map_info)
            elif host_id and vol_mapped:
                # Normal case: volume mapped on this site, unmap it here.
                LOG.info('Detaching volume %s.', lun.ds_id)
                return backend_helper.terminate_connection(lun.ds_id, host_id,
                                                           connector, map_info)
    @proxy.logger
    def create_group(self, ctxt, group):
        """Create consistency group of FlashCopy or RemoteCopy.

        Raises NotImplementedError for plain groups so base Cinder
        handles them; asserts all member volume types are replication
        types when the group itself is replication-enabled.
        """
        model_update = {}
        grp = Group(group)
        # verify replication.
        if (grp.group_replication_enabled or
                grp.consisgroup_replication_enabled):
            for volume_type in group.volume_types:
                replication_type = utils.is_replicated_spec(
                    volume_type.extra_specs)
                self._assert(replication_type,
                             'Unable to create group: group %(grp)s '
                             'is for replication type, but volume '
                             '%(vtype)s is a non-replication one.'
                             % {'grp': grp.id, 'vtype': volume_type.id})
            model_update['replication_status'] = (
                fields.ReplicationStatus.ENABLED)
        # verify consistency group.
        if (grp.consisgroup_snapshot_enabled or
                grp.consisgroup_replication_enabled):
            # CGs need LSSes reserved up front via lss_range_for_cg.
            self._assert(self._helper.backend['lss_ids_for_cg'],
                         'No LSS(s) for CG, please make sure you have '
                         'reserved LSS for CG via param lss_range_for_cg.')
            if grp.consisgroup_replication_enabled:
                self._helper.verify_rest_version_for_pprc_cg()
                target_helper = self._replication.get_target_helper()
                target_helper.verify_rest_version_for_pprc_cg()
        # driver will create replication group because base cinder
        # doesn't update replication_status of the group, otherwise
        # base cinder can take over it.
        if (grp.consisgroup_snapshot_enabled or
                grp.consisgroup_replication_enabled or
                grp.group_replication_enabled):
            model_update.update(self._helper.create_group(group))
            return model_update
        else:
            raise NotImplementedError()
    @proxy.logger
    def delete_group(self, ctxt, group, volumes):
        """Delete consistency group and volumes in it.

        Raises NotImplementedError for group types this driver does not
        manage, so base Cinder handles those itself.
        """
        grp = Group(group)
        if grp.consisgroup_snapshot_enabled:
            luns = [Lun(volume) for volume in volumes]
            return self._delete_group_with_lock(group, luns)
        elif grp.consisgroup_replication_enabled:
            # A failed-over replication CG must be failed back first.
            self._assert(not grp.failed_over,
                         'Group %s has been failed over, it does '
                         'not support to delete it' % grp.id)
            luns = [Lun(volume) for volume in volumes]
            # Tear down the PPRC replicas before deleting the group.
            for lun in luns:
                self._replication.delete_replica(lun)
            return self._delete_group_with_lock(group, luns)
        else:
            raise NotImplementedError()
@coordination.synchronized('{self.prefix}-consistency-group')
def _delete_group_with_lock(self, group, luns):
model_update, volumes_model_update = (
self._helper.delete_group(group, luns))
if model_update['status'] == fields.GroupStatus.DELETED:
self._remove_record_from_consisgroup_cache(group.id)
return model_update, volumes_model_update
@proxy.logger
def delete_group_snapshot(self, ctxt, group_snapshot, snapshots):
"""Delete volume group snapshot."""
grp = Group(group_snapshot, True)
if (grp.consisgroup_snapshot_enabled or
grp.consisgroup_replication_enabled):
tgt_luns = [Lun(s, is_snapshot=True) for s in snapshots]
return self._delete_group_snapshot_with_lock(
group_snapshot, tgt_luns)
else:
raise NotImplementedError()
@coordination.synchronized('{self.prefix}-consistency-group')
def _delete_group_snapshot_with_lock(self, group_snapshot, tgt_luns):
model_update, snapshots_model_update = (
self._helper.delete_group_snapshot(group_snapshot, tgt_luns))
if model_update['status'] == fields.GroupStatus.DELETED:
self._remove_record_from_consisgroup_cache(group_snapshot.id)
return model_update, snapshots_model_update
    @proxy.logger
    def create_group_snapshot(self, ctxt, group_snapshot, snapshots):
        """Create volume group snapshot.

        Clones every member volume into a snapshot LUN via FlashCopy and
        returns (group model update, per-snapshot model updates).
        """
        tgt_group = Group(group_snapshot, True)
        if (not tgt_group.consisgroup_snapshot_enabled and
                not tgt_group.consisgroup_replication_enabled):
            raise NotImplementedError()
        src_group = Group(group_snapshot.group)
        self._assert(not src_group.failed_over,
                     'Group %s has been failed over, it does not '
                     'support to create group snapshot.' % src_group.id)
        snapshots_model_update = []
        model_update = {'status': fields.GroupStatus.AVAILABLE}
        src_luns = [Lun(snapshot.volume) for snapshot in snapshots]
        tgt_luns = [Lun(snapshot, is_snapshot=True) for snapshot in snapshots]
        try:
            if src_luns and tgt_luns:
                self._clone_group(src_luns, tgt_luns)
        except restclient.APIException:
            # Report the whole group as errored; individual snapshot
            # statuses below mirror the group status.
            model_update['status'] = fields.GroupStatus.ERROR
            LOG.exception('Failed to create group snapshot.')
        for tgt_lun in tgt_luns:
            snapshot_model_update = tgt_lun.get_volume_update()
            snapshot_model_update.update({
                'id': tgt_lun.os_id,
                'status': model_update['status']
            })
            snapshots_model_update.append(snapshot_model_update)
        return model_update, snapshots_model_update
@proxy.logger
def update_group(self, ctxt, group, add_volumes, remove_volumes):
"""Update generic volume group."""
grp = Group(group)
if (grp.consisgroup_snapshot_enabled or
grp.consisgroup_replication_enabled):
self._assert(not grp.failed_over,
'Group %s has been failed over, it does not '
'support to update it.' % grp.id)
return self._update_consisgroup(grp, add_volumes, remove_volumes)
else:
raise NotImplementedError()
def _update_consisgroup(self, grp, add_volumes, remove_volumes):
add_volumes_update = []
if add_volumes:
add_volumes_update = self._add_volumes_into_consisgroup(
grp, add_volumes)
remove_volumes_update = []
if remove_volumes:
remove_volumes_update = self._remove_volumes_from_consisgroup(
grp, add_volumes, remove_volumes)
return None, add_volumes_update, remove_volumes_update
    @proxy.logger
    def _add_volumes_into_consisgroup(self, grp, add_volumes):
        """Clone volumes into the CG's LSS and return their model updates.

        Rejects any 'in-use' volume up front, since moving it requires
        re-creating the LUN in a reserved LSS.
        """
        add_volumes_update = []
        for vol in add_volumes:
            if vol.status == 'in-use':
                msg = (_("add volume %(vol)s into group %(grp)s failed "
                         "since this volume is 'in-use' status")
                       % {'vol': vol.id, 'grp': grp.id})
                LOG.error(msg)
                raise exception.VolumeDriverException(message=msg)
        # Re-create each LUN inside the group so it lands in a CG LSS.
        new_add_luns, old_add_luns = (
            self._clone_lun_for_consisgroup(add_volumes, grp))
        for new_add_lun, old_add_lun in zip(new_add_luns, old_add_luns):
            volume_update = new_add_lun.update_volume(old_add_lun)
            volume_update['id'] = new_add_lun.os_id
            add_volumes_update.append(volume_update)
        return add_volumes_update
    @proxy.logger
    @coordination.synchronized('{self.prefix}-consistency-group')
    def _remove_volumes_from_consisgroup(self, grp, add_volumes,
                                         remove_volumes):
        """Clone volumes out of the CG's LSS and return their model updates.

        Holds the CG coordination lock; rejects 'in-use' volumes because
        removal re-creates the LUN outside the reserved LSS.
        """
        remove_volumes_update = []
        for vol in remove_volumes:
            if vol.status == 'in-use':
                msg = (_("remove volume %(vol)s from group %(grp)s failed "
                         "since this volume is 'in-use' status")
                       % {'vol': vol.id, 'grp': grp.id})
                LOG.error(msg)
                raise exception.VolumeDriverException(message=msg)
        # grp=None default detaches the clones from any group/CG LSS.
        new_remove_luns, old_remove_luns = (
            self._clone_lun_for_consisgroup(remove_volumes))
        for new_remove_lun, old_remove_lun in zip(new_remove_luns,
                                                  old_remove_luns):
            volume_update = new_remove_lun.update_volume(old_remove_lun)
            volume_update['id'] = new_remove_lun.os_id
            remove_volumes_update.append(volume_update)
        # If every member is being removed (accounting for ones just
        # added), the group's cached LSS reservation can be dropped.
        if len(remove_volumes) == len(grp.volumes) + len(add_volumes):
            self._remove_record_from_consisgroup_cache(grp.id)
        return remove_volumes_update
    def _clone_lun_for_consisgroup(self, volumes, grp=None):
        """Re-create each volume's LUN under ``grp`` (or no group if None).

        Returns (new_luns, old_luns) lists in matching order.  Volumes
        without a backend ds_id are skipped.
        """
        new_luns = []
        old_luns = []
        for volume in volumes:
            old_lun = Lun(volume)
            if old_lun.ds_id:
                # Clone into a copy bound to the target group, then drop
                # the original LUN (and its replica, if any).
                new_lun = old_lun.shallow_copy()
                new_lun.group = grp
                self._clone_lun(old_lun, new_lun)
                if old_lun.type_replication:
                    new_lun = self._create_replica_helper(new_lun)
                    old_lun = self._replication.delete_replica(old_lun)
                self._helper.delete_lun(old_lun)
                new_luns.append(new_lun)
                old_luns.append(old_lun)
        return new_luns, old_luns
@proxy.logger
def _remove_record_from_consisgroup_cache(self, group_id):
lss_pairs = self.consisgroup_cache.get(group_id)
if lss_pairs:
LOG.debug('Consistecy Group %(id)s owns LSS %(lss)s in the cache.',
{'id': group_id, 'lss': lss_pairs})
self.consisgroup_cache.pop(group_id)
    @proxy._trace_time
    def create_group_from_src(self, ctxt, group, volumes, group_snapshot,
                              sorted_snapshots, source_group,
                              sorted_source_vols):
        """Create volume group from volume group or volume group snapshot.

        Exactly one of (group_snapshot, sorted_snapshots) or
        (source_group, sorted_source_vols) must be provided.
        """
        grp = Group(group)
        if (not grp.consisgroup_snapshot_enabled and
                not grp.consisgroup_replication_enabled and
                not grp.group_replication_enabled):
            raise NotImplementedError()
        model_update = {
            'status': fields.GroupStatus.AVAILABLE,
            'replication_status': fields.ReplicationStatus.DISABLED
        }
        if (grp.group_replication_enabled or
                grp.consisgroup_replication_enabled):
            model_update['replication_status'] = (
                fields.ReplicationStatus.ENABLED)
        volumes_model_update = []
        # Pick the clone sources: snapshots of a group snapshot, or the
        # member volumes of a source group.
        if group_snapshot and sorted_snapshots:
            src_luns = [Lun(snapshot, is_snapshot=True)
                        for snapshot in sorted_snapshots]
        elif source_group and sorted_source_vols:
            src_luns = [Lun(source_vol)
                        for source_vol in sorted_source_vols]
            src_group = Group(source_group)
            self._assert(not src_group.failed_over,
                         'Group %s has been failed over, it does not '
                         'support to create a group from it.' % src_group.id)
        else:
            msg = _("_create_group_from_src supports a group snapshot "
                    "source or a group source, other sources can not "
                    "be used.")
            LOG.error(msg)
            raise exception.InvalidInput(message=msg)
        try:
            tgt_luns = [Lun(volume) for volume in volumes]
            if src_luns and tgt_luns:
                self._clone_group(src_luns, tgt_luns)
            # Establish replicas for replication-typed targets.
            for tgt_lun in tgt_luns:
                if tgt_lun.type_replication:
                    self._create_replica_helper(tgt_lun)
        except restclient.APIException:
            model_update['status'] = fields.GroupStatus.ERROR
            LOG.exception("Failed to create group from group snapshot.")
        for tgt_lun in tgt_luns:
            volume_model_update = tgt_lun.get_volume_update()
            volume_model_update.update({
                'id': tgt_lun.os_id,
                'status': model_update['status'],
                'replication_status': model_update['replication_status']
            })
            volumes_model_update.append(volume_model_update)
        return model_update, volumes_model_update
    def _clone_group(self, src_luns, tgt_luns):
        """FlashCopy each source LUN onto its paired target LUN.

        Creates missing target LUNs in the source's pool first; on any
        failure, deletes the targets that ended up in 'error' status.
        """
        for src_lun in src_luns:
            # A LUN that is already a FlashCopy target cannot be a source.
            self._ensure_vol_not_fc_target(src_lun.ds_id)
        try:
            vol_pairs = []
            for src_lun, tgt_lun in zip(src_luns, tgt_luns):
                pool = self._helper.get_pool(src_lun.ds_id[0:2])
                if tgt_lun.ds_id is None:
                    self._create_lun_helper(tgt_lun, pool)
                vol_pairs.append({
                    "source_volume": src_lun.ds_id,
                    "target_volume": tgt_lun.ds_id
                })
            # NOTE: tgt_lun here is the last loop value; all targets are
            # assumed to share the same group settings.
            if tgt_lun.group.consisgroup_snapshot_enabled:
                self._do_flashcopy_with_freeze(vol_pairs)
            else:
                self._helper.start_flashcopy(vol_pairs)
            self._helper.wait_flashcopy_finished(src_luns, tgt_luns)
        finally:
            # if one of volume failed, delete all volumes.
            error_luns = [lun for lun in tgt_luns if lun.status == 'error']
            if error_luns:
                self._helper.delete_lun(tgt_luns)
@coordination.synchronized('{self.prefix}-consistency-group')
@proxy._trace_time
def _do_flashcopy_with_freeze(self, vol_pairs):
# issue flashcopy with freeze
self._helper.start_flashcopy(vol_pairs, True)
# unfreeze the LSS where source volumes are in
lss_ids = list(set(p['source_volume'][0:2] for p in vol_pairs))
LOG.debug('Unfreezing the LSS: %s', ','.join(lss_ids))
self._helper.unfreeze_lss(lss_ids)
    def freeze_backend(self, ctxt):
        """Notify the backend that it's frozen."""
        # No backend-side action is required for DS8K.
        pass
    def thaw_backend(self, ctxt):
        """Notify the backend that it's unfrozen/thawed."""
        # No backend-side action is required for DS8K.
        pass
@proxy.logger
@proxy._trace_time
def failover_host(self, ctxt, volumes, secondary_id, groups=None):
"""Fail over the volume back and forth.
if secondary_id is 'default', volumes will be failed back,
otherwize failed over.
"""
volume_update_list = []
if secondary_id == strings.PRIMARY_BACKEND_ID:
if not self._active_backend_id:
LOG.info("Host has been failed back. doesn't need "
"to fail back again.")
return self._active_backend_id, volume_update_list, []
else:
if self._active_backend_id:
LOG.info("Host has been failed over to %s.",
self._active_backend_id)
return self._active_backend_id, volume_update_list, []
target_helper = self._replication.get_target_helper()
if secondary_id is None:
secondary_id = target_helper.backend['id']
elif secondary_id != target_helper.backend['id']:
raise exception.InvalidReplicationTarget(
message=(_('Invalid secondary_backend_id specified. '
'Valid backend id is %s.')
% target_helper.backend['id']))
LOG.debug("Starting failover host to %s.", secondary_id)
# all volumes passed to failover_host are replicated.
replicated_luns = [Lun(volume) for volume in volumes if
volume.status in ('available', 'in-use')]
# volumes in group may have been failed over.
if secondary_id != strings.PRIMARY_BACKEND_ID:
failover_luns = [lun for lun in replicated_luns if
not lun.failed_over]
else:
failover_luns = [lun for lun in replicated_luns if
lun.failed_over]
if failover_luns:
try:
if secondary_id != strings.PRIMARY_BACKEND_ID:
self._replication.start_host_pprc_failover(
failover_luns, secondary_id)
self._active_backend_id = secondary_id
else:
self._replication.start_host_pprc_failback(
failover_luns, secondary_id)
self._active_backend_id = ""
self._helper = self._replication.get_source_helper()
except restclient.APIException as e:
raise exception.UnableToFailOver(
reason=(_("Unable to failover host to %(id)s. "
"Exception= %(ex)s")
% {'id': secondary_id, 'ex': six.text_type(e)}))
for lun in failover_luns:
volume_update = lun.get_volume_update()
# failover_host in base cinder has considered previous status
# of the volume, it doesn't need to return it for update.
volume_update['replication_status'] = (
fields.ReplicationStatus.FAILED_OVER
if self._active_backend_id else
fields.ReplicationStatus.ENABLED)
model_update = {'volume_id': lun.os_id,
'updates': volume_update}
volume_update_list.append(model_update)
else:
LOG.info("No volume has replication capability.")
if secondary_id != strings.PRIMARY_BACKEND_ID:
LOG.info("Switch to the target %s", secondary_id)
self._replication.switch_source_and_target_client()
self._active_backend_id = secondary_id
else:
LOG.info("Switch to the primary %s", secondary_id)
self._replication.switch_source_and_target_client()
self._active_backend_id = ""
# No group entity in DS8K, so just need to update replication_status
# of the group.
group_update_list = []
groups = [grp for grp in groups if grp.status == 'available']
if groups:
if secondary_id != strings.PRIMARY_BACKEND_ID:
update_groups = [grp for grp in groups
if grp.replication_status ==
fields.ReplicationStatus.ENABLED]
repl_status = fields.ReplicationStatus.FAILED_OVER
else:
update_groups = [grp for grp in groups
if grp.replication_status ==
fields.ReplicationStatus.FAILED_OVER]
repl_status = fields.ReplicationStatus.ENABLED
if update_groups:
for group in update_groups:
group_update = {
'group_id': group.id,
'updates': {'replication_status': repl_status}
}
group_update_list.append(group_update)
return secondary_id, volume_update_list, group_update_list
    def enable_replication(self, context, group, volumes):
        """Resume pprc pairs.
        if user wants to adjust group, he/she does not need to pause/resume
        pprc pairs, here just provide a way to resume replication.

        :returns: (group model update, per-volume model updates)
        """
        volumes_model_update = []
        model_update = (
            {'replication_status': fields.ReplicationStatus.ENABLED})
        if volumes:
            luns = [Lun(volume) for volume in volumes]
            try:
                self._replication.enable_replication(luns)
            except restclient.APIException as e:
                msg = (_('Failed to enable replication for group %(id)s, '
                         'Exception: %(ex)s.')
                       % {'id': group.id, 'ex': six.text_type(e)})
                LOG.exception(msg)
                raise exception.VolumeDriverException(message=msg)
            for lun in luns:
                volumes_model_update.append(
                    {'id': lun.os_id,
                     'replication_status': fields.ReplicationStatus.ENABLED})
        return model_update, volumes_model_update
    def disable_replication(self, context, group, volumes):
        """Pause pprc pairs.
        if user wants to adjust group, he/she does not need to pause/resume
        pprc pairs, here just provide a way to pause replication.

        :returns: (group model update, per-volume model updates)
        """
        volumes_model_update = []
        model_update = (
            {'replication_status': fields.ReplicationStatus.DISABLED})
        if volumes:
            luns = [Lun(volume) for volume in volumes]
            try:
                self._replication.disable_replication(luns)
            except restclient.APIException as e:
                msg = (_('Failed to disable replication for group %(id)s, '
                         'Exception: %(ex)s.')
                       % {'id': group.id, 'ex': six.text_type(e)})
                LOG.exception(msg)
                raise exception.VolumeDriverException(message=msg)
            for lun in luns:
                volumes_model_update.append(
                    {'id': lun.os_id,
                     'replication_status': fields.ReplicationStatus.DISABLED})
        return model_update, volumes_model_update
    def failover_replication(self, context, group, volumes,
                             secondary_backend_id):
        """Fail over replication for a group and volumes in the group.

        Returns empty updates when the group is already on the requested
        side (idempotent fail over / fail back).
        """
        volumes_model_update = []
        model_update = {}
        luns = [Lun(volume) for volume in volumes]
        if secondary_backend_id == strings.PRIMARY_BACKEND_ID:
            # Fail back requested: skip if already on the primary.
            if luns:
                if not luns[0].failed_over:
                    LOG.info("Group %s has been failed back. it doesn't "
                             "need to fail back again.", group.id)
                    return model_update, volumes_model_update
            else:
                return model_update, volumes_model_update
        else:
            target_helper = self._replication.get_target_helper()
            backend_id = target_helper.backend['id']
            if secondary_backend_id is None:
                secondary_backend_id = backend_id
            elif secondary_backend_id != backend_id:
                raise exception.InvalidReplicationTarget(
                    message=(_('Invalid secondary_backend_id %(id)s. '
                               'Valid backend ids are %(ids)s.')
                             % {'id': secondary_backend_id,
                                'ids': (strings.PRIMARY_BACKEND_ID,
                                        backend_id)}))
            # Fail over requested: skip if already on the secondary.
            if luns:
                if luns[0].failed_over:
                    LOG.info("Group %(grp)s has been failed over to %(id)s.",
                             {'grp': group.id, 'id': backend_id})
                    return model_update, volumes_model_update
            else:
                return model_update, volumes_model_update
        LOG.debug("Starting failover group %(grp)s to %(id)s.",
                  {'grp': group.id, 'id': secondary_backend_id})
        try:
            if secondary_backend_id != strings.PRIMARY_BACKEND_ID:
                self._replication.start_group_pprc_failover(
                    luns, secondary_backend_id)
                model_update['replication_status'] = (
                    fields.ReplicationStatus.FAILED_OVER)
            else:
                self._replication.start_group_pprc_failback(
                    luns, secondary_backend_id)
                model_update['replication_status'] = (
                    fields.ReplicationStatus.ENABLED)
        except restclient.APIException as e:
            raise exception.VolumeDriverException(
                message=(_("Unable to failover group %(grp_id)s to "
                           "backend %(bck_id)s. Exception= %(ex)s")
                         % {'grp_id': group.id,
                            'bck_id': secondary_backend_id,
                            'ex': six.text_type(e)}))
        for lun in luns:
            volume_model_update = lun.get_volume_update()
            # base cinder doesn't consider previous status of the volume
            # in failover_replication, so here returns it for update.
            volume_model_update['replication_status'] = (
                model_update['replication_status'])
            volume_model_update['id'] = lun.os_id
            volumes_model_update.append(volume_model_update)
        return model_update, volumes_model_update
    def get_replication_error_status(self, context, groups):
        """Return error info for replicated groups and its volumes.
        all pprc copy related APIs wait until copy is finished, so it does
        not need to check their status afterwards.
        """
        # Nothing to report: copies are synchronous from the driver's view.
        return [], []
| 44.733492 | 79 | 0.579199 |
eba3bd349eaac8627e58f49b576fd73b30890f51 | 4,027 | py | Python | e2e_testing/torchscript/conv.py | pashu123/torch-mlir | 7c3ba25238ac73850fcdd698be1fb084f8a58e49 | [
"Apache-2.0"
] | null | null | null | e2e_testing/torchscript/conv.py | pashu123/torch-mlir | 7c3ba25238ac73850fcdd698be1fb084f8a58e49 | [
"Apache-2.0"
] | null | null | null | e2e_testing/torchscript/conv.py | pashu123/torch-mlir | 7c3ba25238ac73850fcdd698be1fb084f8a58e49 | [
"Apache-2.0"
] | null | null | null | # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
# Also available under a BSD-style license. See LICENSE.
import torch
from torch_mlir_e2e_test.torchscript.framework import TestUtils
from torch_mlir_e2e_test.torchscript.registry import register_test_case
from torch_mlir_e2e_test.torchscript.annotations import annotate_args, export
# ==============================================================================
class Conv2dNoPaddingModule(torch.nn.Module):
    """E2E test module: 2-in/10-out 3x3 Conv2d, no bias, default padding."""
    def __init__(self):
        super().__init__()
        # Fixed seed so the randomly initialized weights are reproducible.
        torch.manual_seed(0)
        self.conv = torch.nn.Conv2d(2, 10, 3, bias=False)
        self.train(False)
    @export
    @annotate_args([
        None,
        ([-1, -1, -1, -1], torch.float32, True),  # dynamic NCHW input
    ])
    def forward(self, x):
        return self.conv(x)
@register_test_case(module_factory=lambda: Conv2dNoPaddingModule())
def Conv2dNoPaddingModule_basic(module, tu: TestUtils):
    # Drive the module with a random NCHW batch of shape (5, 2, 10, 20).
    module.forward(tu.rand(5, 2, 10, 20))
class Conv2dBiasNoPaddingModule(torch.nn.Module):
    """E2E test module: 2-in/10-out 3x3 Conv2d with bias, default padding."""
    def __init__(self):
        super().__init__()
        # Fixed seed so the randomly initialized weights are reproducible.
        torch.manual_seed(0)
        self.conv = torch.nn.Conv2d(2, 10, 3, bias=True)
        self.train(False)
    @export
    @annotate_args([
        None,
        ([-1, -1, -1, -1], torch.float32, True),  # dynamic NCHW input
    ])
    def forward(self, x):
        return self.conv(x)
@register_test_case(module_factory=lambda: Conv2dBiasNoPaddingModule())
def Conv2dBiasNoPaddingModule_basic(module, tu: TestUtils):
    # Drive the module with a random NCHW batch of shape (5, 2, 10, 20).
    module.forward(tu.rand(5, 2, 10, 20))
class Conv2dWithPaddingModule(torch.nn.Module):
    """E2E test module: 2-in/10-out 3x3 Conv2d, no bias, padding=3."""
    def __init__(self):
        super().__init__()
        # Fixed seed so the randomly initialized weights are reproducible.
        torch.manual_seed(0)
        self.conv = torch.nn.Conv2d(2, 10, 3, bias=False, padding=3)
        self.train(False)
    @export
    @annotate_args([
        None,
        ([-1, -1, -1, -1], torch.float32, True),  # dynamic NCHW input
    ])
    def forward(self, x):
        return self.conv(x)
@register_test_case(module_factory=lambda: Conv2dWithPaddingModule())
def Conv2dWithPaddingModule_basic(module, tu: TestUtils):
    # Drive the module with a random NCHW batch of shape (5, 2, 10, 20).
    module.forward(tu.rand(5, 2, 10, 20))
class Conv2dWithPaddingDilationStrideModule(torch.nn.Module):
    """E2E test module: Conv2d exercising padding, stride, and dilation
    together on a dynamically shaped input."""
    def __init__(self):
        super().__init__()
        # Fixed seed so the randomly initialized weights are reproducible.
        torch.manual_seed(0)
        self.conv = torch.nn.Conv2d(in_channels=2,
                                    out_channels=10,
                                    kernel_size=3,
                                    padding=3,
                                    stride=2,
                                    dilation=3,
                                    bias=False)
        self.train(False)
    @export
    @annotate_args([
        None,
        ([-1, -1, -1, -1], torch.float32, True),  # dynamic NCHW input
    ])
    def forward(self, x):
        return self.conv(x)
@register_test_case(
    module_factory=lambda: Conv2dWithPaddingDilationStrideModule())
def Conv2dWithPaddingDilationStrideModule_basic(module, tu: TestUtils):
    # Drive the module with a random NCHW batch of shape (5, 2, 10, 20).
    module.forward(tu.rand(5, 2, 10, 20))
class Conv2dWithPaddingDilationStrideStaticModule(torch.nn.Module):
    """E2E test module: same conv as the dynamic variant, but the input
    shape is annotated statically as (5, 2, 10, 20)."""
    def __init__(self):
        super().__init__()
        # Fixed seed so the randomly initialized weights are reproducible.
        torch.manual_seed(0)
        self.conv = torch.nn.Conv2d(in_channels=2,
                                    out_channels=10,
                                    kernel_size=3,
                                    padding=3,
                                    stride=2,
                                    dilation=3,
                                    bias=False)
        self.train(False)
    @export
    @annotate_args([
        None,
        ([5, 2, 10, 20], torch.float32, True),  # static NCHW input shape
    ])
    def forward(self, x):
        return self.conv(x)
@register_test_case(
    module_factory=lambda: Conv2dWithPaddingDilationStrideStaticModule())
def Conv2dWithPaddingDilationStrideStaticModule_basic(module, tu: TestUtils):
    # Shape must match the static annotation on the module: (5, 2, 10, 20).
    module.forward(tu.rand(5, 2, 10, 20))
| 29.610294 | 80 | 0.583561 |
54fb626267d97bc470e45076008d4e0141e587c0 | 1,469 | py | Python | samples/snippets/noxfile_config.py | tmdiep/python-pubsublite | 8edef6708fab60ce29c040f3de60783fe31b55ae | [
"Apache-2.0"
] | 15 | 2020-11-10T15:36:52.000Z | 2022-03-06T15:00:25.000Z | samples/snippets/noxfile_config.py | tmdiep/python-pubsublite | 8edef6708fab60ce29c040f3de60783fe31b55ae | [
"Apache-2.0"
] | 110 | 2020-11-11T18:14:31.000Z | 2022-03-30T22:42:17.000Z | samples/snippets/noxfile_config.py | tmdiep/python-pubsublite | 8edef6708fab60ce29c040f3de60783fe31b55ae | [
"Apache-2.0"
] | 6 | 2020-11-13T19:24:27.000Z | 2022-01-29T08:13:14.000Z | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Default TEST_CONFIG_OVERRIDE for python repos.
# You can copy this file into your directory, then it will be inported from
# the noxfile.py.
# The source of truth:
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
# Per-sample overrides consumed by the shared noxfile.py test harness.
TEST_CONFIG_OVERRIDE = {
    # You can opt out from the test for specific Python versions.
    "ignored_versions": ["2.7"],
    # An envvar key for determining the project id to use. Change it
    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
    # build specific Cloud project. You can also use your own string
    # to use your own Cloud project.
    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
    # A dictionary you want to inject into your test. Don't put any
    # secrets here. These values will override predefined values.
    "envs": {},
}
| 40.805556 | 90 | 0.744044 |
1f1b36e479edcabc1fdc1ba9d42fa1ca1355c00a | 17,320 | py | Python | PyPoE/cli/exporter/wiki/parsers/area.py | aang521/PyPoE | b11f751b27d6fa917b895c1844b9f3955f23702c | [
"MIT"
] | 247 | 2015-07-06T19:39:11.000Z | 2022-03-30T13:11:03.000Z | PyPoE/cli/exporter/wiki/parsers/area.py | dbjorge/PyPoE | 0932bd729211488cfb3f57ed63fcb358a22b6bff | [
"MIT"
] | 121 | 2015-09-01T23:50:22.000Z | 2021-08-23T21:06:47.000Z | PyPoE/cli/exporter/wiki/parsers/area.py | dbjorge/PyPoE | 0932bd729211488cfb3f57ed63fcb358a22b6bff | [
"MIT"
] | 109 | 2015-09-09T06:37:56.000Z | 2022-03-20T16:06:33.000Z | """
Overview
===============================================================================
+----------+------------------------------------------------------------------+
| Path | PyPoE/cli/exporter/wiki/parsers/area.py |
+----------+------------------------------------------------------------------+
| Version | 1.0.0a0 |
+----------+------------------------------------------------------------------+
| Revision | $Id: 1f1b36e479edcabc1fdc1ba9d42fa1ca1355c00a $ |
+----------+------------------------------------------------------------------+
| Author | Omega_K2 |
+----------+------------------------------------------------------------------+
Description
===============================================================================
Agreement
===============================================================================
See PyPoE/LICENSE
Documentation
===============================================================================
Public API
-------------------------------------------------------------------------------
Internal API
-------------------------------------------------------------------------------
"""
# =============================================================================
# Imports
# =============================================================================
# Python
import re
from functools import partialmethod
from collections import OrderedDict
# 3rd-party
# self
from PyPoE.cli.core import console, Msg
from PyPoE.cli.exporter import config
from PyPoE.cli.exporter.wiki import parser
from PyPoE.cli.exporter.wiki.handler import ExporterHandler, ExporterResult
# =============================================================================
# Globals
# =============================================================================
__all__ = []
# =============================================================================
# Classes
# =============================================================================
class WikiCondition(parser.WikiCondition):
    """Wiki update condition/config for the 'Area' template.

    Configures which existing template parameters are preserved on update
    and the template name used when writing area pages.
    """
    # Template parameters copied over from the existing wiki page on update.
    COPY_KEYS = (
        'main_page',
        'release_version',
        'screenshot_ext',
    )
    NAME = 'Area'  # wiki template name
    ADD_INCLUDE = False  # presumably suppresses an include section -- see parser.WikiCondition
    INDENT = 33  # presumably the column used to align template values -- see parser.WikiCondition
class AreaCommandHandler(ExporterHandler):
    """Registers the 'area' CLI command and its sub-commands."""
    def __init__(self, sub_parser):
        """Set up the 'area' command with id/name/rowid/filter sub-parsers.

        :param sub_parser: argparse sub-parsers object to register with.
        """
        self.parser = sub_parser.add_parser(
            'area',
            help='Area Exporter',
        )
        # With no sub-command given, just print the help text.
        self.parser.set_defaults(func=lambda args: self.parser.print_help())
        sub = self.parser.add_subparsers()
        # By id
        a_id = sub.add_parser(
            'id',
            help='Extract areas by their id.'
        )
        self.add_default_parsers(
            parser=a_id,
            cls=AreaParser,
            func=AreaParser.by_id,
        )
        a_id.add_argument(
            'area_id',
            help='Id of the area, can be specified multiple times.',
            nargs='+',
        )
        # by name
        a_name = sub.add_parser(
            'name',
            help='Extract areas by their name.'
        )
        self.add_default_parsers(
            parser=a_name,
            cls=AreaParser,
            func=AreaParser.by_name,
        )
        a_name.add_argument(
            'area_name',
            help='Visible name of the area (localized), can be specified multiple times.',
            nargs='+',
        )
        # by row ID
        a_rid = sub.add_parser(
            'rowid',
            help='Extract areas by rowid.'
        )
        self.add_default_parsers(
            parser=a_rid,
            cls=AreaParser,
            func=AreaParser.by_rowid,
        )
        a_rid.add_argument(
            'start',
            help='Starting index',
            nargs='?',
            type=int,
            default=0,
        )
        a_rid.add_argument(
            'end',
            nargs='?',
            help='Ending index',
            type=int,
        )
        # filtering
        a_filter = sub.add_parser(
            'filter',
            help='Extract areas using filters.'
        )
        self.add_default_parsers(
            parser=a_filter,
            cls=AreaParser,
            func=AreaParser.by_filter,
        )
        a_filter.add_argument(
            '-ft-id', '--filter-id', '--filter-metadata-id',
            help='Regular expression on the id',
            type=str,
            dest='re_id',
        )
    def add_default_parsers(self, *args, **kwargs):
        """Add the shared arguments (format, --skip-main-page) to a sub-parser."""
        super().add_default_parsers(*args, **kwargs)
        # NOTE: local 'parser' shadows the imported 'parser' module here.
        parser = kwargs['parser']
        self.add_format_argument(parser)
        parser.add_argument(
            '--skip-main-page',
            help='Skip adding main_page argument to the template',
            action='store_true',
            default=False,
            dest='skip_main_page',
        )
class AreaParser(parser.BaseParser):
    """Exports world area data (WorldAreas.dat) to wiki 'Area' templates."""

    # .dat files loaded up-front by this parser.
    _files = [
        'WorldAreas.dat',
        'MapPins.dat',
        'AtlasNode.dat',
    ]

    # Resolves area rows by a WorldAreas.dat column value (used by by_id/by_name).
    _area_column_index_filter = partialmethod(
        parser.BaseParser._column_index_filter,
        dat_file_name='WorldAreas.dat',
        error_msg='Several areas have not been found:\n%s',
    )

    # Maps WorldAreas.dat columns to wiki template parameters.
    # Optional per-entry keys:
    #   'condition' - only copy when the predicate on the row is true
    #   'format'    - transform the raw value before writing
    #   'default'   - skip the parameter when the value equals this
    _COPY_KEYS = OrderedDict((
        ('Id', {
            'template': 'id',
        }),
        ('Name', {
            'template': 'name',
        }),
        ('Act', {
            'template': 'act',
        }),
        ('AreaLevel', {
            'template': 'area_level',
        }),
        ('MaxLevel', {
            'template': 'level_restriction_max',
            'default': 100,
        }),
        ('AreaType_TagsKeys', {
            'template': 'area_type_tags',
            'format': lambda value: ', '.join([
                tag['Id'] for tag in value
            ]),
            'default': [],
        }),
        ('TagsKeys', {
            'template': 'tags',
            'format': lambda value: ', '.join([
                tag['Id'] for tag in value
            ]),
            'default': [],
        }),
        ('LoadingScreen_DDSFile', {
            'template': 'loading_screen',
            'format': lambda value: value.replace(
                'Art/Textures/Interface/LoadingImages/', '').replace(
                '.dds', ''),
        }),
        ('Connections_WorldAreasKeys', {
            'template': 'connection_ids',
            # OrderedDict.fromkeys deduplicates while preserving order.
            'format': lambda value: ', '.join(OrderedDict.fromkeys([
                area['Id'] for area in value
            ]).keys()),
            'default': [],
        }),
        ('ParentTown_WorldAreasKey', {
            'template': 'parent_area_id',
            'format': lambda value: value['Id'],
        }),
        ('ModsKeys', {
            'template': 'modifier_ids',
            'format': lambda value: ', '.join([
                mod['Id'] for mod in value
            ]),
            'default': [],
        }),
        ('Bosses_MonsterVarietiesKeys', {
            'template': 'boss_monster_ids',
            'format': lambda value: ', '.join([
                mv['Id'] for mv in value
            ]),
            'default': [],
        }),
        ('Monsters_MonsterVarietiesKeys', {
            'template': 'monster_ids',
            'format': lambda value: ', '.join([
                mv['Id'] for mv in value
            ]),
            'default': [],
        }),
        ('FirstEntry_NPCTextAudioKey', {
            'template': 'entry_text',
            'format': lambda value: value['Text'],
        }),
        ('FirstEntry_NPCsKey', {
            'template': 'entry_npc',
            'condition': lambda area:
                area['FirstEntry_NPCTextAudioKey'] is not None,
            'format': lambda value: value['Name'],
        }),
        # Spawn chances section
        ('VaalArea_SpawnChance', {
            'template': 'vaal_area_spawn_chance',
            'condition': lambda area: area['VaalArea_SpawnChance'] > 0 and
                area['VaalArea_WorldAreasKeys'],
        }),
        ('VaalArea_WorldAreasKeys', {
            'template': 'vaal_area_ids',
            'condition': lambda area: area['VaalArea_SpawnChance'] > 0 and
                area['VaalArea_WorldAreasKeys'],
            'format': lambda value: ', '.join([
                area['Id'] for area in value
            ]),
        }),
        ('Strongbox_SpawnChance', {
            'template': 'strongbox_spawn_chance',
            'condition': lambda area: area['Strongbox_SpawnChance'] > 0,
        }),
        ('Strongbox_MaxCount', {
            'template': 'strongbox_max',
            'condition': lambda area: area['Strongbox_SpawnChance'] > 0,
            'default': 0,
        }),
        ('Strongbox_RarityWeight', {
            'template': 'strongbox_rarity_weight',
            'condition': lambda area: area['Strongbox_SpawnChance'] > 0,
            'default': '',
            'format': lambda value: ', '.join([str(v) for v in value]),
        }),
        # bools
        ('IsMapArea', {
            'template': 'is_map_area',
            'default': False,
        }),
        ('IsUniqueMapArea', {
            'template': 'is_unique_map_area',
            'default': False,
        }),
        ('IsTown', {
            'template': 'is_town_area',
            'default': False,
        }),
        ('IsHideout', {
            'template': 'is_hideout_area',
            'default': False,
        }),
        ('IsVaalArea', {
            'template': 'is_vaal_area',
            'default': False,
        }),
        ('IsLabyrinthArea', {
            'template': 'is_labyrinth_area',
            'default': False,
        }),
        ('IsLabyrinthAirlock', {
            'template': 'is_labyrinth_airlock_area',
            'default': False,
        }),
        ('IsLabyrinthBossArea', {
            'template': 'is_labyrinth_boss_area',
            'default': False,
        }),
        ('HasWaypoint', {
            'template': 'has_waypoint',
            'default': False,
        }),
    ))

    # Localized tier names used to disambiguate Harbinger map pages.
    _LANG = {
        'English': {
            'Low': 'Low Tier',
            'Mid': 'Mid Tier',
            'High': 'High Tier',
            'Uber': 'Maximum Tier',
        },
        'German': {
            'Low': 'Niedrige Stufe',
            'Mid': 'Mittlere Stufe',
            'High': 'Hohe Stufe',
            'Uber': 'Maximale Stufe',
        },
        'Russian': {
            'Low': 'низкий уровень',
            'Mid': 'средний уровень',
            'High': 'высокий уровень',
            'Uber': 'максимальный уровень',
        },
    }

    def by_rowid(self, parsed_args):
        """Export areas selected by a row-index range (start/end)."""
        return self.export(
            parsed_args,
            self.rr['WorldAreas.dat'][parsed_args.start:parsed_args.end],
        )

    def by_id(self, parsed_args):
        """Export areas selected by metadata id."""
        return self.export(parsed_args, self._area_column_index_filter(
            column_id='Id', arg_list=parsed_args.area_id
        ))

    def by_name(self, parsed_args):
        """Export areas selected by localized name."""
        return self.export(parsed_args, self._area_column_index_filter(
            column_id='Name', arg_list=parsed_args.area_name
        ))

    def by_filter(self, parsed_args):
        """Export areas whose id matches the --filter-id regular expression."""
        re_id = re.compile(parsed_args.re_id) if parsed_args.re_id else None
        out = []
        for row in self.rr['WorldAreas.dat']:
            if re_id:
                if not re_id.match(row['Id']):
                    continue
            out.append(row)
        return self.export(parsed_args, out)

    def export(self, parsed_args, areas):
        """Build wiki template results for the given WorldAreas.dat rows.

        :param parsed_args: parsed CLI arguments (format, skip_main_page, ...)
        :param areas: iterable of WorldAreas.dat rows to export
        :return: ExporterResult with one entry per area
        """
        console('Found %s areas, parsing...' % len(areas))

        r = ExporterResult()

        if not areas:
            console(
                'No areas found for the specified parameters. Quitting.',
                msg=Msg.warning,
            )
            return r

        console('Accessing additional data...')
        self.rr['MapPins.dat'].build_index('WorldAreasKeys')
        self.rr['AtlasNode.dat'].build_index('WorldAreasKey')
        self.rr['MapSeries.dat'].build_index('Id')
        if not parsed_args.skip_main_page:
            self.rr['Maps.dat'].build_index('Regular_WorldAreasKey')
            self.rr['UniqueMaps.dat'].build_index('WorldAreasKey')

        console('Found %s areas. Processing...' % len(areas))

        lang = self._LANG[config.get_option('language')]

        for area in areas:
            data = OrderedDict()

            # Copy the simple column -> template parameter mappings.
            for row_key, copy_data in self._COPY_KEYS.items():
                value = area[row_key]

                condition = copy_data.get('condition')
                if condition is not None and not condition(area):
                    continue

                # Skip default values to reduce size of template
                if value == copy_data.get('default'):
                    continue

                fmt = copy_data.get('format')
                if fmt:
                    value = fmt(value)
                data[copy_data['template']] = value

            # Spawn weights are parallel arrays; emit numbered parameter pairs.
            for i, (tag, value) in enumerate(zip(area['SpawnWeight_TagsKeys'],
                                                 area['SpawnWeight_Values']),
                                             start=1):
                data['spawn_weight%s_tag' % i] = tag['Id']
                data['spawn_weight%s_value' % i] = value

            # Flavour text: map pin first, atlas node (if any) overrides it.
            map_pin = self.rr['MapPins.dat'].index['WorldAreasKeys'].get(area)
            if map_pin:
                data['flavour_text'] = map_pin[0]['FlavourText']

            atlas_node = self.rr['AtlasNode.dat'].index['WorldAreasKey'].get(
                area)
            if atlas_node:
                data['flavour_text'] = atlas_node[0]['FlavourTextKey']['Text']

            #
            # Add main-page if possible
            #
            if not parsed_args.skip_main_page:
                # Renamed from 'map' to avoid shadowing the builtin.
                map_row = self.rr['Maps.dat'].index[
                    'Regular_WorldAreasKey'].get(area)
                if map_row:
                    map_row = map_row[0]
                    if map_row['MapSeriesKey']['Id'] == 'MapWorlds':
                        data['main_page'] = map_row['BaseItemTypesKey']['Name']
                    else:
                        data['main_page'] = '%s (%s)' % (
                            map_row['BaseItemTypesKey']['Name'],
                            map_row['MapSeriesKey']['Name']
                        )
                elif data.get('tags') and 'map' in data['tags']:
                    # Determine the map series from the area id prefix.
                    map_version = None
                    for row in self.rr['MapSeries.dat']:
                        if not area['Id'].startswith(row['Id']):
                            continue
                        map_version = row['Name']

                    if map_version:
                        # The current series needs no disambiguation suffix.
                        if map_version == self.rr['MapSeries.dat'].index['Id'][
                                'MapWorlds']['Name']:
                            map_version = None

                    if 'Unique' in area['Id'] or 'BreachBoss' in area['Id']\
                            or area['Id'].endswith('ShapersRealm'):
                        if map_version is None:
                            data['main_page'] = area['Name']
                        else:
                            data['main_page'] = '%s (%s)' % (
                                area['Name'], map_version
                            )
                    elif 'Harbinger' in area['Id']:
                        # Tier suffix (Low/Mid/High/Uber) follows 'Harbinger'.
                        tier = re.sub('^.*Harbinger', '', area['Id'])
                        if tier:
                            if map_version is None:
                                data['main_page'] = '%s (%s)' % (
                                    area['Name'],
                                    lang[tier],
                                )
                            else:
                                data['main_page'] = '%s (%s) (%s)' % (
                                    area['Name'],
                                    lang[tier],
                                    map_version,
                                )
                    else:
                        if map_version is None:
                            data['main_page'] = area['Name']
                        else:
                            data['main_page'] = '%s (%s)' % (
                                area['Name'],
                                map_version,
                            )

            cond = WikiCondition(
                data=data,
                cmdargs=parsed_args,
            )

            r.add_result(
                text=cond,
                out_file='area_%s.txt' % data['id'],
                wiki_page=[
                    {
                        'page': 'Area:' + self._format_wiki_title(data['id']),
                        'condition': cond,
                    },
                ],
                wiki_message='Area updater',
            )

        return r
# =============================================================================
# Functions
# =============================================================================
| 32.990476 | 90 | 0.411836 |
cc690a2fd8757a4dc3a5206495d6e5b9c4e57053 | 4,108 | py | Python | setup.py | Acidburn0zzz/pkgbuilder | f8a62d9f232178e628d7404bbe4efccb05f2a857 | [
"BSD-3-Clause"
] | 1 | 2018-06-30T17:10:17.000Z | 2018-06-30T17:10:17.000Z | setup.py | Acidburn0zzz/pkgbuilder | f8a62d9f232178e628d7404bbe4efccb05f2a857 | [
"BSD-3-Clause"
] | null | null | null | setup.py | Acidburn0zzz/pkgbuilder | f8a62d9f232178e628d7404bbe4efccb05f2a857 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
    """setuptools 'test' command that delegates to pytest."""
    def finalize_options(self):
        """Point the test runner at the tests/ directory."""
        TestCommand.finalize_options(self)
        self.test_args = ['tests/']
        self.test_suite = True
    def run_tests(self):
        """Run pytest and exit with its return code."""
        # import here, cause outside the eggs aren't loaded
        import pytest
        errno = pytest.main(self.test_args)
        sys.exit(errno)
# Read the long description up front with a context manager so the file
# handle is closed promptly (the previous inline open() was never closed).
with open('./docs/README.rst', 'r', encoding='utf-8') as _readme:
    _long_description = _readme.read()

# Locales that ship a compiled pkgbuilder.mo message catalog, in install order.
# NOTE: pt_BR has a catalog in the tree but is intentionally not installed.
_locales = ('pl', 'ar', 'cs', 'de', 'es', 'id', 'it', 'ja', 'pt', 'sk', 'sv',
            'tr', 'vi')
_data_files = [('share/man/man8', ['docs/pkgbuilder.8.gz']),
               ('share/man/man8', ['docs/pb.8.gz'])]
_data_files += [('share/locale/{0}/LC_MESSAGES'.format(_code),
                 ['locale/{0}/LC_MESSAGES/pkgbuilder.mo'.format(_code)])
                for _code in _locales]

setup(name='pkgbuilder',
      version='4.2.17',
      description='An AUR helper (and library) in Python 3.',
      keywords='arch pkgbuild',
      author='Chris Warrick',
      author_email='chris@chriswarrick.com',
      url='https://github.com/Kwpolska/pkgbuilder',
      license='3-clause BSD',
      long_description=_long_description,
      platforms='Arch Linux',
      zip_safe=False,
      include_package_data=True,
      cmdclass={'test': PyTest},
      classifiers=['Development Status :: 6 - Mature',
                   'Environment :: Console',
                   'Intended Audience :: End Users/Desktop',
                   'Intended Audience :: System Administrators',
                   'License :: OSI Approved :: BSD License',
                   'Natural Language :: English',
                   'Operating System :: POSIX :: Linux',
                   'Programming Language :: Python',
                   'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.6',
                   'Topic :: System',
                   'Topic :: System :: Archiving :: Packaging',
                   'Topic :: Utilities'],
      packages=['pkgbuilder'],
      install_requires=['pyalpm', 'requests', 'srcinfo'],
      data_files=_data_files,
      entry_points={
          'console_scripts': [
              'pkgbuilder = pkgbuilder.__main__:main',
              'pb = pkgbuilder.wrapper:main'
          ]
      },
      )
| 48.329412 | 79 | 0.47371 |
a1bc3e66fb1e8dad7af339815e183c5138c659c1 | 1,220 | py | Python | orchestrator/helpers/vendor/zipstream/compat.py | darius-kia/director4 | 1d2c2c4c3ec12cc9b7f846d5dc075ea3bbef36f9 | [
"MIT"
] | 7 | 2020-08-23T23:08:34.000Z | 2021-12-02T04:17:37.000Z | orchestrator/helpers/vendor/zipstream/compat.py | darius-kia/director4 | 1d2c2c4c3ec12cc9b7f846d5dc075ea3bbef36f9 | [
"MIT"
] | 43 | 2020-08-24T16:48:29.000Z | 2022-03-02T19:45:54.000Z | orchestrator/helpers/vendor/zipstream/compat.py | darius-kia/director4 | 1d2c2c4c3ec12cc9b7f846d5dc075ea3bbef36f9 | [
"MIT"
] | 10 | 2020-08-17T20:42:52.000Z | 2021-07-16T03:46:51.000Z | # -*- coding: utf-8 -*-
"""
pythoncompat
Copied from requests
"""
import sys
# -------
# Pythons
# -------
PY2 = sys.version_info[0] == 2  # running on Python 2.x
PY3 = sys.version_info[0] == 3  # running on Python 3.x
# ---------
# Specifics
# ---------
# Alias the text/bytes/numeric types so the rest of the package can use one
# consistent set of names on both major Python versions.
if PY2:
    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)
elif PY3:
    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
# zipfile feature constants: fall back to fixed numeric values on Python
# versions whose zipfile module does not expose them.
try:
    from zipfile import ZIP64_VERSION
except ImportError:
    ZIP64_VERSION = 45
try:
    from zipfile import BZIP2_VERSION
except ImportError:
    BZIP2_VERSION = 46
try:
    from zipfile import ZIP_BZIP2
except ImportError:
    ZIP_BZIP2 = 12
try:
    from zipfile import LZMA_VERSION
except ImportError:
    LZMA_VERSION = 63
try:
    from zipfile import ZIP_LZMA
except ImportError:
    ZIP_LZMA = 14
try:
    from zipfile import ZIP_MAX_COMMENT
except ImportError:
    ZIP_MAX_COMMENT = (1 << 16) - 1
# Copy from io
SEEK_SET = 0 # start of the stream (the default); offset should be zero or positive
SEEK_CUR = 1 # current stream position; offset may be negative
SEEK_END = 2 # end of the stream; offset is usually negative
| 16.266667 | 84 | 0.661475 |
e62ea412d6cc66afdfd5979917c68d0914421328 | 4,278 | py | Python | adobe_aam/segmentFolders/segmentFolders.py | TrevorMcCormick/adobe_aam | 8ea92c8e199647382947f68f384e887ce7385cff | [
"MIT"
] | null | null | null | adobe_aam/segmentFolders/segmentFolders.py | TrevorMcCormick/adobe_aam | 8ea92c8e199647382947f68f384e887ce7385cff | [
"MIT"
] | null | null | null | adobe_aam/segmentFolders/segmentFolders.py | TrevorMcCormick/adobe_aam | 8ea92c8e199647382947f68f384e887ce7385cff | [
"MIT"
] | null | null | null | # Import packages
import os
import json
from datetime import datetime, timedelta
import requests
import jwt
import pandas as pd
from adobe_aam.helpers.headers import *
from adobe_aam.helpers.simplify import *
from pandas import json_normalize
def bytesToJson(response_content):
    """Decode a UTF-8 JSON byte payload into a flattened pandas DataFrame."""
    parsed = json.loads(response_content.decode('utf-8'))
    return json_normalize(parsed)
def flattenJson(nested_json):
    """
    Flatten json object with nested keys into a single level.

    Nested dict keys and list indices are joined with '/' to build the
    flattened key.

    Args:
        nested_json: A nested json object.
    Returns:
        The flattened json object if successful, None otherwise.
    """
    flat = {}

    def _walk(node, prefix=''):
        # Recurse into dicts (by key) and lists (by index); leaves are stored
        # under the accumulated path with the trailing '/' stripped.
        if type(node) is dict:
            for key in node:
                _walk(node[key], prefix + key + '/')
        elif type(node) is list:
            for index, item in enumerate(node):
                _walk(item, prefix + str(index) + '/')
        else:
            flat[prefix[:-1]] = node

    _walk(nested_json)
    return flat
class SegmentFolders:
    """Read access to Adobe Audience Manager segment folders."""

    @classmethod
    def get_many(cls):
        """
        Get all AAM segment folders visible to the API user.

        Returns:
            DataFrame of all folderIds, parentFolderIds, and paths to which
            the AAM API user has READ access, or None if the request failed.
        """
        request_url = "https://api.demdex.com/v1/folders/segments"

        response = requests.get(url=request_url,
                                headers=Headers.createHeaders(),
                                params={})

        if response.status_code != 200:
            # Surface the API error; implicitly returns None.
            print(response.content)
        else:
            folders_flat = flattenJson(response.json())
            folderIDs = []
            parentFolderIDs = []
            paths = []
            for key, value in folders_flat.items():
                # BUGFIX: test 'parentFolderId' first -- those keys also end
                # with 'folderId' and were previously mis-binned, producing
                # mismatched column lengths.
                if key.endswith("parentFolderId"):
                    parentFolderIDs.append(value)
                elif key.endswith("folderId"):
                    folderIDs.append(value)
                elif key.endswith("path"):
                    paths.append(value)
            return pd.DataFrame({'folderId': folderIDs,
                                 'parentFolderId': parentFolderIDs,
                                 'path': paths})

    @classmethod
    def get_one(cls,
                folderId,
                get_children=None,
                get_parents=None):
        """
        Get one AAM segment folder.

        Args:
            folderId: folder to look up.
            get_children: if truthy, also include direct child folders
                (level '-1').
            get_parents: if truthy, also include the parent folder
                (level '+1').
        Returns:
            DataFrame of the folder (level '0') plus optional children and
            parents, provided the AAM API user has READ access.
        """
        df = SegmentFolders.get_many()
        # .copy() avoids pandas SettingWithCopy warnings on the filtered views.
        df1 = df[df['folderId'] == folderId].copy()
        df1['level'] = ['0'] * len(df1)
        frames = [df1]
        if get_children:
            df_children = df[df['parentFolderId'] == folderId].copy()
            df_children['level'] = ['-1'] * len(df_children)
            frames.append(df_children)
        if get_parents:
            # Parent id is taken from the requested folder's first row.
            df_parents = df[df['folderId'] == df1['parentFolderId'].iloc[0]].copy()
            df_parents['level'] = ['+1'] * len(df_parents)
            frames.append(df_parents)
        # pd.concat replaces the deprecated DataFrame.append.
        return pd.concat(frames)

    @classmethod
    def search(cls, search, keywords):
        """
        Search segment folder paths for keywords.

        Args:
            search: 'any' (match if any keyword occurs in the path) or
                'all' (match only if every keyword occurs).
            keywords: list of keywords, or a comma-separated string.
        Returns:
            DataFrame of matching folders.
        """
        # BUGFIX: previously referenced the unbound local 'segmentFolders'
        # before assignment, raising NameError on every call.
        folders = cls.get_many()
        if type(keywords) != list:
            keywords = keywords.split(",")
        if search == "any":
            mask = folders.path.apply(
                lambda sentence: any(keyword in sentence for keyword in keywords))
        elif search == "all":
            mask = folders.path.apply(
                lambda sentence: all(keyword in sentence for keyword in keywords))
        else:
            raise ValueError("search must be 'any' or 'all'")
        return folders[mask]
| 35.355372 | 113 | 0.536466 |
6875185254c19a0b6dda325d5dab51ca6489776d | 3,072 | py | Python | .history/classes/Handler_20171106214820.py | reecebenson/DADSA-Tennis-PartA | d0763f819b300fcd0ce27041f5bc4ef0519c00bf | [
"MIT"
] | null | null | null | .history/classes/Handler_20171106214820.py | reecebenson/DADSA-Tennis-PartA | d0763f819b300fcd0ce27041f5bc4ef0519c00bf | [
"MIT"
] | null | null | null | .history/classes/Handler_20171106214820.py | reecebenson/DADSA-Tennis-PartA | d0763f819b300fcd0ce27041f5bc4ef0519c00bf | [
"MIT"
] | null | null | null | # DADSA - Assignment 1
# Reece Benson
import json
from classes import Player as Player
from classes import Season as Season
from classes import Tournament as Tournament
from classes import Round as Round
from classes import Match as Match
class Handler():
    """Loads and serves season data (players, prize money) for the app."""
    # Define the variables we will be using
    # NOTE: these are class-level attributes, shared across instances.
    app = None
    prize_money = None
    player_count = None
    seasons = { }
    def __init__(self, _app):
        """Store the owning application object; log in debug mode."""
        if(_app.debug):
            print("[LOAD]: Loaded Handler!")
        # Define our Application within this Handler class
        self.app = _app
    # Used to load all data into memory
    def load(self):
        """Load players then prize money into memory (order matters:
        load_prize_money pads using player_count set by load_players)."""
        # This function will create our seasons and implement the genders & players
        self.load_players()
        self.load_prize_money()
        #TODO: Implement load_seasons()
    # Used to load prize money
    def load_prize_money(self):
        """Read ./data/rankingPoints.json into self.prize_money.

        NOTE(review): despite the 'dictionary' comment below, the result is
        actually a list padded with zeros up to player_count -- confirm the
        intended structure against callers.
        """
        with open('./data/rankingPoints.json') as tData:
            data = json.load(tData)
            # Make our prize_money a dictionary
            if(self.prize_money == None):
                self.prize_money = { }
            # Set the prize money to the actual rank and points received
            self.prize_money = [ pts for pts in data for rank in data[pts] ]
            # We want to set the prize money for all indexes possible via the player
            self.prize_money += [ 0 ] * ( self.player_count - len(self.prize_money))
            print(self.prize_money)  # debug output
    # Used to load players from all seasons into memory
    def load_players(self):
        """Read ./data/players.json into self.seasons[season]['players'][gender]
        and track the largest per-gender player list in self.player_count."""
        # Set our player (in gender) count
        self.player_count = 0
        with open('./data/players.json') as tData:
            data = json.load(tData)
            # Players are classed within Seasons
            for season in data:
                # If the season does not yet exist, create it
                if(not season in self.seasons):
                    self.seasons[season] = { "players": { } }
                # Players are then stored within Gender classifications
                for gender in data[season]:
                    if(not gender in self.seasons[season]["players"]):
                        self.seasons[season]["players"][gender] = [ ]
                    # Append our player in the season, within the gender
                    for player in data[season][gender]:
                        #TODO: Change to using Player class
                        self.seasons[season]["players"][gender].append(player)
                        # Update our player count
                        if(len(self.seasons[season]["players"][gender]) > self.player_count):
                            self.player_count = len(self.seasons[season]["players"][gender])
    def get_players(self, season):
        """Return the players dict for a season, or None if unknown/empty."""
        # Check our Season exists
        if(not season in self.seasons):
            return None
        else:
            # Check we have players within our Season
            if("players" in self.seasons[season]):
                return self.seasons[season]["players"]
            else:
                return None
070463d0b3a3cbb93eac672995e253db731f0ace | 286 | py | Python | atbash-cipher/atbash_cipher.py | pierrebeaucamp/Exercism-Python | 910b764c6726e9f131fb3a394c70d9b5bb167be9 | [
"Unlicense"
] | null | null | null | atbash-cipher/atbash_cipher.py | pierrebeaucamp/Exercism-Python | 910b764c6726e9f131fb3a394c70d9b5bb167be9 | [
"Unlicense"
] | null | null | null | atbash-cipher/atbash_cipher.py | pierrebeaucamp/Exercism-Python | 910b764c6726e9f131fb3a394c70d9b5bb167be9 | [
"Unlicense"
] | null | null | null | import regex
import string
abc = list(string.ascii_lowercase)
def decode(txt):
    """Atbash-decode: strip punctuation/whitespace, mirror a-z in the alphabet."""
    cleaned = regex.sub(r'\p{P}+|\s', "", txt)
    return regex.sub(r'[a-z]', lambda match: abc[-1 - abc.index(match.group(0))], cleaned)
def encode(txt):
    """Atbash-encode the text, grouping the cipher output in blocks of five."""
    ciphered = decode(txt.lower())
    return regex.sub(r'(.{5})(?!$)', '\\1 ', ciphered)
| 23.833333 | 77 | 0.594406 |
57f239b9fe682826249ac62dbee0ff3448f5c215 | 766 | py | Python | napalm_flexfabric/__init__.py | firefly-serenity/napalm-flexfabric | a9ce5d696f4bb5d1b03b0c49f2fcbd1588499543 | [
"Apache-2.0"
] | 6 | 2019-09-22T05:38:50.000Z | 2021-09-09T08:52:01.000Z | napalm_flexfabric/__init__.py | fmbrieva/napalm-flexfabric | 997e70780c0ff44942f6dfa27375c8124865aa0f | [
"Apache-2.0"
] | null | null | null | napalm_flexfabric/__init__.py | fmbrieva/napalm-flexfabric | 997e70780c0ff44942f6dfa27375c8124865aa0f | [
"Apache-2.0"
] | 3 | 2020-07-26T15:17:10.000Z | 2022-02-05T09:53:19.000Z | # Copyright 2019 Steffen Walter. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""napalm-flexfabric package."""
from napalm_flexfabric.flexfabric import FlexFabricDriver
__all__ = ["FlexFabricDriver"] | 42.555556 | 80 | 0.759791 |
fa5547775aec543812a612c5e8d7d8a0434cd8d7 | 4,965 | py | Python | random-images/wallpapers/dots.py | dominicschaff/random | 14a19b976a09c768ab8844b7cda237c17a92c9ae | [
"MIT"
] | null | null | null | random-images/wallpapers/dots.py | dominicschaff/random | 14a19b976a09c768ab8844b7cda237c17a92c9ae | [
"MIT"
] | null | null | null | random-images/wallpapers/dots.py | dominicschaff/random | 14a19b976a09c768ab8844b7cda237c17a92c9ae | [
"MIT"
] | null | null | null | import PIL.Image as Image
import PIL.ImageDraw as ImageDraw
from random import *
import sys
from math import cos, sin, pi, sqrt, tan
# Command-line arguments: argv[1] = number of wallpapers, argv[2] = filename prefix.
amount = 1
prepend = "wp-"
if len(sys.argv)>1:
    amount = int(sys.argv[1])
if len(sys.argv)>2:
    prepend = sys.argv[2]
#formulas:
def form_circle(x,y,i,d):
    """Integer point at angle i on a circle of radius d centred at (x, y)."""
    px = x + d * cos(i)
    py = y + d * sin(i)
    return int(px), int(py)
def form_spiral(x, y, t, s):
    """Integer point on an inward spiral around (x, y).

    The radius shrinks as 1/sqrt(|t|); at t == 0 the scale factor is 1.
    """
    t = abs(t)
    root = sqrt(t)
    scale = 1.0 / root if root != 0 else 1.0
    return int(x + s * scale * cos(t)), int(y + s * scale * sin(t))
def form_loxodrome(x, y, t, s):
    """Integer point on a loxodrome-style curve around (x, y), scaled by s."""
    slope = tan(5 * t)
    if slope == 0:
        slope = 1
    factor = cos(1.0 / slope)
    return int(x + s * cos(t) * factor), int(y + s * sin(t) * factor)
#shapes:
def circle(id,c,r,f,col):
    """Draw a circle of radius r at centre c whose outer f pixels fade in,
    then fill the interior solid with col.

    NOTE(review): parameter 'id' (an ImageDraw) shadows the builtin id().
    """
    for i in xrange(f):
        x0 = int(c[0] - r + i)
        x1 = int(c[0] + r - i)
        y0 = int(c[1] - r + i)
        y1 = int(c[1] + r - i)
        # Brightness ramps from 0.3 at the rim towards 1.0 going inward.
        p = min((1.0, 1.0 * i / f + 0.3))
        n = (int(col[0] * p), int(col[1] * p), int(col[2] * p))
        id.ellipse((x0,y0,x1,y1),n)
    x0 = int(c[0] - r + f)
    x1 = int(c[0] + r - f)
    y0 = int(c[1] - r + f)
    y1 = int(c[1] + r - f)
    id.ellipse((x0,y0,x1,y1),col)
def arc(id, c, r, f, s, e, col):
    """Draw a faded pie slice of radius r at centre c from angle s to e.

    NOTE(review): both branches of the i<0 test compute the same value, and
    the trailing 'i += 1.0' has no effect (the loop rebinds i) -- apparent
    leftovers; behaviour kept as-is.
    """
    p=0.0
    for i in xrange(-f,f):
        x0 = int(c[0] - r + i)
        x1 = int(c[0] + r - i)
        y0 = int(c[1] - r + i)
        y1 = int(c[1] + r - i)
        if i < 0:
            p = 0.7 - 1.0 * i / f
        else:
            p = 0.7 - 1.0 * i / f
        p = min((1.0,p))
        n = (int(col[0] * p), int(col[1] * p), int(col[2] * p))
        id.pieslice((x0,y0,x1,y1), int(s + i), int(e - i), n)
        i += 1.0
def curve(pix, w, h, x0, y0, d, a, col):
    """Plot a faded circular arc of radius d around (x0, y0) directly into
    the pixel buffer pix (dimensions w x h), thickened 2*d pixels vertically.
    """
    i = 0
    s = a*pi
    p=0.0
    while i < d:
        x,y = form_circle(x0,y0,i/s,d)
        #x,y = form_loxodrome(x0,y0,i/100.0, 100.0)
        for j in xrange(-d,+d):
            # Brightness fades symmetrically away from the curve's centre line.
            if j < 0:
                p = 0.7 + j / d
            else:
                p = 0.7 - j / d
            p = min((1.0,p))
            c2 = (int(col[0] * p), int(col[1] * p), int(col[2] * p))
            # Only write pixels that fall inside the image bounds.
            if x >= 0 and x < w and y+j >= 0 and y+j < h:
                pix[x,y+j] = c2
        i +=0.1
def makeImage(w,h,p,t):
    """Create a w x h RGB image containing p random shapes.

    t (0-10) is the chance out of 10 that a given shape is drawn as a
    filled circle rather than a curve.
    """
    im = Image.new('RGB',(w,h),(0,0,0))
    pix = im.load()
    id = ImageDraw.Draw(im)
    # Base radius scale derived from the smaller image dimension.
    rm = min(w,h)/50
    for i in xrange(p):
        x = randint(0,w)
        y = randint(0,h)
        r = randint(rm,rm*4)
        d = randint(r/4,r)
        c = (randint(35,255),randint(35,50),randint(35,255))
        if randint(0,10) <= t:
            circle(id, (x,y), r, d, c)
        else:
            curve(pix, w, h, x, y, r, d/10.0, c)
            #s = randint(0,180)
            #e = randint(s,180)
            #arc(id, (x,y), r, d, s, e, c)
    return im
#movie Functions
def makePoints(amount, width, height):
    """Create `amount` random [type_roll, x, y] triples within the canvas."""
    points = []
    for _ in range(amount):
        points.append([randint(0, 10), randint(0, width - 1), randint(0, height)])
    return points
def applyTypeExtras(points, w, h, chance):
    """Extend each [type, x, y] point in place with drawing parameters.

    Points whose type roll is <= chance become circles (type 0); the rest
    become curves (type 1). Appended fields: radius, depth/width, colour,
    animation position, animation direction.
    NOTE(review): locals x and y are computed but never used here.
    """
    rm = min(w,h)/40
    for i in xrange(len(points)):
        x = randint(0,w)
        y = randint(0,h)
        r = randint(rm,rm*4)
        d = randint(r/2,r)
        c = (randint(0,250),randint(0,250),randint(0,250))
        if points[i][0] <= chance:
            points[i][0] = 0
            points[i].append(r)
            points[i].append(d)
            points[i].append(c)
            points[i].append(0)
            points[i].append(1)
        else:
            points[i][0] = 1
            points[i].append(r)
            points[i].append(d/10.0)
            points[i].append(c)
            points[i].append(0)
            points[i].append(1)
def makeWallpapers(amount, prepend, width, height):
    """Render `amount` wallpapers to <prepend><n>.png (Python 2 print syntax)."""
    for j in xrange(amount):
        makeImage(width,height,25, 10).save(prepend + str(j) + '.png')
        print "DONE:",j,'=(',(100.0*(j+1)/amount),'% complete)'
def makeMovie(amount, frames, prepend, width, height):
    """Render `frames` animation frames of `amount` animated shapes to
    <prepend>NNNN.png (zero-padded; Python 2 print syntax).

    points[i][6]/[7] hold each shape's animated radius and step direction.
    NOTE(review): the `>= 0` check overwrites the -1 direction set just
    above it, so the radius appears to grow without shrinking -- confirm
    the intended oscillation.
    """
    points = makePoints(amount,width,height)
    applyTypeExtras(points, width, height, 100)
    for f in xrange(frames):
        im = Image.new('RGB',(width,height),(0,0,0))
        pix = im.load()
        id = ImageDraw.Draw(im)
        for i in xrange(amount):
            if points[i][0] == 0:
                points[i][6] += points[i][7]
                if points[i][6] > points[i][3]:
                    points[i][7] = -1
                if points[i][6] >= 0:
                    points[i][7] = 1
                circle(id, (points[i][1], points[i][2]), points[i][6], points[i][4], points[i][5])
            elif points[i][0] == 1:
                points[i][6] += points[i][7]
                if points[i][6] > points[i][3]:
                    points[i][7] = -1
                if points[i][6] >= 0:
                    points[i][7] = 1
                curve(pix, width, height, points[i][1], points[i][2], points[i][6], points[i][4], points[i][5])
        print "SAVING FRAME: %5d/%5d"%(f,frames)
        s = str(f)
        while len(s)<4:s="0"+s
        im.save(prepend+s+".png")
# Entry point: render the requested number of 2560x1600 wallpapers.
makeWallpapers(amount, prepend,2560,1600)
7db5968171497a419fef5fbf99463e981519a929 | 714 | py | Python | setup.py | AkadioInc/firefly | d6c48ff9999ffedcaa294fcd956eb97b90408583 | [
"BSD-2-Clause"
] | null | null | null | setup.py | AkadioInc/firefly | d6c48ff9999ffedcaa294fcd956eb97b90408583 | [
"BSD-2-Clause"
] | null | null | null | setup.py | AkadioInc/firefly | d6c48ff9999ffedcaa294fcd956eb97b90408583 | [
"BSD-2-Clause"
] | null | null | null | from setuptools import setup, find_packages
# Package metadata and installation configuration for the FIREfly project.
_requires = ['h5py>=2.9', 'h5pyd>=0.8.4', 'ipyleaflet>=0.11.1', 'hvplot>=0.4']
_scripts = ['scripts/ch10-to-h5.py', 'scripts/ch10summary.py',
            'scripts/derive-6dof.py']

setup(
    name='firefly',
    version='0.0.5',
    description='Scripts and modules for the FIREfly project',
    long_description='To be provided.',
    long_description_content_type='text',
    url='TBA',
    author='Akadio Inc',
    author_email='admin@akadio.com',
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    python_requires='>=3.7, <4',
    install_requires=_requires,
    scripts=_scripts,
    package_data={'': ['FY18_MIRTA_Points.csv']},
)
| 25.5 | 65 | 0.60084 |
4a9264a3c4801e478aa30d02ad56633a054a8f8e | 1,152 | py | Python | api.py | zlikun/python-proxy-ip-pool | fac087e4abfb85771505fb6eabd5ce9eb434f7d5 | [
"Apache-2.0"
] | 3 | 2018-07-20T12:45:38.000Z | 2019-05-09T04:20:30.000Z | api.py | zlikun/python-proxy-ip-pool | fac087e4abfb85771505fb6eabd5ce9eb434f7d5 | [
"Apache-2.0"
] | null | null | null | api.py | zlikun/python-proxy-ip-pool | fac087e4abfb85771505fb6eabd5ce9eb434f7d5 | [
"Apache-2.0"
] | 2 | 2019-02-02T14:32:42.000Z | 2019-03-08T06:44:26.000Z | # -*- coding: utf-8 -*-
import redis
from flask import Flask, jsonify
import config
from process import DataProcessor
app = Flask(__name__)
dp = DataProcessor()
client = redis.StrictRedis(host=config.redis_host,
port=config.redis_port,
decode_responses=True,
charset='utf-8')
@app.route('/')
def index():
    """Health-check endpoint; always returns 'OK'."""
    return 'OK'
@app.route('/<protocol>/random')
def random(protocol):
    """
    Return one random high-quality proxy IP for the given protocol.
    NOTE(review): this view name shadows the stdlib ``random`` module.
    :param protocol: proxy protocol from the URL (e.g. 'http')
    :return: a random member of the Redis set
    """
    # NOTE(review): key resolves to e.g. 'http:proxies:1' -- confirm this
    # matches the key schema written by the processing pipeline.
    return client.srandmember('{}:proxies:{}'.format(protocol, 1))
@app.route('/proxies')
def proxies():
    """
    Return all proxy IPs as a JSON array.
    :return: JSON list of proxy URLs
    """
    return jsonify([proxy for proxy, _ in dp.query()])
@app.route('/<protocol>/proxies')
def proxies_by_protocol(protocol):
    """
    Return all proxy IPs matching the given protocol.
    :param protocol: proxy protocol from the URL (e.g. 'http')
    :return: JSON list of proxy URLs starting with '<protocol>://'
    """
    return jsonify([proxy for proxy, _ in dp.query()
                    if proxy.startswith('{}://'.format(protocol))])
def run_api_server():
    """
    Start the Flask API server on all interfaces, port 8888.
    :return: does not return until the server stops
    """
    app.run(host="0.0.0.0", port=8888)
if __name__ == '__main__':
run_api_server()
| 18.285714 | 67 | 0.578125 |
89f7190af160b033cd6a60bffba646b9af3b6804 | 4,897 | py | Python | flexbe_core/flexbe_core/core/event_state.py | Jmz919/flexbe_behavior_engine | bdb85de41fafbfea6e4eb74c271b9cee18be4d8b | [
"BSD-3-Clause"
] | 1 | 2022-03-11T04:56:31.000Z | 2022-03-11T04:56:31.000Z | flexbe_core/flexbe_core/core/event_state.py | FlexBE/flexbe_behavior_engine | 735a3b68dfbe817db9383e53fef63afd6868219d | [
"BSD-3-Clause"
] | null | null | null | flexbe_core/flexbe_core/core/event_state.py | FlexBE/flexbe_behavior_engine | 735a3b68dfbe817db9383e53fef63afd6868219d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
from flexbe_core.logger import Logger
from flexbe_core.state_logger import StateLogger
from flexbe_core.core.preemptable_state import PreemptableState
from flexbe_core.core.priority_container import PriorityContainer
from flexbe_msgs.msg import CommandFeedback
from std_msgs.msg import Bool, Empty
from flexbe_core.core.operatable_state import OperatableState
@StateLogger.log_events('flexbe.events',
                        start='on_start', stop='on_stop',
                        pause='on_pause', resume='on_resume',
                        enter='on_enter', exit='on_exit')
@StateLogger.log_userdata('flexbe.userdata')
class EventState(OperatableState):
    """
    A state that allows implementing certain events.

    Subclasses override the ``on_*`` hooks below; this class wraps the
    state's ``execute`` so the hooks fire at the right points and so the
    operator can pause/resume/repeat the state via the flexbe command topics.
    """
    def __init__(self, *args, **kwargs):
        super(EventState, self).__init__(*args, **kwargs)
        # Wrap the subclass' execute() so _event_execute runs the event
        # handling around it on every tick.
        self.__execute = self.execute
        self.execute = self._event_execute
        self._entering = True  # True until on_enter has run for this activation
        self._skipped = False  # True while ticks are being skipped (paused)
        self._paused = False
        self._last_active_container = None
        self._feedback_topic = 'flexbe/command_feedback'
        self._repeat_topic = 'flexbe/command/repeat'
        self._pause_topic = 'flexbe/command/pause'
    def _event_execute(self, *args, **kwargs):
        # Handle an operator pause/resume command first, if one is pending.
        if self._is_controlled and self._sub.has_msg(self._pause_topic):
            msg = self._sub.get_last_msg(self._pause_topic)
            self._sub.remove_last_msg(self._pause_topic)
            if msg.data:
                Logger.localinfo("--> Pausing in state %s", self.name)
                self._pub.publish(self._feedback_topic, CommandFeedback(command="pause"))
                self._last_active_container = PriorityContainer.active_container
                # claim priority to propagate pause event
                PriorityContainer.active_container = self.path
                self._paused = True
            else:
                Logger.localinfo("--> Resuming in state %s", self.name)
                self._pub.publish(self._feedback_topic, CommandFeedback(command="resume"))
                PriorityContainer.active_container = self._last_active_container
                self._last_active_container = None
                self._paused = False
        # While paused (and not preempted) do not tick the wrapped execute.
        if self._paused and not PreemptableState.preempt:
            self._notify_skipped()
            return None
        # First tick after (re-)entering: fire on_enter.
        if self._entering:
            self._entering = False
            self.on_enter(*args, **kwargs)
        # First tick after a pause ended: fire on_resume.
        if self._skipped and not PreemptableState.preempt:
            self._skipped = False
            self.on_resume(*args, **kwargs)
        self._last_execution = EventState._node.get_clock().now()
        outcome = self.__execute(*args, **kwargs)
        # The operator may request the state to repeat (re-enter itself).
        repeat = False
        if self._is_controlled and self._sub.has_msg(self._repeat_topic):
            Logger.localinfo("--> Repeating state %s", self.name)
            self._sub.remove_last_msg(self._repeat_topic)
            self._pub.publish(self._feedback_topic, CommandFeedback(command="repeat"))
            repeat = True
        # Leave the state (fire on_exit, arm on_enter for next activation)
        # when repeating, or when an outcome was returned and we are not
        # being preempted.
        if repeat or outcome is not None and not PreemptableState.preempt:
            self._entering = True
            self.on_exit(*args, **kwargs)
        return outcome
    def _notify_skipped(self):
        # Called while execution is skipped (e.g. paused); fires on_pause once.
        if not self._skipped:
            self.on_pause()
            self._skipped = True
        super(EventState, self)._notify_skipped()
    def _enable_ros_control(self):
        # Subscribe to the operator command topics while under ROS control.
        super(EventState, self)._enable_ros_control()
        self._pub.createPublisher(self._feedback_topic, CommandFeedback)
        self._sub.subscribe(self._repeat_topic, Empty)
        self._sub.subscribe(self._pause_topic, Bool)
    def _disable_ros_control(self):
        super(EventState, self)._disable_ros_control()
        self._sub.unsubscribe_topic(self._repeat_topic)
        self._sub.unsubscribe_topic(self._pause_topic)
        self._last_active_container = None
        # Release the priority claim taken when pausing, if still held.
        if self._paused:
            PriorityContainer.active_container = None
    # Events
    # (just implement the ones you need)
    def on_start(self):
        """
        Will be executed once when the behavior starts.
        """
        pass
    def on_stop(self):
        """
        Will be executed once when the behavior stops or is preempted.
        """
        pass
    def on_pause(self):
        """
        Will be executed each time this state is paused.
        """
        pass
    def on_resume(self, userdata):
        """
        Will be executed each time this state is resumed.
        """
        pass
    def on_enter(self, userdata):
        """
        Will be executed each time the state is entered from any other state (but not from itself).
        """
        pass
    def on_exit(self, userdata):
        """
        Will be executed each time the state will be left to any other state (but not to itself).
        """
        pass
489ca84ac778f45da6f4fe68301b47a277486321 | 30,489 | py | Python | core/sawtooth/cli/stats_client.py | jrineck/sawtooth-core | e3eb79f32c97a25993c87eda7f77a02fd2086c7c | [
"Apache-2.0"
] | null | null | null | core/sawtooth/cli/stats_client.py | jrineck/sawtooth-core | e3eb79f32c97a25993c87eda7f77a02fd2086c7c | [
"Apache-2.0"
] | null | null | null | core/sawtooth/cli/stats_client.py | jrineck/sawtooth-core | e3eb79f32c97a25993c87eda7f77a02fd2086c7c | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import collections
import sys
import time
import signal
import json
import psutil
from twisted.internet import reactor
from twisted.internet import task
from twisted.web.client import Agent
from sawtooth.cli.stats_lib.stats_print import ConsolePrint
from sawtooth.cli.stats_lib.stats_print import StatsPrintManager
from sawtooth.cli.stats_lib.stats_utils import PlatformIntervalStats
from sawtooth.cli.stats_lib.stats_utils import SummaryStatsCsvManager
from sawtooth.cli.stats_lib.stats_utils import TopologyManager
from sawtooth.cli.stats_lib.stats_utils import TransactionRate
from sawtooth.cli.stats_lib.stats_utils import ValidatorStatsCsvManager
from sawtooth.cli.stats_lib.stats_utils import ValidatorCommunications
from sawtooth.cli.stats_lib.stats_utils import named_tuple_init
from sawtooth.cli.stats_lib.stats_utils import StatsSnapshotWriter
from sawtooth.cli.stats_lib.fork_detect import BranchManager
from sawtooth.cli.exceptions import CliException
# curses is unavailable on some platforms (e.g. Windows); when missing,
# fall back to plain scrolling console output.
CURSES_IMPORTED = True
try:
    import curses
except ImportError:
    CURSES_IMPORTED = False
class StatsClient(object):
    """Polls a single validator's HTTP statistics endpoint.

    Issues asynchronous requests via twisted and records each response (or
    the failure reason) in a ValidatorStatsManager.
    """
    def __init__(self, val_id, fullurl):
        self.val_id = val_id
        self.url = fullurl
        self.name = "validator_{0}".format(val_id)
        self.state = "UNKNWN"
        self.ledgerstats = {}
        self.nodestats = {}
        self.vsm = ValidatorStatsManager()
        self.responding = False
        self.no_response_reason = ""
        self.request_start = 0.0
        self.request_complete = 0.0
        self.response_time = 0.0
        self.validator_comm = ValidatorCommunications(Agent(reactor))
        self.path = None
    def stats_request(self):
        # request stats from specified validator url
        self.request_start = time.clock()
        self.path = self.url + "/statistics/all"
        self.validator_comm.get_request(
            self.path,
            self._stats_completion,
            self._stats_error)
    def _stats_completion(self, json_stats, response_code):
        """Handle an HTTP response; update stats on 200, mark silent else."""
        self.request_complete = time.clock()
        self.response_time = self.request_complete - self.request_start
        self.state = "RESP_{}".format(response_code)
        # compare with ==, not "is": int identity is a CPython small-int
        # caching artifact and must not be relied on for value comparison
        if response_code == 200:
            self.vsm.update_stats(json_stats, True, self.request_start,
                                  self.request_complete)
            self.responding = True
        else:
            self.responding = False
            self.no_response_reason = ""
    def _stats_error(self, failure):
        """Handle a transport-level failure (no HTTP response at all)."""
        self.vsm.update_stats(self.ledgerstats, False, 0, 0)
        self.responding = False
        self.state = "NO_RESP"
        self.no_response_reason = failure.type.__name__
# Per-validator derived statistics.  NOTE(review): the second field name
# ('pacet_...') is misspelled; it is kept as-is because CSV headers and any
# name-based consumers are derived from these field names via _fields.
ValStats = collections.namedtuple('calculated_validator_stats',
                                  'packet_bytes_received_total '
                                  'pacet_bytes_received_average '
                                  'packet_bytes_sent_total '
                                  'packet_bytes_sent_average '
                                  'average_transaction_rate '
                                  'average_block_time')
class ValidatorStatsManager(object):
    """Holds the most recent raw and derived statistics for one validator."""
    def __init__(self):
        self.calculated_stats = named_tuple_init(ValStats, 0)
        self.val_stats = None  # raw decoded /statistics/all payload
        # self.val_name = None
        # self.val_url = None
        self.active = False
        self.request_time = 0.0
        self.response_time = 0.0
        self.txn_rate = TransactionRate()
        self.psis = PlatformIntervalStats()
    def update_stats(self, json_stats, active, starttime, endtime):
        """Record one polling result.

        json_stats -- decoded /statistics/all payload (ignored when inactive)
        active -- whether the validator responded with usable stats
        starttime/endtime -- request timing used to compute response_time
        """
        if active:
            self.val_stats = json_stats.copy()
            # unpack stats that are delivered as lists of unnamed values
            bytes_received_total, bytes_received_average = \
                json_stats["packet"]["BytesReceived"]
            bytes_sent_total, bytes_sent_average = \
                json_stats["packet"]["BytesSent"]
            self.txn_rate.calculate_txn_rate(
                self.val_stats["journal"]["CommittedBlockCount"],
                self.val_stats["journal"].get("CommittedTxnCount", 0)
            )
            self.calculated_stats = ValStats(
                bytes_received_total,
                bytes_received_average,
                bytes_sent_total,
                bytes_sent_average,
                self.txn_rate.avg_txn_rate,
                self.txn_rate.avg_block_time
            )
            self.active = True
            self.request_time = starttime
            self.response_time = endtime - starttime
            self.psis.calculate_interval_stats(self.val_stats)
        else:
            self.active = False
            self.request_time = starttime
            self.response_time = endtime - starttime
# Cluster-wide summary records built by SystemStats.calculate_stats; their
# field names become the summary-CSV column names (via _fields).
SysClient = collections.namedtuple('sys_client',
                                   'starttime '
                                   'runtime '
                                   'known_validators '
                                   'active_validators '
                                   'avg_client_time '
                                   'max_client_time')
# Block-count extrema across all responding validators.
SysBlocks = collections.namedtuple('sys_blocks',
                                   'blocks_max_committed '
                                   'blocks_max_committed_count '
                                   'blocks_min_committed '
                                   'blocks_max_pending '
                                   'blocks_max_pending_count '
                                   'blocks_min_pending '
                                   'blocks_max_claimed '
                                   'blocks_min_claimed')
# Transaction-count extrema across all responding validators.
SysTxns = collections.namedtuple('sys_txns',
                                 'txns_max_committed '
                                 'txns_max_committed_count '
                                 'txns_min_committed '
                                 'txns_max_pending '
                                 'txns_max_pending_count '
                                 'txns_min_pending '
                                 'txn_rate')
# Packet-level extrema across all responding validators.
SysPackets = collections.namedtuple('sys_packets',
                                    'packets_max_dropped '
                                    'packets_min_dropped '
                                    'packets_max_duplicates '
                                    'packets_min_duplicates '
                                    'packets_max_acks_received '
                                    'packets_min_acks_received')
# Message-handling extrema across all responding validators.
SysMsgs = collections.namedtuple('sys_messages',
                                 'msgs_max_handled '
                                 'msgs_min_handled '
                                 'msgs_max_acked '
                                 'msgs_min_acked')
# PoET consensus summary (local mean times and last observed block id).
PoetStats = collections.namedtuple('poet_stats',
                                   'avg_local_mean '
                                   'max_local_mean '
                                   'min_local_mean '
                                   'last_unique_blockID')
class StatsCollector(object):
    """Base class for groups of named-tuple statistics.

    Subclasses populate self.statslist with named tuples; the methods below
    flatten the list for CSV output or turn it into a dict for JSON output.
    """
    def __init__(self):
        self.statslist = []
    def get_names(self):
        """
        Returns: All data element names as list - for csv writer (header)
        """
        names = []
        for stat in self.statslist:
            statname = type(stat).__name__
            for name in stat._fields:
                names.append(statname + "_" + name)
        return names
    def get_data(self):
        """
        Returns: All data element values in list - for csv writer
        """
        values = []
        for stat in self.statslist:
            for value in stat:
                values.append(value)
        return values
    def get_data_as_dict(self):
        """
        Returns: returns platform stats as dictionary - for stats web interface
        """
        p_stats = collections.OrderedDict()
        for stat in self.statslist:
            statname = type(stat).__name__
            p_stats[statname] = stat._asdict()
        return p_stats
    def pprint_stats(self):
        """Pretty-print all stats as indented JSON to stdout."""
        p_stats = self.get_data_as_dict()
        print json.dumps(p_stats, indent=4)
# CPU utilisation snapshot; values come from psutil.cpu_percent and
# psutil.cpu_times_percent (see PlatformStats.get_stats).
CpuStats = collections.namedtuple("scpu",
                                  'percent '
                                  'user_time '
                                  'system_time '
                                  'idle_time')
class PlatformStats(StatsCollector):
    """Snapshot of local host CPU / memory / disk / network counters."""
    def __init__(self):
        super(PlatformStats, self).__init__()
        # take an initial sample so statslist is populated immediately
        self.get_stats()
    def get_stats(self):
        """Sample the psutil counters and rebuild statslist."""
        cpu_percent = psutil.cpu_percent(interval=0)
        cpu_times = psutil.cpu_times_percent()
        self.cpu_stats = CpuStats(
            cpu_percent, cpu_times.user, cpu_times.system, cpu_times.idle)
        self.vmem_stats = psutil.virtual_memory()
        self.disk_stats = psutil.disk_io_counters()
        self.net_stats = psutil.net_io_counters()
        # named tuples are immutable, so rebuild the list on every sample
        self.statslist = [
            self.cpu_stats,
            self.vmem_stats,
            self.disk_stats,
            self.net_stats,
        ]
class SystemStats(StatsCollector):
    """Aggregates per-validator stats into cluster-wide min/max/avg summaries.

    collect_stats() gathers raw values from each responding client into
    accumulator lists; calculate_stats() then condenses them into the
    summary named tuples that make up self.statslist.
    """
    def __init__(self):
        super(SystemStats, self).__init__()
        self.starttime = int(time.time())
        self.runtime = 0
        self.known_validators = 0
        self.active_validators = 0
        self.avg_client_time = 0
        self.max_client_time = 0
        self.txn_rate = 0
        self.sys_client = named_tuple_init(
            SysClient, 0, {'starttime': self.starttime})
        self.sys_blocks = named_tuple_init(SysBlocks, 0)
        self.sys_txns = named_tuple_init(SysTxns, 0)
        self.sys_packets = named_tuple_init(SysPackets, 0)
        self.sys_msgs = named_tuple_init(SysMsgs, 0)
        self.poet_stats = named_tuple_init(
            PoetStats, 0.0, {'last_unique_blockID': ''})
        self.statslist = [self.sys_client, self.sys_blocks, self.sys_txns,
                          self.sys_packets, self.sys_msgs, self.poet_stats]
        self.last_unique_block_id = None
        # accumulators
        self.response_times = []
        self.blocks_claimed = []
        self.blocks_committed = []
        self.blocks_pending = []
        self.txns_committed = []
        self.txns_pending = []
        self.packets_dropped = []
        self.packets_duplicates = []
        self.packets_acks_received = []
        self.msgs_handled = []
        self.msgs_acked = []
        self.local_mean = []
        self.previous_blockid = []
        self.avg_local_mean = None
    def collect_stats(self, stats_clients):
        """Append each responding client's raw values to the accumulators."""
        # must clear the accumulators at start of each sample interval
        self.clear_accumulators()
        for c in stats_clients:
            if c.responding:
                self.active_validators += 1
                self.response_times.append(c.vsm.response_time)
                self.blocks_claimed.append(
                    c.vsm.val_stats["journal"]["BlocksClaimed"])
                self.blocks_committed.append(
                    c.vsm.val_stats["journal"]["CommittedBlockCount"])
                self.blocks_pending.append(
                    c.vsm.val_stats["journal"]["PendingBlockCount"])
                self.txns_committed.append(
                    c.vsm.val_stats["journal"].get("CommittedTxnCount", 0))
                self.txns_pending.append(
                    c.vsm.val_stats["journal"].get("PendingTxnCount", 0))
                self.packets_dropped.append(
                    c.vsm.val_stats["packet"]["DroppedPackets"])
                self.packets_duplicates.append(
                    c.vsm.val_stats["packet"]["DuplicatePackets"])
                self.packets_acks_received.append(
                    c.vsm.val_stats["packet"]["AcksReceived"])
                self.msgs_handled.append(
                    c.vsm.val_stats["packet"]["MessagesHandled"])
                self.msgs_acked.append(
                    c.vsm.val_stats["packet"]["MessagesAcked"])
                self.local_mean.append(
                    c.vsm.val_stats["journal"].get(
                        "LocalMeanTime", 0.0))
                self.previous_blockid.append(
                    c.vsm.val_stats["journal"].get(
                        "PreviousBlockID", 'broken'))
    def calculate_stats(self):
        """Condense the accumulators into the summary named tuples.

        Does nothing beyond updating runtime when no validator responded
        during the interval (avoids min()/max() on empty lists).
        """
        self.runtime = int(time.time()) - self.starttime
        if self.active_validators > 0:
            self.avg_client_time = sum(self.response_times)\
                / len(self.response_times)
            self.max_client_time = max(self.response_times)
            self.sys_client = SysClient(
                self.starttime,
                self.runtime,
                self.known_validators,
                self.active_validators,
                self.avg_client_time,
                self.max_client_time
            )
            blocks_max_committed = max(self.blocks_committed)
            blocks_max_pending = max(self.blocks_pending)
            self.sys_blocks = SysBlocks(
                blocks_max_committed,
                self.blocks_committed.count(blocks_max_committed),
                min(self.blocks_committed),
                blocks_max_pending,
                self.blocks_pending.count(blocks_max_pending),
                min(self.blocks_pending),
                max(self.blocks_claimed),
                min(self.blocks_claimed)
            )
            txns_max_committed = max(self.txns_committed)
            txns_max_pending = max(self.txns_pending)
            self.sys_txns = SysTxns(
                txns_max_committed,
                self.txns_committed.count(txns_max_committed),
                min(self.txns_committed),
                txns_max_pending,
                self.txns_pending.count(txns_max_pending),
                min(self.txns_pending),
                0
            )
            self.sys_packets = SysPackets(
                max(self.packets_dropped),
                min(self.packets_dropped),
                max(self.packets_duplicates),
                min(self.packets_duplicates),
                max(self.packets_acks_received),
                min(self.packets_acks_received)
            )
            self.sys_msgs = SysMsgs(
                max(self.msgs_handled),
                min(self.msgs_handled),
                max(self.msgs_acked),
                min(self.msgs_acked)
            )
            self.avg_local_mean = sum(self.local_mean) \
                / len(self.local_mean)
            unique_blockid_list = list(set(self.previous_blockid))
            self.last_unique_block_id = \
                unique_blockid_list[len(unique_blockid_list) - 1]
            self.poet_stats = PoetStats(
                self.avg_local_mean,
                max(self.local_mean),
                min(self.local_mean),
                self.last_unique_block_id
            )
            # because named tuples are immutable,
            # must create new stats list each time stats are updated.
            # BUG FIX: poet_stats is included so the row layout matches
            # __init__'s statslist (and therefore the CSV header generated
            # from it); previously it was dropped here, misaligning columns.
            self.statslist = [self.sys_client, self.sys_blocks,
                              self.sys_txns, self.sys_packets, self.sys_msgs,
                              self.poet_stats]
    def clear_accumulators(self):
        """Reset the per-interval accumulator lists."""
        self.blocks_claimed = []
        self.blocks_committed = []
        self.blocks_pending = []
        self.txns_committed = []
        self.txns_pending = []
        self.packets_dropped = []
        self.packets_duplicates = []
        self.packets_acks_received = []
        self.msgs_handled = []
        self.msgs_acked = []
        self.local_mean = []
        self.previous_blockid = []
    def get_stats_as_dict(self):
        # placeholder - not implemented for the summary stats
        pass
class StatsManager(object):
def __init__(self, endpointmanager):
self.epm = endpointmanager
self.console_print = ConsolePrint()
self.system_stats = SystemStats()
self.platform_stats = PlatformStats()
self.psis = PlatformIntervalStats()
self.platform_stats.psis = self.psis
self.previous_net_bytes_recv = 0
self.previous_net_bytes_sent = 0
self.clients = []
self.known_endpoint_names = []
self.endpoints = {}
self.stats_loop_count = 0
self.topology_mgr = TopologyManager(self.clients)
self.branch_manager = BranchManager(self.epm, Agent(reactor))
stats_providers = [self.system_stats,
self.platform_stats,
self.topology_mgr.topology_stats,
self.branch_manager,
self.clients]
self.spm = StatsPrintManager(*stats_providers)
self.ssw = StatsSnapshotWriter(*stats_providers)
self.sscm = SummaryStatsCsvManager(
self.system_stats, self.platform_stats)
self.vscm = ValidatorStatsCsvManager(self.clients)
def initialize_client_list(self, endpoints):
self.endpoints = endpoints
# add validator stats client for each endpoint
for val_num, endpoint in enumerate(endpoints.values()):
url = 'http://{0}:{1}'.format(
endpoint["Host"], endpoint["HttpPort"])
c = StatsClient(val_num, url)
c.name = endpoint["Name"]
self.known_endpoint_names.append(endpoint["Name"])
e = sys.exc_info()[0]
print ("error creating stats clients: ", e)
self.clients.append(c)
def update_client_list(self, endpoints):
self.endpoints = endpoints
# add validator stats client for each endpoint name
for val_num, endpoint in enumerate(endpoints.values()):
if endpoint["Name"] not in self.known_endpoint_names:
val_num = len(self.known_endpoint_names)
url = 'http://{0}:{1}'.format(
endpoint["Host"], endpoint["HttpPort"])
c = StatsClient(val_num, url)
c.name = endpoint["Name"]
self.clients.append(c)
self.known_endpoint_names.append(endpoint["Name"])
def stats_loop(self):
self.process_stats(self.clients)
self.print_stats()
self.csv_write()
self.ssw.write_snapshot()
for c in self.clients:
c.stats_request()
self.stats_loop_count += 1
return
def stats_loop_done(self, result):
reactor.stop()
def stats_loop_error(self, failure):
self.console_print.cpstop()
print failure
reactor.stop()
def process_stats(self, statsclients):
self.system_stats.known_validators = len(statsclients)
self.system_stats.active_validators = 0
self.system_stats.collect_stats(statsclients)
self.system_stats.calculate_stats()
self.platform_stats.get_stats()
psr = {"platform": self.platform_stats.get_data_as_dict()}
self.psis.calculate_interval_stats(psr)
self.topology_mgr.update_topology()
self.branch_manager.update_client_list(self.endpoints)
self.branch_manager.update()
def print_stats(self):
self.spm.print_stats()
def csv_init(self, enable_summary, enable_validator):
if enable_summary is True:
self.sscm.initialize()
if enable_validator is True:
self.vscm.initialize()
def csv_write(self):
self.sscm.write_stats()
self.vscm.write_stats()
def csv_stop(self):
self.sscm.stop()
self.vscm.stop()
def snapshot_write(self, signum, frame):
self.ssw.do_snapshot = True
def stats_stop(self):
print "StatsManager is stopping"
self.console_print.cpstop()
self.csv_stop()
class EndpointManager(object):
def __init__(self):
self.error_count = 0
self.no_endpoint_responders = False
self.initial_discovery = True
self.endpoint_urls = []
self.endpoints = {} # None
self.validator_comm = ValidatorCommunications(Agent(reactor))
self.contact_list = None
self.endpoint_completion_cb = None
self.initial_url = None
self.init_path = None
self.endpoint_completion_cb_args = None
def initialize_endpoint_discovery(self, url, init_cb, init_args=None):
# initialize endpoint urls from specified validator url
self.initial_url = url
self.endpoint_completion_cb = init_cb
self.endpoint_completion_cb_args = init_args or {}
path = url + "/store/{0}/*".format('EndpointRegistryTransaction')
self.init_path = path
self.validator_comm.get_request(
path, self.endpoint_discovery_response, self._init_terminate)
def update_endpoint_discovery(self, update_cb):
# initiates update of endpoint urls
self.endpoint_completion_cb = update_cb
self.endpoint_completion_cb_args = {}
self.contact_list = list(self.endpoint_urls)
url = self.contact_list.pop()
path = url + "/store/{0}/*".format('EndpointRegistryTransaction')
self.validator_comm.get_request(
path, self.endpoint_discovery_response,
self._update_endpoint_continue)
def endpoint_discovery_response(self, results, response_code):
# response has been received
# if response OK, then get host url & port number of each validator
# if response not OK, then validator must be busy,
# if initial discovery, try again, else try another validator
if response_code is 200:
updated_endpoint_urls = []
self.endpoints = results
for endpoint in results.values():
updated_endpoint_urls.append(
'http://{0}:{1}'.format(
endpoint["Host"], endpoint["HttpPort"]))
self.endpoint_urls = updated_endpoint_urls
self.endpoint_completion_cb(self.endpoints,
**self.endpoint_completion_cb_args)
self.initial_discovery = False
self.no_endpoint_responders = False
else:
if self.initial_discovery is True:
print "endpoint discovery: " \
"validator response not 200 - retrying"
self.contact_list = [self.initial_url]
self._update_endpoint_continue(None)
def _update_endpoint_continue(self, failure):
# if no response (or did not respond with 200 - see above),
# then try with another url from the contact list
# if all urls have been tried, set "no update" flag and be done
if len(self.contact_list) > 0:
url = self.contact_list.pop()
path = url + "/store/{0}/*".format('EndpointRegistryTransaction')
self.validator_comm.get_request(
path, self.endpoint_discovery_response,
self._update_endpoint_continue)
else:
self.no_endpoint_responders = True
def update_endpoint_loop_done(self, result):
reactor.stop()
def update_endpoint_loop_error(self, failure):
print "update endpoint loop error: "
print failure
reactor.stop()
def _init_terminate(self, failure):
print "failure during initial endpoint discovery"
print "request to {} returned {}".format(
self.init_path, failure.type.__name__)
print "error message: "
print failure.getErrorMessage()
print "stopping stats client"
reactor.stop()
return
def _parse_bool(value):
    """argparse-friendly bool converter accepting true/false-style strings."""
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ("true", "t", "yes", "y", "1"):
        return True
    if lowered in ("false", "f", "no", "n", "0"):
        return False
    raise argparse.ArgumentTypeError(
        "expected a boolean value, got %r" % (value,))
def add_stats_parser(subparsers, parent_parser):
    """Register the 'stats' sub-command and its options.

    BUG FIX: the csv flags previously used type=bool, which treats any
    non-empty string (including 'False') as True; _parse_bool interprets
    the string value correctly while keeping the same command-line shape.
    """
    parser = subparsers.add_parser('stats', parents=[parent_parser])
    parser.add_argument('--url',
                        metavar="",
                        help='Base validator url '
                             '(default: %(default)s)',
                        default="http://localhost:8800")
    parser.add_argument('--stats-time',
                        metavar="",
                        help='Interval between stats updates (s) '
                             '(default: %(default)s)',
                        default=3,
                        type=int)
    parser.add_argument('--endpoint-time',
                        metavar="",
                        help='Interval between endpoint updates (s) '
                             '(default: %(default)s)',
                        default=10,
                        type=int)
    parser.add_argument('--csv-enable-summary',
                        metavar="",
                        help='Enables summary CSV file generation'
                             '(default: %(default)s)',
                        default=False,
                        type=_parse_bool)
    parser.add_argument('--csv-enable-validator',
                        metavar="",
                        help='Enables per-validator CSV file generation'
                             '(default: %(default)s)',
                        default=False,
                        type=_parse_bool)
def startup(urls, loop_times, stats_man, ep_man):
    """Wire up the periodic stats and endpoint-discovery loops.

    Called once initial endpoint discovery succeeds; 'urls' is the
    discovered endpoint dict passed through by EndpointManager's callback.
    """
    stats_man.initialize_client_list(ep_man.endpoints)
    # start loop to periodically collect and report stats
    stats_loop = task.LoopingCall(stats_man.stats_loop)
    stats_loop_deferred = stats_loop.start(loop_times["stats"])
    stats_loop_deferred.addCallback(stats_man.stats_loop_done)
    stats_loop_deferred.addErrback(stats_man.stats_loop_error)
    # start loop to periodically update the list of validator endpoints
    # and call WorkManager.update_client_list
    ep_loop = task.LoopingCall(ep_man.update_endpoint_discovery,
                               stats_man.update_client_list)
    ep_loop_deferred = ep_loop.start(loop_times["endpoint"], now=False)
    ep_loop_deferred.addCallback(ep_man.update_endpoint_loop_done)
    ep_loop_deferred.addErrback(ep_man.update_endpoint_loop_error)
def run_stats(url,
              stats_update_frequency=3,
              endpoint_update_frequency=30,
              csv_enable_summary=False,
              csv_enable_validator=False
              ):
    """Discover validator endpoints and run the stats display until stopped.

    url -- base URL of a validator used for initial endpoint discovery
    stats_update_frequency -- seconds between stats polls
    endpoint_update_frequency -- seconds between endpoint re-discovery
    csv_enable_summary / csv_enable_validator -- enable CSV output files
    """
    try:
        # initialize globals when we are ready for stats display. This keeps
        # curses from messing up the status prints prior to stats start up.
        epm = EndpointManager()
        sm = StatsManager(epm) # sm assumes epm is created!
        # initialize csv stats file generation
        print "initializing csv"
        sm.csv_init(csv_enable_summary, csv_enable_validator)
        # set up SIGUSR1 handler for stats snapshots
        signal.signal(signal.SIGUSR1, sm.snapshot_write)
        # prevent curses import from modifying normal terminal operation
        # (suppression of cr-lf) during display of help screen, config settings
        if CURSES_IMPORTED:
            curses.endwin()
        # discover validator endpoints; if successful, continue with startup()
        epm.initialize_endpoint_discovery(
            url,
            startup,
            {
                'loop_times': {
                    "stats": stats_update_frequency,
                    'endpoint': endpoint_update_frequency},
                'stats_man': sm,
                'ep_man': epm
            })
        reactor.run()
        sm.stats_stop()
    except Exception as e:
        # restore the terminal before re-raising so the traceback is readable
        if CURSES_IMPORTED:
            curses.endwin()
        print e
        raise
def do_stats(opts):
    """CLI entry point for 'sawtooth stats'; see the synopsis comments below."""
    # Synopsis:
    #
    # 1) Twisted http Agent
    #    a) Handles http communications
    # 2) EndpointManager
    #    a) Maintains list of validator endpoints and their associated urls
    #    b) update_endpoint_urls is called periodically to update the list of
    #       registered urls
    # 3) StatsManager
    #    a) Creates instance of SystemStats and PlatformStats
    #    b) Maintains list of validator StatsClient instances
    #       using url list maintained by EndpointManager
    #    c) StatsManager.stats_loop is called periodically to...
    #       i) Call SystemStats.process() to generate summary statistics
    #       ii) Call StatsPrintManager.stats_print()
    #       iii) Call CsvManager.write() to write stats to CSV file
    #       iv) Call each StatsClient instance to initiate a stats request
    # 4) StatsClient
    #    a) Sends stats requests to its associated validator url
    #    b) Handles stats response
    #    c) Handles any errors, including unresponsive validator
    # 5) Global
    #    a) Creates instance of twisted http agent,
    #       StatsManager, and EndpointManager
    # 6) Main
    #    a) calls endpoint manager to initialize url list.
    #       i) Program continues at Setup() if request succeeds
    #       ii) Program terminates request fails
    #    b) sets up looping call for StatsManager.stats_loop
    #    c) sets up looping call for EndpointManager.update_validator_urls
    # 7) StatsPrintManager
    #    a) Handles formatting of console output
    # 8) ConsolePrint() manages low-level details of printing to console.
    #    When printing to posix (linux)console, curses allows a "top"-like
    #    non-scrolling display to be implemented. When printing to a
    #    non-posix console, results simply scroll.
    # 9) CsvManager
    #    a) Handles file management and timestamped output
    #       for csv file generation
    # 10) ValidatorCommunications
    #    a) Handles low-level details of issuing an http request
    #       via twisted http agent async i/o
    try:
        run_stats(opts.url,
                  csv_enable_summary=opts.csv_enable_summary,
                  csv_enable_validator=opts.csv_enable_validator,
                  stats_update_frequency=opts.stats_time,
                  endpoint_update_frequency=opts.endpoint_time)
    except Exception as e:
        # wrap any failure in the CLI's exception type for uniform reporting
        raise CliException(e)
| 36.956364 | 80 | 0.585195 |
4b6a98eb9a682d64ba53b329f1482c0068f26b4b | 177 | py | Python | 2. Programming Fundamentals With Python (May 2021)/05. Exercise - Data Types and Variables/01_integer_operations.py | kzborisov/SoftUni | ccb2b8850adc79bfb2652a45124c3ff11183412e | [
"MIT"
] | 1 | 2021-02-07T07:51:12.000Z | 2021-02-07T07:51:12.000Z | 2. Programming Fundamentals With Python (May 2021)/05. Exercise - Data Types and Variables/01_integer_operations.py | kzborisov/softuni | 9c5b45c74fa7d9748e9b3ea65a5ae4e15c142751 | [
"MIT"
] | null | null | null | 2. Programming Fundamentals With Python (May 2021)/05. Exercise - Data Types and Variables/01_integer_operations.py | kzborisov/softuni | 9c5b45c74fa7d9748e9b3ea65a5ae4e15c142751 | [
"MIT"
] | null | null | null | # Task 01. Integer Operations
first = int(input())
second = int(input())
third = int(input())
fourth = int(input())
result = ((first + second) // third) * fourth
print(result)
| 19.666667 | 45 | 0.655367 |
141fd4410fc8885a8e15101fa5abb828b7d0bb18 | 3,037 | py | Python | azure-servicefabric/azure/servicefabric/models/stateful_service_type_description.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | [
"MIT"
] | 2 | 2020-07-29T14:22:17.000Z | 2020-11-06T18:47:40.000Z | azure-servicefabric/azure/servicefabric/models/stateful_service_type_description.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | [
"MIT"
] | 1 | 2016-08-01T07:37:04.000Z | 2016-08-01T07:37:04.000Z | azure-servicefabric/azure/servicefabric/models/stateful_service_type_description.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | [
"MIT"
] | 1 | 2020-12-12T21:04:41.000Z | 2020-12-12T21:04:41.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .service_type_description import ServiceTypeDescription
class StatefulServiceTypeDescription(ServiceTypeDescription):
    """Stateful service type declared in the service manifest of a
    provisioned application type.

    :param is_stateful: True for a stateful service type, False for a
     stateless one.
    :type is_stateful: bool
    :param service_type_name: Name of the service type.
    :type service_type_name: str
    :param placement_constraints: Placement constraint used when
     instantiating this service in a Service Fabric cluster.
    :type placement_constraints: str
    :param service_placement_policies:
    :type service_placement_policies: list of
     :class:`ServicePlacementPolicyDescription
     <azure.servicefabric.models.ServicePlacementPolicyDescription>`
    :param extensions:
    :type extensions: list of :class:`ServiceTypeExtensionDescription
     <azure.servicefabric.models.ServiceTypeExtensionDescription>`
    :param Kind: Polymorphic Discriminator
    :type Kind: str
    :param has_persisted_state: True when this persistent service stores
     its state on the local disk, False otherwise.
    :type has_persisted_state: bool
    """

    # Kind is the polymorphic discriminator and must always be present.
    _validation = {'Kind': {'required': True}}

    _attribute_map = {
        'is_stateful': {'key': 'IsStateful', 'type': 'bool'},
        'service_type_name': {'key': 'ServiceTypeName', 'type': 'str'},
        'placement_constraints': {'key': 'PlacementConstraints', 'type': 'str'},
        'service_placement_policies': {
            'key': 'ServicePlacementPolicies',
            'type': '[ServicePlacementPolicyDescription]'},
        'extensions': {
            'key': 'Extensions',
            'type': '[ServiceTypeExtensionDescription]'},
        'Kind': {'key': 'Kind', 'type': 'str'},
        'has_persisted_state': {'key': 'HasPersistedState', 'type': 'bool'},
    }

    def __init__(self, is_stateful=None, service_type_name=None, placement_constraints=None, service_placement_policies=None, extensions=None, has_persisted_state=None):
        super(StatefulServiceTypeDescription, self).__init__(
            is_stateful=is_stateful,
            service_type_name=service_type_name,
            placement_constraints=placement_constraints,
            service_placement_policies=service_placement_policies,
            extensions=extensions)
        self.has_persisted_state = has_persisted_state
        self.Kind = 'Stateful'
748f13b5ee88241ff423517351f146cd908d63a8 | 253 | py | Python | eval/ds/ds1/partie_c/probleme1.py | icecodder/nsi | eeb08932c1aa11f31bbdaae01361a526c5279527 | [
"MIT"
] | 4 | 2021-09-24T16:19:06.000Z | 2021-10-06T16:21:53.000Z | eval/ds/ds1/partie_c/probleme1.py | icecodder/nsi | eeb08932c1aa11f31bbdaae01361a526c5279527 | [
"MIT"
] | 1 | 2021-10-06T16:25:25.000Z | 2021-11-28T08:11:14.000Z | eval/ds/ds1/partie_c/probleme1.py | icecodder/nsi | eeb08932c1aa11f31bbdaae01361a526c5279527 | [
"MIT"
] | null | null | null | """
Sujet: NSI DS1 - Partie C : Problème 1
Nom: Charrier
Prénom: Max
Date: 7/10/2021
"""
def tiragePhotos(n):
    """Return the price of printing n photos.

    Pricing tiers (price per photo):
      n < 50        -> 0.20
      50 <= n < 100 -> 0.15
      n >= 100      -> 0.10

    BUG FIX: the middle tier previously tested ``n >= 100 and n < 100``,
    which is always false, so any n in [50, 100) fell through and the
    function returned None.
    """
    if n < 50:
        return 0.2 * n
    elif 50 <= n < 100:
        return 0.15 * n
    else:
        return 0.1 * n

print(tiragePhotos(10))
| 14.882353 | 38 | 0.588933 |
2fee837fe917d9bc1f8cedc1cd348c0b69e89044 | 20,851 | py | Python | packages/pytea/pytest/benchmarks/transformers/examples/seq2seq/test_seq2seq_examples.py | lego0901/pytea | 8ede650def2e68f4610ba816451d8b9e28f09f76 | [
"MIT"
] | 1 | 2020-11-14T06:08:38.000Z | 2020-11-14T06:08:38.000Z | packages/pytea/pytest/benchmarks/transformers/examples/seq2seq/test_seq2seq_examples.py | lego0901/pytea | 8ede650def2e68f4610ba816451d8b9e28f09f76 | [
"MIT"
] | null | null | null | packages/pytea/pytest/benchmarks/transformers/examples/seq2seq/test_seq2seq_examples.py | lego0901/pytea | 8ede650def2e68f4610ba816451d8b9e28f09f76 | [
"MIT"
] | 1 | 2020-11-16T23:12:50.000Z | 2020-11-16T23:12:50.000Z | import argparse
import logging
import os
import sys
import tempfile
from pathlib import Path
from unittest.mock import patch
import pytest
import pytorch_lightning as pl
import torch
import lightning_base
from convert_pl_checkpoint_to_hf import convert_pl_to_hf
from distillation import distill_main
from finetune import SummarizationModule, main
from parameterized import parameterized
from run_eval import generate_summaries_or_translations, run_generate
from run_eval_search import run_search
from transformers import AutoConfig, AutoModelForSeq2SeqLM
from transformers.hf_api import HfApi
from transformers.testing_utils import (
CaptureStderr,
CaptureStdout,
TestCasePlus,
require_torch_gpu,
require_torch_non_multi_gpu_but_fix_me,
slow,
)
from utils import ROUGE_KEYS, label_smoothed_nll_loss, lmap, load_json
# Root logger at DEBUG so the CLI subprocesses' output is visible when needed;
# logging is globally disabled further down once fixtures are defined.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
# Evaluated once at import time; used below to decide whether runs request a GPU.
CUDA_AVAILABLE = torch.cuda.is_available()
# Minimal hyperparameters for finetune/distillation CLI runs: tiny models,
# one epoch, tiny batch and sequence sizes, so each invocation stays fast.
# Individual tests copy this dict and override only what they exercise.
CHEAP_ARGS = {
    "max_tokens_per_batch": None,
    "supervise_forward": True,
    "normalize_hidden": True,
    "label_smoothing": 0.2,
    "eval_max_gen_length": None,
    "eval_beams": 1,
    "val_metric": "loss",
    "save_top_k": 1,
    "adafactor": True,
    "early_stopping_patience": 2,
    "logger_name": "default",
    "length_penalty": 0.5,
    "cache_dir": "",
    "task": "summarization",
    "num_workers": 2,
    "alpha_hid": 0,
    "freeze_embeds": True,
    "enc_only": False,
    "tgt_suffix": "",
    "resume_from_checkpoint": None,
    "sortish_sampler": True,
    "student_decoder_layers": 1,
    "val_check_interval": 1.0,
    "output_dir": "",
    "fp16": False,  # TODO(SS): set this to CUDA_AVAILABLE if ci installs apex or start using native amp
    "no_teacher": False,
    "fp16_opt_level": "O1",
    "gpus": 1 if CUDA_AVAILABLE else 0,
    "n_tpu_cores": 0,
    "max_grad_norm": 1.0,
    "do_train": True,
    "do_predict": True,
    "accumulate_grad_batches": 1,
    "server_ip": "",
    "server_port": "",
    "seed": 42,
    "model_name_or_path": "sshleifer/bart-tiny-random",
    "config_name": "",
    "tokenizer_name": "facebook/bart-large",
    "do_lower_case": False,
    "learning_rate": 0.3,
    "lr_scheduler": "linear",
    "weight_decay": 0.0,
    "adam_epsilon": 1e-08,
    "warmup_steps": 0,
    "max_epochs": 1,
    "train_batch_size": 2,
    "eval_batch_size": 2,
    "max_source_length": 12,
    "max_target_length": 12,
    "val_max_target_length": 12,
    "test_max_target_length": 12,
    "fast_dev_run": False,
    "no_cache": False,
    "n_train": -1,
    "n_val": -1,
    "n_test": -1,
    "student_encoder_layers": 1,
    "freeze_encoder": False,
    "auto_scale_batch_size": False,
    "overwrite_output_dir": False,
    "student": None,
}
def _dump_articles(path: Path, articles: list):
content = "\n".join(articles)
Path(path).open("w").writelines(content)
# Tiny article/summary fixtures written to disk by make_test_data_dir().
ARTICLES = [" Sam ate lunch today.", "Sams lunch ingredients."]
SUMMARIES = ["A very interesting story about what I ate for lunch.", "Avocado, celery, turkey, coffee"]
# Hub ids of miniature model checkpoints so downloads and forward passes stay fast.
T5_TINY = "patrickvonplaten/t5-tiny-random"
T5_TINIER = "sshleifer/t5-tinier-random"
BART_TINY = "sshleifer/bart-tiny-random"
MBART_TINY = "sshleifer/tiny-mbart"
MARIAN_TINY = "sshleifer/tiny-marian-en-de"
FSMT_TINY = "stas/tiny-wmt19-en-de"

# Mirror logger output to stdout so pytest can capture it.
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)
logging.disable(logging.CRITICAL)  # remove noisy download output from tracebacks
def make_test_data_dir(tmp_dir):
    """Write tiny train/val/test source+target fixture files into *tmp_dir* and return it."""
    for split in ("train", "val", "test"):
        for suffix, lines in (("source", ARTICLES), ("target", SUMMARIES)):
            _dump_articles(os.path.join(tmp_dir, f"{split}.{suffix}"), lines)
    return tmp_dir
class TestSummarizationDistiller(TestCasePlus):
    """End-to-end tests for the seq2seq distillation CLI (distillation.py)."""

    @classmethod
    def setUpClass(cls):
        logging.disable(logging.CRITICAL)  # remove noisy download output from tracebacks
        return cls

    @slow
    @require_torch_gpu
    @require_torch_non_multi_gpu_but_fix_me
    def test_hub_configs(self):
        """I put require_torch_gpu cause I only want this to run with self-scheduled."""
        model_list = HfApi().model_list()
        org = "sshleifer"
        model_ids = [x.modelId for x in model_list if x.modelId.startswith(org)]
        allowed_to_be_broken = ["sshleifer/blenderbot-3B", "sshleifer/blenderbot-90M"]
        failures = []
        for m in model_ids:
            if m in allowed_to_be_broken:
                continue
            try:
                AutoConfig.from_pretrained(m)
            except Exception:
                failures.append(m)
        assert not failures, f"The following models could not be loaded through AutoConfig: {failures}"

    @require_torch_non_multi_gpu_but_fix_me
    def test_distill_no_teacher(self):
        """Distillation run without a teacher model (plain student training)."""
        updates = dict(student_encoder_layers=2, student_decoder_layers=1, no_teacher=True)
        self._test_distiller_cli(updates)

    @require_torch_non_multi_gpu_but_fix_me
    def test_distill_checkpointing_with_teacher(self):
        """Checkpoints and best_tfmr exports are produced, usable, and convertible to HF format."""
        updates = dict(
            student_encoder_layers=2,
            student_decoder_layers=1,
            max_epochs=4,
            val_check_interval=0.25,
            alpha_hid=2.0,
            model_name_or_path="IGNORE_THIS_IT_DOESNT_GET_USED",
        )
        model = self._test_distiller_cli(updates, check_contents=False)
        ckpts = list(Path(model.output_dir).glob("*.ckpt"))
        self.assertEqual(1, len(ckpts))
        transformer_ckpts = list(Path(model.output_dir).glob("**/*.bin"))
        self.assertEqual(len(transformer_ckpts), 2)
        examples = lmap(str.strip, Path(model.hparams.data_dir).joinpath("test.source").open().readlines())
        out_path = tempfile.mktemp()  # XXX: not being cleaned up
        generate_summaries_or_translations(examples, out_path, str(model.output_dir / "best_tfmr"))
        self.assertTrue(Path(out_path).exists())
        out_path_new = self.get_auto_remove_tmp_dir()
        convert_pl_to_hf(ckpts[0], transformer_ckpts[0].parent, out_path_new)
        assert os.path.exists(os.path.join(out_path_new, "pytorch_model.bin"))

    @require_torch_non_multi_gpu_but_fix_me
    def test_loss_fn(self):
        """Compare the model's built-in loss against our label-smoothed NLL loss."""
        model = AutoModelForSeq2SeqLM.from_pretrained(BART_TINY, return_dict=True)
        input_ids, mask = model.dummy_inputs["input_ids"], model.dummy_inputs["attention_mask"]
        target_ids = torch.tensor([[0, 4, 8, 2], [0, 8, 2, 1]], dtype=torch.long, device=model.device)
        decoder_input_ids = target_ids[:, :-1].contiguous()  # Why this line?
        lm_labels = target_ids[:, 1:].clone()  # why clone?
        model_computed_loss = model(
            input_ids, attention_mask=mask, decoder_input_ids=decoder_input_ids, labels=lm_labels, use_cache=False
        ).loss
        logits = model(input_ids, attention_mask=mask, decoder_input_ids=decoder_input_ids, use_cache=False).logits
        lprobs = torch.nn.functional.log_softmax(logits, dim=-1)
        smoothed_loss, nll_loss = label_smoothed_nll_loss(
            lprobs, lm_labels, 0.1, ignore_index=model.config.pad_token_id
        )
        with self.assertRaises(AssertionError):
            # TODO: understand why this breaks
            self.assertEqual(nll_loss, model_computed_loss)

    @require_torch_non_multi_gpu_but_fix_me
    def test_distill_mbart(self):
        """Translation distillation with the tiny mBART checkpoint."""
        updates = dict(
            student_encoder_layers=2,
            student_decoder_layers=1,
            num_train_epochs=4,
            val_check_interval=0.25,
            alpha_hid=2.0,
            task="translation",
            model_name_or_path="IGNORE_THIS_IT_DOESNT_GET_USED",
            tokenizer_name=MBART_TINY,
            teacher=MBART_TINY,
            src_lang="en_XX",
            tgt_lang="ro_RO",
        )
        model = self._test_distiller_cli(updates, check_contents=False)
        assert model.model.config.model_type == "mbart"
        ckpts = list(Path(model.output_dir).glob("*.ckpt"))
        self.assertEqual(1, len(ckpts))
        transformer_ckpts = list(Path(model.output_dir).glob("**/*.bin"))
        all_files = list(Path(model.output_dir).glob("best_tfmr/*"))
        assert len(all_files) > 2
        self.assertEqual(len(transformer_ckpts), 2)

    @require_torch_non_multi_gpu_but_fix_me
    def test_distill_t5(self):
        """Distillation where teacher and student are both tiny T5 models."""
        updates = dict(
            student_encoder_layers=1,
            student_decoder_layers=1,
            alpha_hid=2.0,
            teacher=T5_TINY,
            model_name_or_path=T5_TINY,
            tokenizer_name=T5_TINY,
        )
        self._test_distiller_cli(updates)

    @require_torch_non_multi_gpu_but_fix_me
    def test_distill_different_base_models(self):
        """Teacher and student may come from different base checkpoints."""
        updates = dict(
            teacher=T5_TINY,
            student=T5_TINIER,
            model_name_or_path=T5_TINIER,
            tokenizer_name=T5_TINIER,
        )
        self._test_distiller_cli(updates)

    def _test_distiller_cli(self, updates, check_contents=True):
        """Run distill_main with CHEAP_ARGS + *updates*; optionally verify output dir and metrics."""
        default_updates = dict(
            label_smoothing=0.0,
            early_stopping_patience=-1,
            train_batch_size=1,
            eval_batch_size=2,
            max_epochs=2,
            alpha_mlm=0.2,
            alpha_ce=0.8,
            do_predict=True,
            model_name_or_path="sshleifer/tinier_bart",
            teacher=CHEAP_ARGS["model_name_or_path"],
            val_check_interval=0.5,
        )
        default_updates.update(updates)
        args_d: dict = CHEAP_ARGS.copy()
        tmp_dir = make_test_data_dir(tmp_dir=self.get_auto_remove_tmp_dir())
        output_dir = self.get_auto_remove_tmp_dir()
        args_d.update(data_dir=tmp_dir, output_dir=output_dir, **default_updates)
        model = distill_main(argparse.Namespace(**args_d))
        if not check_contents:
            return model
        contents = os.listdir(output_dir)
        contents = {os.path.basename(p) for p in contents}
        ckpt_files = [p for p in contents if p.endswith("ckpt")]
        assert len(ckpt_files) > 0
        self.assertIn("test_generations.txt", contents)
        self.assertIn("test_results.txt", contents)
        # One val entry per validation pass, plus the pre-training sanity check.
        metrics = load_json(model.metrics_save_path)
        last_step_stats = metrics["val"][-1]
        self.assertGreaterEqual(last_step_stats["val_avg_gen_time"], 0.01)
        self.assertGreaterEqual(1.0, last_step_stats["val_avg_gen_time"])
        self.assertIsInstance(last_step_stats[f"val_avg_{model.val_metric}"], float)
        desired_n_evals = int(args_d["max_epochs"] * (1 / args_d["val_check_interval"]) + 1)
        self.assertEqual(len(metrics["val"]), desired_n_evals)
        self.assertEqual(len(metrics["test"]), 1)
        return model
class TestTheRest(TestCasePlus):
    """Tests for run_eval / run_eval_search and the finetune.py CLI."""

    def run_eval_tester(self, model):
        """Run run_generate() for *model* over a one-article file and check output exists."""
        input_file_name = Path(self.get_auto_remove_tmp_dir()) / "utest_input.source"
        output_file_name = input_file_name.parent / "utest_output.txt"
        assert not output_file_name.exists()
        articles = [" New York (CNN)When Liana Barrientos was 23 years old, she got married in Westchester County."]
        _dump_articles(input_file_name, articles)
        score_path = str(Path(self.get_auto_remove_tmp_dir()) / "scores.json")
        task = "translation_en_to_de" if model == T5_TINY else "summarization"
        testargs = f"""
            run_eval_search.py
            {model}
            {input_file_name}
            {output_file_name}
            --score_path {score_path}
            --task {task}
            --num_beams 2
            --length_penalty 2.0
        """.split()
        with patch.object(sys, "argv", testargs):
            run_generate()
            assert Path(output_file_name).exists()
            # os.remove(Path(output_file_name))

    # test one model to quickly (no-@slow) catch simple problems and do an
    # extensive testing of functionality with multiple models as @slow separately
    @require_torch_non_multi_gpu_but_fix_me
    def test_run_eval(self):
        self.run_eval_tester(T5_TINY)

    # any extra models should go into the list here - can be slow
    @parameterized.expand([BART_TINY, MBART_TINY])
    @slow
    @require_torch_non_multi_gpu_but_fix_me
    def test_run_eval_slow(self, model):
        self.run_eval_tester(model)

    # testing with 2 models to validate: 1. translation (t5) 2. summarization (mbart)
    @parameterized.expand([T5_TINY, MBART_TINY])
    @slow
    @require_torch_non_multi_gpu_but_fix_me
    def test_run_eval_search(self, model):
        """run_search() over a small beam/length-penalty grid; verify report contents."""
        input_file_name = Path(self.get_auto_remove_tmp_dir()) / "utest_input.source"
        output_file_name = input_file_name.parent / "utest_output.txt"
        assert not output_file_name.exists()
        text = {
            "en": ["Machine learning is great, isn't it?", "I like to eat bananas", "Tomorrow is another great day!"],
            "de": [
                "Maschinelles Lernen ist großartig, oder?",
                "Ich esse gerne Bananen",
                "Morgen ist wieder ein toller Tag!",
            ],
        }
        tmp_dir = Path(self.get_auto_remove_tmp_dir())
        score_path = str(tmp_dir / "scores.json")
        reference_path = str(tmp_dir / "val.target")
        _dump_articles(input_file_name, text["en"])
        _dump_articles(reference_path, text["de"])
        task = "translation_en_to_de" if model == T5_TINY else "summarization"
        testargs = f"""
            run_eval_search.py
            {model}
            {str(input_file_name)}
            {str(output_file_name)}
            --score_path {score_path}
            --reference_path {reference_path}
            --task {task}
        """.split()
        testargs.extend(["--search", "num_beams=1:2 length_penalty=0.9:1.0"])
        with patch.object(sys, "argv", testargs):
            with CaptureStdout() as cs:
                run_search()
            expected_strings = [" num_beams | length_penalty", model, "Best score args"]
            un_expected_strings = ["Info"]
            if "translation" in task:
                expected_strings.append("bleu")
            else:
                expected_strings.extend(ROUGE_KEYS)
            for w in expected_strings:
                assert w in cs.out
            for w in un_expected_strings:
                assert w not in cs.out
            assert Path(output_file_name).exists()
            os.remove(Path(output_file_name))

    @parameterized.expand(
        [T5_TINY, BART_TINY, MBART_TINY, MARIAN_TINY, FSMT_TINY],
    )
    @require_torch_non_multi_gpu_but_fix_me
    def test_finetune(self, model):
        """Full finetune run per architecture; verify freezing and embedding sharing."""
        args_d: dict = CHEAP_ARGS.copy()
        task = "translation" if model in [MBART_TINY, MARIAN_TINY, FSMT_TINY] else "summarization"
        args_d["label_smoothing"] = 0.1 if task == "translation" else 0
        tmp_dir = make_test_data_dir(tmp_dir=self.get_auto_remove_tmp_dir())
        output_dir = self.get_auto_remove_tmp_dir()
        args_d.update(
            data_dir=tmp_dir,
            model_name_or_path=model,
            tokenizer_name=None,
            train_batch_size=2,
            eval_batch_size=2,
            output_dir=output_dir,
            do_predict=True,
            task=task,
            src_lang="en_XX",
            tgt_lang="ro_RO",
            freeze_encoder=True,
            freeze_embeds=True,
        )
        assert "n_train" in args_d
        args = argparse.Namespace(**args_d)
        module = main(args)
        input_embeds = module.model.get_input_embeddings()
        assert not input_embeds.weight.requires_grad
        if model == T5_TINY:
            lm_head = module.model.lm_head
            assert not lm_head.weight.requires_grad
            assert (lm_head.weight == input_embeds.weight).all().item()
        elif model == FSMT_TINY:
            fsmt = module.model.model
            embed_pos = fsmt.decoder.embed_positions
            assert not embed_pos.weight.requires_grad
            assert not fsmt.decoder.embed_tokens.weight.requires_grad
            # check that embeds are not the same
            assert fsmt.decoder.embed_tokens != fsmt.encoder.embed_tokens
        else:
            bart = module.model.model
            embed_pos = bart.decoder.embed_positions
            assert not embed_pos.weight.requires_grad
            assert not bart.shared.weight.requires_grad
            # check that embeds are the same
            assert bart.decoder.embed_tokens == bart.encoder.embed_tokens
            assert bart.decoder.embed_tokens == bart.shared
        example_batch = load_json(module.output_dir / "text_batch.json")
        assert isinstance(example_batch, dict)
        assert len(example_batch) >= 4

    @require_torch_non_multi_gpu_but_fix_me
    def test_finetune_extra_model_args(self):
        """Extra dropout/layerdrop CLI args override the config, or fail loudly when unsupported."""
        args_d: dict = CHEAP_ARGS.copy()
        task = "summarization"
        tmp_dir = make_test_data_dir(tmp_dir=self.get_auto_remove_tmp_dir())
        args_d.update(
            data_dir=tmp_dir,
            tokenizer_name=None,
            train_batch_size=2,
            eval_batch_size=2,
            do_predict=False,
            task=task,
            src_lang="en_XX",
            tgt_lang="ro_RO",
            freeze_encoder=True,
            freeze_embeds=True,
        )
        # test models whose config includes the extra_model_args
        model = BART_TINY
        output_dir = self.get_auto_remove_tmp_dir()
        args_d1 = args_d.copy()
        args_d1.update(
            model_name_or_path=model,
            output_dir=output_dir,
        )
        extra_model_params = ("encoder_layerdrop", "decoder_layerdrop", "dropout", "attention_dropout")
        for p in extra_model_params:
            args_d1[p] = 0.5
        args = argparse.Namespace(**args_d1)
        model = main(args)
        for p in extra_model_params:
            assert getattr(model.config, p) == 0.5, f"failed to override the model config for param {p}"
        # test models whose config doesn't include the extra_model_args
        model = T5_TINY
        output_dir = self.get_auto_remove_tmp_dir()
        args_d2 = args_d.copy()
        args_d2.update(
            model_name_or_path=model,
            output_dir=output_dir,
        )
        unsupported_param = "encoder_layerdrop"
        args_d2[unsupported_param] = 0.5
        args = argparse.Namespace(**args_d2)
        with pytest.raises(Exception) as excinfo:
            model = main(args)
        assert str(excinfo.value) == f"model config doesn't have a `{unsupported_param}` attribute"

    @require_torch_non_multi_gpu_but_fix_me
    def test_finetune_lr_schedulers(self):
        """--help lists supported schedulers; bad names exit; good names train."""
        args_d: dict = CHEAP_ARGS.copy()
        task = "summarization"
        tmp_dir = make_test_data_dir(tmp_dir=self.get_auto_remove_tmp_dir())
        model = BART_TINY
        output_dir = self.get_auto_remove_tmp_dir()
        args_d.update(
            data_dir=tmp_dir,
            model_name_or_path=model,
            output_dir=output_dir,
            tokenizer_name=None,
            train_batch_size=2,
            eval_batch_size=2,
            do_predict=False,
            task=task,
            src_lang="en_XX",
            tgt_lang="ro_RO",
            freeze_encoder=True,
            freeze_embeds=True,
        )
        # emulate finetune.py
        parser = argparse.ArgumentParser()
        parser = pl.Trainer.add_argparse_args(parser)
        parser = SummarizationModule.add_model_specific_args(parser, os.getcwd())
        args = {"--help": True}
        # --help test
        with pytest.raises(SystemExit) as excinfo:
            with CaptureStdout() as cs:
                args = parser.parse_args(args)
            assert False, "--help is expected to sys.exit"
        assert excinfo.type == SystemExit
        expected = lightning_base.arg_to_scheduler_metavar
        assert expected in cs.out, "--help is expected to list the supported schedulers"
        # --lr_scheduler=non_existing_scheduler test
        unsupported_param = "non_existing_scheduler"
        args = {f"--lr_scheduler={unsupported_param}"}
        with pytest.raises(SystemExit) as excinfo:
            with CaptureStderr() as cs:
                args = parser.parse_args(args)
            assert False, "invalid argument is expected to sys.exit"
        assert excinfo.type == SystemExit
        expected = f"invalid choice: '{unsupported_param}'"
        assert expected in cs.err, f"should have bailed on invalid choice of scheduler {unsupported_param}"
        # --lr_scheduler=existing_scheduler test
        supported_param = "cosine"
        args_d1 = args_d.copy()
        args_d1["lr_scheduler"] = supported_param
        args = argparse.Namespace(**args_d1)
        model = main(args)
        assert (
            getattr(model.hparams, "lr_scheduler") == supported_param
        ), f"lr_scheduler={supported_param} shouldn't fail"
| 37.773551 | 118 | 0.646204 |
7de9086bfc4dca142bb0d5c83f797c7d24a24d2c | 1,224 | py | Python | nomadgram/notifications/models.py | HaeSeon0363/instaclone | fd65bb466769175dc607d369e10d01dbd077af06 | [
"MIT"
] | null | null | null | nomadgram/notifications/models.py | HaeSeon0363/instaclone | fd65bb466769175dc607d369e10d01dbd077af06 | [
"MIT"
] | null | null | null | nomadgram/notifications/models.py | HaeSeon0363/instaclone | fd65bb466769175dc607d369e10d01dbd077af06 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from nomadgram.users import models as user_models
from nomadgram.images import models as image_models
class Notification(image_models.TimeStampedModel):
    """A user-to-user event (like / comment / follow), optionally tied to an image."""

    # Allowed values for notification_type: (stored value, human label).
    TYPE_CHOICES = (
        ('like', 'Like'),
        ('comment', 'Comment'),
        ('follow', 'Follow'),
    )

    # Who triggered the notification and who receives it.
    creator = models.ForeignKey(user_models.User, related_name='creator', on_delete=models.CASCADE)
    to = models.ForeignKey(user_models.User, related_name='to', on_delete=models.CASCADE)
    notification_type = models.CharField(max_length=20, choices=TYPE_CHOICES)
    # Optional fields; presumably only set for like/comment notifications -- confirm with callers.
    image = models.ForeignKey(image_models.Image, null=True, blank=True, on_delete=models.CASCADE)
    comment = models.TextField(null=True, blank=True)

    class Meta:
        # Oldest first, by the TimeStampedModel creation timestamp.
        ordering = ['created_at']

    def __str__(self):
        """Human-readable summary used in the admin and logs."""
        return 'From: {} - To: {}'.format(self.creator, self.to)
def create_notification(creator, to, type, image=None, comment=None):
    """Create, save, and return a Notification row.

    *type* must be one of Notification.TYPE_CHOICES values (parameter name kept
    for backward compatibility even though it shadows the builtin).

    Bug fixes: the original called ``models.Notification`` (``models`` here is
    django.db.models, which has no such attribute), referenced an undefined
    ``notification_type`` name instead of the ``type`` argument, and issued a
    redundant ``save()`` after ``create()`` (which already persists the row).
    """
    notification = Notification.objects.create(
        creator=creator,
        to=to,
        notification_type=type,
        image=image,
        comment=comment,
    )
    return notification
| 29.142857 | 100 | 0.696078 |
82739159fa9261b001680091bf5a1572bf54d1c5 | 3,298 | py | Python | src/emptool/emp_utils.py | Visoar/EMP-FOR-ESP8266 | e045ed711f3c7a4de059bba3b8351ccdaa72eff7 | [
"MIT"
] | 4 | 2018-12-21T14:01:34.000Z | 2018-12-22T08:08:44.000Z | src/emptool/emp_utils.py | Visoar/EMP-FOR-ESP8266 | e045ed711f3c7a4de059bba3b8351ccdaa72eff7 | [
"MIT"
] | 1 | 2018-12-22T04:56:05.000Z | 2018-12-22T05:58:18.000Z | src/emptool/emp_utils.py | Visoar/EMP-FOR-ESP8266 | e045ed711f3c7a4de059bba3b8351ccdaa72eff7 | [
"MIT"
] | 1 | 2018-12-29T17:06:53.000Z | 2018-12-29T17:06:53.000Z | import gc
import os
class _const:
class ConstError(TypeError):
pass
def __setattr__(self, name, value):
if self.__dict__.get(name):
raise self.ConstError("Can't rebind const (%s)" % name)
else:
self.__dict__[name] = value
def is_folder(path):
    """Return True if *path* exists and can be listed as a directory."""
    try:
        os.listdir(path)
        return True
    except OSError:
        # Narrowed from a bare except: os.listdir signals "not a listable
        # directory" with OSError (on CPython, NotADirectoryError and
        # FileNotFoundError are subclasses); a bare except would also swallow
        # KeyboardInterrupt and programming errors.
        return False
def post_ip(ip):
    """Report this device's IP address to the 1zlab IDE endpoint (fire-and-forget HTTP POST)."""
    import urequests  # MicroPython HTTP client; imported lazily to save RAM
    # NOTE(review): the trailing comma inside the URL looks like a typo, but it
    # is part of the wire format the server receives -- confirm before changing.
    urequests.post('http://www.1zlab.com/ide/post/ip/?esp_ip=%s,' % ip)
def traverse(path):
    """Recursively describe *path* as nested {'name': ..., 'children': [...]} dicts."""
    children = []
    for entry in os.listdir(path):
        full = path + '/' + entry
        if is_folder(full):
            children.append(traverse(full))
        else:
            children.append(dict(name=full))
    return dict(name=path, children=children)
def config_path():
    """Ensure the 'config' directory exists and return its entry count.

    Rewritten from a convoluted try/except/finally (with a return inside
    finally and a bare except) into the straightforward ensure-then-count form.
    """
    try:
        os.mkdir('config')
    except OSError:
        # Directory already exists -- fall through and count its entries.
        pass
    return len(os.listdir('config'))
def webrepl_pass():
    """Return the stored WebREPL password read from config/webrepl.pass."""
    with open('config/webrepl.pass') as fp:
        return fp.read()
def rainbow(output, color=None):
    """Wrap *output* in ANSI bold colour codes; return it unchanged otherwise.

    Bug fix: an unrecognised colour name used to fall off the end of the
    if-chain and return None; now the text is returned uncoloured instead.
    """
    codes = {'green': '32', 'red': '31', 'blue': '34'}
    if color in codes:
        return '\033[1;%sm%s\033[0m' % (codes[color], output)
    return output
def print_left_just(output, length=None):
    """Pad *output* with trailing spaces up to *length*; no-op when length is None or shorter."""
    if length is None:
        return output
    return output.ljust(length)
def print_right_just(output, length):
    """Pad *output* with leading spaces up to *length*; no-op when length is None or shorter."""
    if length is None:
        return output
    return output.rjust(length)
def print_as_a_list_item(index, title, subtile=None):
    """Format one menu entry: an 8-column '[index]' prefix, a green title, and an
    optional subtitle on its own indented line."""
    # esp8266 doesn't support str.center, hence the manual padding
    prefix = '[%s]' % str(index)
    prefix = prefix + (8 - len(prefix)) * ' '
    body = print_left_just(rainbow(title, color='green'))
    if subtile:
        tail = '\n' + len(prefix) * ' ' + subtile
    else:
        tail = ''
    return prefix + body + tail
def selection(hint, range):
    """Prompt until the user enters an integer in [0, range], then return it.

    Bug fix: the original recursed on invalid input but discarded the
    recursive call's result, so every retry ultimately returned None. It also
    crashed with ValueError on non-numeric input; both cases now simply
    re-prompt. (The parameter name `range` shadows the builtin but is kept
    for interface compatibility.)
    """
    while True:
        index = input(rainbow(hint, color='blue'))
        try:
            value = int(index)
        except ValueError:
            print(rainbow('out of range!', color='red'))
            continue
        if 0 <= value <= range:
            return value
        print(rainbow('out of range!', color='red'))
def mem_analyze(func):
    """Decorator: print MicroPython heap statistics around a call to *func*.

    Relies on gc.mem_alloc()/gc.mem_free(), which exist on MicroPython only.

    Bug fix: the wrapper now propagates *func*'s return value; previously any
    decorated function effectively returned None.
    """
    def wrapper(*args, **kwargs):
        memory_alloc = 'memory alloced: %s kb' % str(gc.mem_alloc() / 1024)
        memory_free = 'memory free: %s kb' % str(gc.mem_free() / 1024)
        gc.collect()
        memory_after_collect = 'after collect: %s kb available' % str(
            gc.mem_free() / 1024)
        print(rainbow(memory_alloc, color='red'))
        print(rainbow(memory_free, color='green'))
        print(rainbow(memory_after_collect, color='blue'))
        result = func(*args, **kwargs)
        memory_after_func_excute = 'after %s excuted: %s kb available' % (
            func.__name__, str(gc.mem_free() / 1024))
        print(rainbow(memory_after_func_excute, color='red'))
        return result
    return wrapper
def sync_time():
    """Set the board's RTC from the 1zlab time API (network access required).

    Prints the RTC datetime before and after as a visual sanity check.
    """
    import urequests
    from machine import RTC  # MicroPython-only module
    rtc = RTC()
    print('before sync: ', rtc.datetime())
    # assumes the endpoint returns {"rtc": [...]} matching RTC.init's tuple
    # layout -- TODO confirm against the API
    time = urequests.get('http://www.1zlab.com/api/get-time/').json()
    # print(time)
    rtc.init(tuple(time['rtc']))
    print('after sync: ', rtc.datetime())
| 24.984848 | 75 | 0.576713 |
6b539cbe98866b68a65660f750af25209b36b6ee | 10,723 | py | Python | linemorph.py | reidevries/picmorph | be0ae0cb327b2560bfb57a81f26b2c8049fb4091 | [
"Unlicense"
] | null | null | null | linemorph.py | reidevries/picmorph | be0ae0cb327b2560bfb57a81f26b2c8049fb4091 | [
"Unlicense"
] | null | null | null | linemorph.py | reidevries/picmorph | be0ae0cb327b2560bfb57a81f26b2c8049fb4091 | [
"Unlicense"
] | null | null | null | from PIL import Image, ImageDraw, ImageChops
from scipy.spatial import Delaunay
import numpy as np
import os
import math
import subprocess
import shlex
# NOTE(review): truncated value of pi (math.pi would be exact); appears unused
# in this module -- confirm before removing or correcting.
M_PI = 3.14
# Working resolution used by reduceSize() and drawImageFromPoints().
reduce_width = 512
reduce_height = 512
def getPixel(image, coord):
    """Read a pixel from a PIL image, clamping *coord* into the image bounds."""
    width, height = image.size
    x = max(0, min(width - 1, coord[0]))
    y = max(0, min(height - 1, coord[1]))
    return image.getpixel((x, y))
def sumRGB(rgb):
    """Sum of the first three (R, G, B) channels; any alpha channel is ignored."""
    return sum(rgb[:3])
def addColors(colora, colorb):
    """Component-wise sum of two colours; each channel clamped to [0, 1], alpha fixed at 255.

    Expects three-element (or longer) tuples; extra channels are ignored.
    """
    channels = [max(0, min(1, ca + cb)) for ca, cb in zip(colora[:3], colorb[:3])]
    return (channels[0], channels[1], channels[2], 255)
def blendColors(colora, colorb, pos, alpha):
    """Linear blend of two colours (pos=0 -> colora, pos=1 -> colorb) with the given alpha."""
    mixed = [int(colora[c] * (1 - pos) + colorb[c] * pos) for c in range(3)]
    return (mixed[0], mixed[1], mixed[2], int(alpha))
def reduceSize(image):
    """Downscale *image* to the module-wide working resolution using bilinear sampling."""
    print("resizing to " + str(reduce_width) + "x" + str(reduce_height) + "...")
    target = (reduce_width, reduce_height)
    return image.resize(target, resample=Image.BILINEAR)
def equalSize(a, b):
    """Return copies of PIL images *a* and *b* on white RGBA canvases of equal size.

    When the sizes already match, each image is just pasted onto a white RGBA
    background; otherwise both are bilinearly resized to the per-axis maximum
    of the two sizes first.
    """
    aw, ah = a.size
    bw, bh = b.size
    if (bw == aw and bh == ah):
        # Same size: only normalise both onto white RGBA backgrounds.
        a_out = Image.new("RGBA", (bw,bh), "white")
        a_out.paste(a)
        b_out = Image.new("RGBA", (bw,bh), "white")
        b_out.paste(b)
        return a_out,b_out
    # Output dims: per-axis maximum of the two inputs.
    ow, oh = a.size
    if (bw > aw):
        ow = bw
    if (bh > ah):
        oh = bh
    print("resizing both images to " +str(ow)+"x"+str(oh)+"...")
    a_out = Image.new("RGBA", (ow,oh), "white")
    a_out.paste(a.resize((ow,oh), resample=Image.BILINEAR))
    b_out = Image.new("RGBA", (ow, oh), "white")
    b_out.paste(b.resize((ow, oh), resample=Image.BILINEAR))
    return a_out,b_out
def edgedetect(image, line_width=1):
    """Return a white RGBA image with black Sobel edges of *image*.

    A pixel is marked as an edge when its Sobel gradient magnitude (computed
    over the summed RGB channels) exceeds 96. Edges are drawn as single points,
    or as filled ellipses of diameter *line_width* when line_width > 1.
    """
    print("applying sobel edge detection...")
    width,height = image.size
    pixel = image.load()
    newimage = Image.new("RGBA", (width, height), "white")
    newdrawing = ImageDraw.Draw(newimage)
    for i in range(width):
        for j in range(height):
            # Clamped neighbour coordinates. Bug fix: the right/bottom
            # neighbours were previously min(i, width-1) / min(j, height-1),
            # i.e. the centre pixel itself, which corrupted the 3x3 Sobel
            # neighbourhood.
            il = max(i-1, 0)
            ir = min(i+1, width-1)
            ju = max(j-1, 0)
            jd = min(j+1, height-1)
            tl = sumRGB(pixel[il,ju])
            t = sumRGB(pixel[i,ju])
            tr = sumRGB(pixel[ir,ju])
            l = sumRGB(pixel[il,j])
            r = sumRGB(pixel[ir,j])
            bl = sumRGB(pixel[il,jd])
            b = sumRGB(pixel[i,jd])
            br = sumRGB(pixel[ir,jd])
            # Horizontal / vertical Sobel responses and their magnitude.
            gx = abs(tr-tl+2*(r-l)+br-bl)
            gy = abs(tl-bl+2*(t-b)+tr-br)
            g = int(math.sqrt(gx*gx + gy*gy))
            if (g > 96):
                if (line_width > 1):
                    newdrawing.ellipse([(i-line_width/2, j-line_width/2), (i+line_width/2, j+line_width/2)], fill=(0,0,0))
                else:
                    newdrawing.point((i,j), fill=(0,0,0))
    return newimage
def twotone(image, split=127):
    """Threshold each RGB channel of *image* independently at *split* (each channel becomes 0 or 255)."""
    print("applying two tone filter to r,g,b channels with split point", split, "...")
    width, height = image.size
    result = Image.new("RGB", (width, height), "white")
    out = result.load()
    for col in range(width):
        for row in range(height):
            src = getPixel(image, (col, row))
            out[col, row] = tuple(255 if channel > split else 0 for channel in src[:3])
    return result
def normalise(image):
    """Contrast-stretch *image* based on the min/max of its summed-RGB brightness."""
    print("normalising...")
    width,height = image.size
    newimage = Image.new("RGB", (width, height), "white")
    newpixel = newimage.load()
    # First pass: find min/max brightness as the sum of R+G+B (range 0..765).
    maxbright = 0
    minbright = 765
    for i in range(width):
        for j in range(height):
            pixel = getPixel(image,(i,j))
            maxbright = max(maxbright, sumRGB(pixel))
            minbright = min(minbright, sumRGB(pixel))
    # Turn maxbright into a multiplicative scale factor (765 / max sum).
    if (maxbright > 0):
        maxbright = 765/maxbright
    else:
        maxbright = 255
    # Per-channel offset: minimum summed brightness divided across 3 channels.
    minbright = minbright/3
    # NOTE(review): the scale factor is computed from the raw max before the
    # min offset is subtracted, so output may not span exactly 0..255 --
    # confirm whether that is intended.
    for i in range(width):
        for j in range(height):
            pixel = getPixel(image,(i,j))
            newpixel[i,j] = (int(maxbright*(pixel[0]-minbright)), int(maxbright*(pixel[1]-minbright)), int(maxbright*(pixel[2]-minbright)))
    return newimage
def drawEllipseMask(xy, quality=10):
    """Build a soft elliptical mask for bounding box *xy* = ((x0, y0), (x1, y1)).

    Stacks *quality* concentric low-alpha ellipses, growing from the centre
    to the full box, so opacity accumulates toward the middle.

    Bug fixes: the function now returns the mask image (it previously built it
    and returned None), and the alpha component is an int (255/10 is a float
    in Python 3, which PIL colour tuples reject).
    """
    newimage = Image.new("RGBA", (xy[1][0]-xy[0][0], xy[1][1]-xy[0][1]), (0,0,0,0))
    newdrawing = ImageDraw.Draw(newimage)
    centre_xy = (xy[1][0]/2 + xy[0][0]/2, xy[1][1]/2 + xy[0][1]/2)
    for i in range(quality):
        pos = (i/quality)
        new_xy = ((centre_xy[0]*(1-pos)+xy[0][0]*pos, centre_xy[1]*(1-pos)+xy[0][1]*pos),
                  (centre_xy[0]*(1-pos)+xy[1][0]*pos, centre_xy[1]*(1-pos)+xy[1][1]*pos))
        newdrawing.ellipse(new_xy, fill=(255, 255, 255, 255 // 10))
    return newimage
def drawPolygonMask(xy, size):
    """Return an RGBA image of *size* with triangle *xy* filled white (half-grey outline),
    transparent elsewhere -- used as a compositing mask by polygonCrop()."""
    newimage = Image.new("RGBA", size, (0,0,0,0))
    newdrawing = ImageDraw.Draw(newimage)
    fake_quad = np.array((xy[0], xy[1], xy[2], xy[2])) #for some reason PIL only likes quads and not triangles, so I convert it to a fake quad
    newdrawing.polygon(fake_quad, fill=(255,255,255,255), outline=(128,128,128,128))
    return newimage
def getDelaunay(points):
    """Run a Delaunay triangulation over *points* and return its vertex array.

    NOTE(review): ``.points`` is just the input vertices as a float array;
    ``.simplices`` would give the actual triangles -- confirm which was intended.
    """
    return Delaunay(np.asarray(points)).points
def polygonCrop(image, xy):
    """Composite *image* with itself through a triangular mask for region *xy*."""
    mask = drawPolygonMask(xy, image.size)
    mask_sized, image_sized = equalSize(mask, image)
    return ImageChops.composite(image_sized, image, mask_sized)
def transformTriangle(image, xy, target_xy):
    """Warp the triangular patch *xy* of *image* toward *target_xy* and composite it back.

    NOTE(review): Image.transform(PERSPECTIVE) expects the 8 homography
    coefficients (a..h), not corner coordinates, and target_xy[2] is passed
    twice here -- verify the warp actually behaves as the name suggests.
    """
    cropped_image = polygonCrop(image, xy)
    coefficients = (target_xy[0][0], target_xy[0][1], target_xy[1][0], target_xy[1][1],
        target_xy[2][0], target_xy[2][1], target_xy[2][0], target_xy[2][1])
    new_image = cropped_image.transform(image.size, Image.PERSPECTIVE, coefficients, resample=Image.BILINEAR)
    # Composite the warped patch over the original, using the patch itself as mask.
    return ImageChops.composite(image, new_image, new_image)
def sortPointListByDistance(points, centre):
    """Return *points* sorted by squared distance to *centre*, closest first.

    Bug fix: the original selection sort wrote into ``new_p = points`` (an
    alias, not a copy), overwriting entries it still needed to read, so the
    result could contain duplicated points. sorted() builds a fresh list and
    leaves the caller's list untouched.
    """
    return sorted(points, key=lambda p: (p[0] - centre[0]) ** 2 + (p[1] - centre[1]) ** 2)
def matchPointLists(a, b):
    """Greedily pair points of lists *a* and *b*; returns (index_into_a, index_into_b) tuples.

    Pass 1 matches every point of the shorter list to a near point of the
    longer list; pass 2 matches each still-unmatched point of the longer list
    to its nearest point of the shorter one, so every point appears in at
    least one pair. Distances use the Manhattan (L1) metric.

    NOTE(review): 1000 acts as an "infinite" distance sentinel, so points
    farther apart than that are never candidates, and nearestj stays -1 for
    empty inputs -- confirm callers only pass small, non-empty lists.
    """
    #find the list with more elements and the one with fewer elements
    less = a
    more = b
    swapped = False
    if (len(b) < len(a)):
        less = b
        more = a
        swapped = True
    more_matched = []
    for j in range(len(more)):
        more_matched.append(-1) #stores which indices of the larger have been matched
    matches = []
    for i in range(len(less)): #first, go through the smaller array and match every element to something
        nearest = 1000
        nearestj = -1
        for j in range(len(more)):
            dist = abs(less[i][0]-more[j][0])+abs(less[i][1]-more[j][1])
            if (more_matched[j] < 0 or dist < more_matched[j]):
                if (dist < nearest):
                    nearest = dist
                    nearestj = j
        more_matched[nearestj] = nearest
        if swapped:
            matches.append((nearestj, i))
        else:
            matches.append((i,nearestj))
    for j in range(len(more)): #second pass to match all the as-of-yet unmatched elements of 'more'
        if (more_matched[j] < 0):
            nearest = 1000
            nearesti = -1
            for i in range(len(less)):
                dist = abs(less[i][0]-more[j][0])+abs(less[i][1]-more[j][1])
                if (dist < nearest):
                    nearest = dist
                    nearesti = i
            if swapped:
                matches.append((j, nearesti))
            else:
                matches.append((nearesti, j))
    return matches
def interpolatePointLists(a_p, b_p, matches, pos):
    """Linearly blend matched points of a_p and b_p: pos=0 gives a_p, pos=1 gives b_p.

    *matches* is a list of (i, j) pairs where i indexes a_p and j indexes b_p.
    """
    return [
        (a_p[ia][0] * (1 - pos) + b_p[ib][0] * pos,
         a_p[ia][1] * (1 - pos) + b_p[ib][1] * pos)
        for ia, ib in matches
    ]
def clampToSize(coord, size):
    """Clamp *coord* so it indexes inside an image of *size* = (width, height)."""
    clamped_x = max(min(coord[0], size[0] - 1), 0)
    clamped_y = max(min(coord[1], size[1] - 1), 0)
    return (clamped_x, clamped_y)
def interpolateWithDots(a_pixel,b_pixel,size,a_p,b_p,matches,pos): #expects a and b to be same-sized images
    """Render a morph frame as colour-blended diamond 'dots' at interpolated point positions.

    a_pixel/b_pixel are PixelAccess objects of two same-sized images; a_p/b_p
    are their traced point lists, paired via *matches*; 0 < pos < 1 is the
    morph position. Dots shrink as points approach their destination.
    """
    print("interpolating two images using dots...")
    # At the endpoints there is nothing to blend: return a transparent frame.
    if pos == 0 or pos == 1:
        return Image.new("RGBA", size, (255,255,255,0))
    close_to_b = False
    if (pos > 0.5):
        close_to_b = True
    # Colour-blend position, remapped so the middle half of the morph does the fade.
    blend_pos = min(max(pos*2-0.5,0),1)
    new_image = Image.new("RGBA", size, (255,255,255,0))
    new_drawing = ImageDraw.Draw(new_image)
    points = interpolatePointLists(a_p,b_p,matches,pos)
    m = matches
    for i in range(len(points)):
        a_p_i = a_p[m[i][0]]
        b_p_i = b_p[m[i][1]]
        # Dot radius ~ half the L1 distance to the nearer endpoint's point.
        dist2_p = 0
        if close_to_b:
            dist2_p = (int(abs(b_p_i[0] - points[i][0]) + abs(b_p_i[1] - points[i][1])))
        else:
            dist2_p = (int(abs(a_p_i[0] - points[i][0]) + abs(a_p_i[1] - points[i][1])))
        dist2_p = dist2_p/2
        dist2_p_07 = dist2_p*0.7
        dist2_p_15 = dist2_p*1.5
        for u in range(int(-dist2_p), int(dist2_p)):
            for v in range(int(-dist2_p), int(dist2_p)):
                uv_dist2 = abs(u)+abs(v)
                if (uv_dist2 <= dist2_p_15):
                    if (uv_dist2 <= dist2_p_07):
                        x = u
                        y = v
                    else:
                        # Outer rim of the dot: sample further out, keeping the
                        # sign of the offset. NOTE(review): copysign(u, x)
                        # takes |u| with x's sign -- intent unverified.
                        x = abs(u*3)-dist2_p_07
                        y = abs(v*3)-dist2_p_07
                        x = math.copysign(u,x)
                        y = math.copysign(v, y)
                    a_coord = clampToSize((a_p_i[0]+x, a_p_i[1]+y), size)
                    b_coord = clampToSize((b_p_i[0]+x, b_p_i[1]+y), size)
                    coord = clampToSize((points[i][0]+x, points[i][1]+y), size)
                    # Alpha fades with distance from the dot centre.
                    alpha = int(256-200*float(uv_dist2)/dist2_p)
                    new_colour = blendColors(a_pixel[a_coord], b_pixel[b_coord], blend_pos, alpha)
                    new_drawing.point(coord, fill=new_colour)
        print("drew dot " + str(i) + "/" + str(len(points)) + " of size " + str(dist2_p*2), end="\t\t\t", flush=True)
    return new_image
def drawImageFromPoints(pointlist):
    """Render each point of *pointlist* as a small near-black dot on a white canvas
    of the module's working resolution."""
    print("saving image drawn from points...")
    width, height = reduce_width, reduce_height
    canvas = Image.new("RGB", (width, height), "white")
    pen = ImageDraw.Draw(canvas)
    for point in pointlist:
        x, y = point[0], point[1]
        pen.ellipse([(x - 1, y - 1), (x + 1, y + 1)], fill=(1, 1, 1))
    return canvas
def getPointsFromAutotrace(image, output_scale=(1,1)):
    """Trace *image* with the external `autotrace` tool and return its points.

    The image is saved as a temporary .bmp, autotraced into a gnuplot file,
    and every plotted coordinate is returned as an (x, y) tuple scaled by
    *output_scale*.  The y axis is flipped because gnuplot's origin is at
    the bottom.  Temporary files are removed before returning.
    """
    if not os.path.exists("./autotrace_temp"):
        os.makedirs("./autotrace_temp")
    print("saving image as .bmp format...")
    width,height = image.size
    image.save("./autotrace_temp/input.bmp", "BMP")
    print("starting autotrace to get points from image... (please make sure autotrace is installed)")
    cmd = "autotrace --centerline --color-count=2 --output-file=./autotrace_temp/output.gnuplot --output-format=gnuplot ./autotrace_temp/input.bmp"
    args = shlex.split(cmd)
    subprocess.run(args)
    print("getting autotraced image and converting to a list of points...")
    pointlist = []
    # fix: use a context manager so the file is closed even if parsing fails
    with open("./autotrace_temp/output.gnuplot", "r") as plot:
        for p in plot:
            if p[0].isdigit():
                twostrings = p.split()
                # the y value is inverted in the gnuplot
                pointtuple = (output_scale[0]*float(twostrings[0]),
                              output_scale[1]*(height-float(twostrings[1])))
                pointlist.append(pointtuple)
    print("deleting temporary files...")
    os.remove("./autotrace_temp/input.bmp")
    os.remove("./autotrace_temp/output.gnuplot")
    return pointlist
| 29.458791 | 144 | 0.6556 |
b67700b44bbf74ca214d531938c9d9df1fcd044e | 535 | py | Python | kubernetes/get_wandb_api_key.py | ClashLuke/gpt-neox | 3291d0e6c867d9d328b96e8377f5b77c6f66c323 | [
"MIT"
] | 3 | 2021-02-13T21:51:45.000Z | 2021-02-14T23:15:02.000Z | kubernetes/get_wandb_api_key.py | ClashLuke/gpt-neox | 3291d0e6c867d9d328b96e8377f5b77c6f66c323 | [
"MIT"
] | 13 | 2021-02-08T11:22:38.000Z | 2021-02-18T20:13:10.000Z | kubernetes/get_wandb_api_key.py | ClashLuke/gpt-neox | 3291d0e6c867d9d328b96e8377f5b77c6f66c323 | [
"MIT"
] | 2 | 2021-02-13T22:13:21.000Z | 2021-10-12T06:39:33.000Z | #!/usr/bin/env python
"""
Get Weights and Biases API key
"""
import requests
import os
def get_wandb_api_key():
    """ Get Weights and Biases API key from ENV or .netrc file. Otherwise return None """
    # the environment variable wins over any stored credential
    env_key = os.environ.get('WANDB_API_KEY')
    if env_key is not None:
        return env_key
    # fall back to the password field of the ~/.netrc entry for api.wandb.ai
    netrc_auth = requests.utils.get_netrc_auth('https://api.wandb.ai')
    if netrc_auth is not None:
        return netrc_auth[1]
    return None
if __name__ == "__main__":
    # CLI entry point: print the key to stdout only when one was found,
    # so shell callers can capture it safely
    api_key = get_wandb_api_key()
    if api_key is not None:
        print(api_key)
| 22.291667 | 89 | 0.678505 |
8fdec65b6686dd49b78df3a2805193f81a366b40 | 19,133 | py | Python | zentral/contrib/santa/views.py | johnmikep/zentral | e321e877b3759bffd8fecdcdad3d9535ea78c579 | [
"Apache-2.0"
] | null | null | null | zentral/contrib/santa/views.py | johnmikep/zentral | e321e877b3759bffd8fecdcdad3d9535ea78c579 | [
"Apache-2.0"
] | null | null | null | zentral/contrib/santa/views.py | johnmikep/zentral | e321e877b3759bffd8fecdcdad3d9535ea78c579 | [
"Apache-2.0"
] | null | null | null | import base64
import json
import logging
from django.core.exceptions import SuspiciousOperation
from django.urls import reverse
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db import transaction
from django.http import Http404, HttpResponseRedirect, JsonResponse
from django.shortcuts import get_object_or_404
from django.utils.crypto import get_random_string
from django.views.generic import DetailView, ListView, TemplateView, View
from django.views.generic.edit import CreateView, FormView, UpdateView
from zentral.contrib.inventory.exceptions import EnrollmentSecretVerificationFailed
from zentral.contrib.inventory.forms import EnrollmentSecretForm
from zentral.contrib.inventory.models import Certificate, MachineTag, MetaMachine
from zentral.contrib.inventory.utils import (commit_machine_snapshot_and_trigger_events,
verify_enrollment_secret)
from zentral.core.events.base import post_machine_conflict_event
from zentral.core.probes.models import ProbeSource
from zentral.utils.api_views import APIAuthError, verify_secret, JSONPostAPIView
from zentral.utils.http import user_agent_and_ip_address_from_request
from .conf import build_santa_conf
from .events import post_enrollment_event, post_events, post_preflight_event
from .forms import (CertificateSearchForm, CollectedApplicationSearchForm,
ConfigurationForm, CreateProbeForm, EnrollmentForm, RuleForm)
from .models import CollectedApplication, Configuration, EnrolledMachine, Enrollment
from .probes import Rule
from .osx_package.builder import SantaZentralEnrollPkgBuilder
from .utils import build_config_plist, build_configuration_profile
logger = logging.getLogger('zentral.contrib.santa.views')
# configuration / enrollment
class ConfigurationListView(LoginRequiredMixin, ListView):
    """Setup page listing every Santa configuration."""
    model = Configuration

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["setup"] = True
        context["configurations_count"] = context["object_list"].count()
        return context
class CreateConfigurationView(LoginRequiredMixin, CreateView):
    """Setup form to create a new Santa configuration."""
    model = Configuration
    form_class = ConfigurationForm

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["setup"] = True
        return context
class ConfigurationView(LoginRequiredMixin, DetailView):
    """Setup detail page for one Santa configuration and its enrollments."""
    model = Configuration

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["setup"] = True
        # pre-fetch the enrollment secrets to avoid N+1 queries in the template
        enrollment_list = list(
            self.object.enrollment_set.select_related("secret").all().order_by("id")
        )
        context["enrollments"] = enrollment_list
        context["enrollments_count"] = len(enrollment_list)
        return context
class UpdateConfigurationView(LoginRequiredMixin, UpdateView):
    """Setup form to edit an existing Santa configuration."""
    model = Configuration
    form_class = ConfigurationForm

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["setup"] = True
        return context
class CreateEnrollmentView(LoginRequiredMixin, TemplateView):
    """Create an enrollment (secret + santa options) for a configuration."""
    template_name = "santa/enrollment_form.html"

    def dispatch(self, request, *args, **kwargs):
        self.configuration = get_object_or_404(Configuration, pk=kwargs["pk"])
        return super().dispatch(request, *args, **kwargs)

    def get_forms(self):
        """Build the secret and enrollment forms, bound to POST data if any."""
        secret_kwargs = {"prefix": "secret"}
        enrollment_kwargs = {"configuration": self.configuration,
                             "initial": {"configuration": self.configuration}}
        if self.request.method == "POST":
            secret_kwargs["data"] = self.request.POST
            enrollment_kwargs["data"] = self.request.POST
        return (EnrollmentSecretForm(**secret_kwargs),
                EnrollmentForm(**enrollment_kwargs))

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["setup"] = True
        context["configuration"] = self.configuration
        # only build fresh forms when the caller did not hand in bound ones
        if "secret_form" not in kwargs or "enrollment_form" not in kwargs:
            context["secret_form"], context["enrollment_form"] = self.get_forms()
        return context

    def forms_invalid(self, secret_form, enrollment_form):
        context = self.get_context_data(secret_form=secret_form,
                                        enrollment_form=enrollment_form)
        return self.render_to_response(context)

    def forms_valid(self, secret_form, enrollment_form):
        secret = secret_form.save()
        secret_form.save_m2m()
        enrollment = enrollment_form.save(commit=False)
        enrollment.secret = secret
        if self.configuration:
            enrollment.configuration = self.configuration
        enrollment.save()
        return HttpResponseRedirect(enrollment.get_absolute_url())

    def post(self, request, *args, **kwargs):
        secret_form, enrollment_form = self.get_forms()
        if secret_form.is_valid() and enrollment_form.is_valid():
            return self.forms_valid(secret_form, enrollment_form)
        return self.forms_invalid(secret_form, enrollment_form)
class EnrollmentPackageView(LoginRequiredMixin, View):
    """Download the macOS enrollment package for one enrollment."""

    def get(self, request, *args, **kwargs):
        enrollment = get_object_or_404(Enrollment,
                                       pk=kwargs["pk"],
                                       configuration__pk=kwargs["configuration_pk"])
        return SantaZentralEnrollPkgBuilder(enrollment).build_and_make_response()
# enrollment endpoint called by enrollment script
class EnrollView(View):
    """Anonymous endpoint hit by the enrollment script on the client machine.

    The POSTed JSON must carry the enrollment secret, the machine serial
    number and the hardware UUID.  On success the machine is registered (or
    re-registered) and receives the santa configuration profile and plist.
    """

    def post(self, request, *args, **kwargs):
        self.user_agent, self.ip = user_agent_and_ip_address_from_request(request)
        try:
            request_json = json.loads(request.body.decode("utf-8"))
            secret = request_json["secret"]
            serial_number = request_json["serial_number"]
            uuid = request_json["uuid"]
            es_request = verify_enrollment_secret(
                "santa_enrollment", secret,
                self.user_agent, self.ip,
                serial_number, uuid
            )
        except (ValueError, KeyError, EnrollmentSecretVerificationFailed):
            # malformed JSON, missing keys or bad secret -> 400 via Django
            raise SuspiciousOperation
        else:
            # get or create enrolled machine
            # re-enrollments keep the previously assigned machine_id
            enrolled_machine, enrolled_machine_created = EnrolledMachine.objects.get_or_create(
                enrollment=es_request.enrollment_secret.santa_enrollment,
                serial_number=serial_number,
                defaults={"machine_id": get_random_string(64)}
            )
            # apply enrollment secret tags
            for tag in es_request.enrollment_secret.tags.all():
                MachineTag.objects.get_or_create(serial_number=serial_number, tag=tag)
            # response
            response = {"machine_id": enrolled_machine.machine_id}
            cp_name, cp_content = build_configuration_profile(enrolled_machine)
            cp_content = base64.b64encode(cp_content).decode("utf-8")
            response["configuration_profile"] = {"name": cp_name, "content": cp_content}
            cpl_name, cpl_content = build_config_plist(enrolled_machine)
            response["config_plist"] = {"name": cpl_name, "content": cpl_content}
            # post event
            post_enrollment_event(serial_number, self.user_agent, self.ip,
                                  {'action': "enrollment" if enrolled_machine_created else "re-enrollment"})
            return JsonResponse(response)
# probes
class CreateProbeView(LoginRequiredMixin, FormView):
    """Form view used to create a new santa probe."""
    form_class = CreateProbeForm
    template_name = "santa/create_probe.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["probes"] = True
        return context

    def form_valid(self, form):
        # the form persists the probe source; redirect to its detail page
        return HttpResponseRedirect(form.save().get_absolute_url())
class AddProbeRuleView(LoginRequiredMixin, FormView):
    """Add a santa rule to a probe.

    The rule form can be pre-filled from a collected application
    (?app_id=...) or from a collected certificate (?cert_id=...).
    """
    form_class = RuleForm
    template_name = "santa/rule_form.html"

    def dispatch(self, request, *args, **kwargs):
        self.probe_source = get_object_or_404(ProbeSource, pk=kwargs["probe_id"])
        self.probe = self.probe_source.load()
        return super().dispatch(request, *args, **kwargs)

    def get_initial(self):
        """Pre-fill rule type and sha256 from the query string, if valid."""
        initial = {}
        self.collected_app = None
        self.certificate = None
        if "app_id" in self.request.GET:
            try:
                self.collected_app = CollectedApplication.objects.get(pk=self.request.GET["app_id"])
            except (KeyError, CollectedApplication.DoesNotExist):
                pass
            else:
                initial["rule_type"] = Rule.BINARY
                initial["sha256"] = self.collected_app.sha_256
        elif "cert_id" in self.request.GET:
            try:
                self.certificate = Certificate.objects.get(pk=self.request.GET["cert_id"])
            # bug fix: this lookup is on Certificate, so it must catch
            # Certificate.DoesNotExist (was CollectedApplication.DoesNotExist,
            # which let an unknown cert_id raise a 500)
            except (KeyError, Certificate.DoesNotExist):
                pass
            else:
                initial["rule_type"] = Rule.CERTIFICATE
                initial["sha256"] = self.certificate.sha_256
        return initial

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs["collected_app"] = self.collected_app
        kwargs["certificate"] = self.certificate
        return kwargs

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['probes'] = True
        ctx['probe_source'] = self.probe_source
        ctx['probe'] = self.probe
        ctx['add_rule'] = True
        ctx['cancel_url'] = self.probe_source.get_absolute_url("santa")
        ctx['collected_app'] = self.collected_app
        ctx['certificate'] = self.certificate
        if self.collected_app:
            ctx["title"] = "Add collected application santa rule"
        elif self.certificate:
            ctx["title"] = "Add collected certificate santa rule"
        else:
            ctx["title"] = "Add santa rule"
        return ctx

    def form_valid(self, form):
        rule_d = form.get_rule_d()

        def func(probe_d):
            # append the new rule, creating the "rules" list if needed
            rules = probe_d.setdefault("rules", [])
            rules.append(rule_d)
        self.probe_source.update_body(func)
        return super().form_valid(form)

    def get_success_url(self):
        return self.probe_source.get_absolute_url("santa")
class UpdateProbeRuleView(LoginRequiredMixin, FormView):
    """Edit an existing santa rule of a probe, addressed by its index."""
    form_class = RuleForm
    template_name = "santa/rule_form.html"

    def dispatch(self, request, *args, **kwargs):
        self.probe_source = get_object_or_404(ProbeSource, pk=kwargs["probe_id"])
        self.probe = self.probe_source.load()
        self.rule_id = int(kwargs["rule_id"])
        try:
            self.rule = self.probe.rules[self.rule_id]
        except IndexError:
            # no rule at that index -> 404
            raise Http404
        return super().dispatch(request, *args, **kwargs)

    def get_initial(self):
        return self.form_class.get_initial(self.rule)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update({
            'probes': True,
            'probe_source': self.probe_source,
            'probe': self.probe,
            'add_rule': False,
            'title': "Update santa rule",
            'cancel_url': self.probe_source.get_absolute_url("santa"),
        })
        return context

    def form_valid(self, form):
        rule_d = form.get_rule_d()
        rule_index = self.rule_id

        def replace_rule(probe_d):
            probe_d["rules"][rule_index] = rule_d
        self.probe_source.update_body(replace_rule)
        return super().form_valid(form)

    def get_success_url(self):
        return self.probe_source.get_absolute_url("santa")
class DeleteProbeRuleView(LoginRequiredMixin, TemplateView):
    """Confirmation page + POST handler to remove one rule from a probe."""
    template_name = "santa/delete_rule.html"

    def dispatch(self, request, *args, **kwargs):
        self.probe_source = get_object_or_404(ProbeSource, pk=kwargs["probe_id"])
        self.probe = self.probe_source.load()
        if not self.probe.can_delete_rules:
            # deleting is not allowed for this probe: bounce back to it
            return HttpResponseRedirect(self.probe_source.get_absolute_url("santa"))
        self.rule_id = int(kwargs["rule_id"])
        return super().dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update({
            'probes': True,
            'probe_source': self.probe_source,
            'probe': self.probe,
            'cancel_url': self.probe_source.get_absolute_url("santa"),
        })
        return context

    def post(self, request, *args, **kwargs):
        rule_index = self.rule_id

        def drop_rule(probe_d):
            probe_d["rules"].pop(rule_index)
            # drop the empty list so the probe body stays minimal
            if not probe_d["rules"]:
                probe_d.pop("rules")
        self.probe_source.update_body(drop_rule)
        return HttpResponseRedirect(self.probe_source.get_absolute_url("santa"))
class PickRuleApplicationView(LoginRequiredMixin, TemplateView):
    """Search page used to pick a collected application for a new rule."""
    template_name = "santa/pick_rule_app.html"

    def dispatch(self, request, *args, **kwargs):
        self.probe_source = get_object_or_404(ProbeSource, pk=kwargs["probe_id"])
        self.probe = self.probe_source.load()
        return super().dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        search_form = CollectedApplicationSearchForm(self.request.GET)
        # validate to populate cleaned_data; errors just yield empty filters
        search_form.is_valid()
        context.update({
            'probes': True,
            'probe_source': self.probe_source,
            'probe': self.probe,
            'cancel_url': self.probe_source.get_absolute_url("santa"),
            'apps': CollectedApplication.objects.search(**search_form.cleaned_data),
            'form': search_form,
        })
        return context
class PickRuleCertificateView(LoginRequiredMixin, TemplateView):
    """Search page used to pick a collected certificate for a new rule."""
    template_name = "santa/pick_rule_cert.html"

    def dispatch(self, request, *args, **kwargs):
        self.probe_source = get_object_or_404(ProbeSource, pk=kwargs["probe_id"])
        self.probe = self.probe_source.load()
        return super().dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        search_form = CertificateSearchForm(self.request.GET)
        # validate to populate cleaned_data; errors just yield empty filters
        search_form.is_valid()
        context.update({
            'probes': True,
            'probe_source': self.probe_source,
            'probe': self.probe,
            'cancel_url': self.probe_source.get_absolute_url("santa"),
            'certs': CollectedApplication.objects.search_certificates(**search_form.cleaned_data),
            'form': search_form,
        })
        return context
# API
class BaseView(JSONPostAPIView):
    """Common machine authentication for all santa sync API endpoints.

    The machine_id URL kwarg is either an EnrolledMachine.machine_id (new
    style) or a signed secret containing a ":" (legacy style).
    """

    def verify_enrolled_machine_id(self):
        """Find the corresponding enrolled machine"""
        try:
            self.enrolled_machine = (EnrolledMachine.objects
                                     .select_related("enrollment__secret__meta_business_unit")
                                     .get(machine_id=self.machine_id))
        except EnrolledMachine.DoesNotExist:
            raise APIAuthError("Could not authorize the request")
        else:
            self.machine_serial_number = self.enrolled_machine.serial_number
            self.business_unit = self.enrolled_machine.enrollment.secret.get_api_enrollment_business_unit()

    def verify_signed_machine_id(self):
        """Verify the secret signature"""
        # TODO: deprecate and remove
        data = verify_secret(self.machine_id, "zentral.contrib.santa")
        self.machine_serial_number = data.get('machine_serial_number', None)
        self.business_unit = data.get('business_unit', None)

    def check_request_secret(self, request, *args, **kwargs):
        # sets self.machine_serial_number / self.business_unit or raises
        self.enrolled_machine = None
        self.machine_id = kwargs['machine_id']
        if ":" not in self.machine_id:
            # new way, machine_id is an attribute of EnrolledMachine
            self.verify_enrolled_machine_id()
        else:
            # old way
            self.verify_signed_machine_id()
class PreflightView(BaseView):
    """Santa sync preflight: check-in, inventory commit and config push."""

    def check_data_secret(self, data):
        # cross-check the serial number reported by the agent against the
        # one attached to the enrollment
        reported_serial_number = data['serial_num']
        if reported_serial_number != self.machine_serial_number:
            # the SN reported by osquery is not the one configured in the enrollment secret
            auth_err = "santa reported SN {} different from enrollment SN {}".format(reported_serial_number,
                                                                                    self.machine_serial_number)
            machine_info = {k: v for k, v in data.items()
                            if k in ("hostname", "os_build", "os_version", "serial_num", "primary_user") and v}
            post_machine_conflict_event(self.request, "zentral.contrib.santa",
                                        reported_serial_number, self.machine_serial_number,
                                        machine_info)
            raise APIAuthError(auth_err)

    @transaction.non_atomic_requests
    def do_post(self, data):
        post_preflight_event(self.machine_serial_number,
                             self.user_agent,
                             self.ip,
                             data)
        # e.g. "10.13.2" -> {'major': 10, 'minor': 13, 'patch': 2}
        os_version = dict(zip(('major', 'minor', 'patch'),
                              (int(s) for s in data['os_version'].split('.'))))
        os_version.update({'name': 'Mac OS X',
                           'build': data['os_build']})
        # machine snapshot committed to the inventory
        tree = {'source': {'module': 'zentral.contrib.santa',
                           'name': 'Santa'},
                'serial_number': self.machine_serial_number,
                'os_version': os_version,
                'system_info': {'computer_name': data['hostname']},
                'public_ip_address': self.ip,
                }
        if self.enrolled_machine:
            # new way
            tree["reference"] = self.enrolled_machine.machine_id
        else:
            # old way
            # TODO: remove it
            tree["reference"] = self.machine_serial_number
        if self.business_unit:
            tree['business_unit'] = self.business_unit.serialize()
        commit_machine_snapshot_and_trigger_events(tree)
        # sync server configuration returned to the santa agent
        config_dict = {'UploadLogsUrl': 'https://{host}{path}'.format(host=self.request.get_host(),
                                                                     path=reverse('santa:logupload',
                                                                                  args=(self.machine_id,)))}
        if self.enrolled_machine:
            config_dict.update(self.enrolled_machine.enrollment.configuration.get_sync_server_config())
        else:
            config_dict['BatchSize'] = Configuration.DEFAULT_BATCH_SIZE
        return config_dict
class RuleDownloadView(BaseView):
    """Santa sync: return the rule database for the requesting machine."""

    def do_post(self, data):
        machine = MetaMachine(self.machine_serial_number)
        return build_santa_conf(machine)
class EventUploadView(BaseView):
    """Santa sync: persist the uploaded execution events as zentral events."""

    def do_post(self, data):
        post_events(self.machine_serial_number, self.user_agent, self.ip, data)
        return {}
class LogUploadView(BaseView):
    # Log upload endpoint: authentication is inherited from BaseView;
    # the uploaded payload is currently ignored.
    pass
class PostflightView(BaseView):
    # Postflight sync step: nothing to do, reply with an empty JSON object.
    def do_post(self, data):
        return {}
| 40.195378 | 113 | 0.644436 |
b258af18b22b3a7c4d3a4c202c4a533f6a3b5803 | 10,503 | py | Python | nailgun/nailgun/test/integration/test_network_models.py | dnikishov/fuel-web | 152c2072cf585fc61d7e157ccf9a7ea1d0377daa | [
"Apache-2.0"
] | null | null | null | nailgun/nailgun/test/integration/test_network_models.py | dnikishov/fuel-web | 152c2072cf585fc61d7e157ccf9a7ea1d0377daa | [
"Apache-2.0"
] | null | null | null | nailgun/nailgun/test/integration/test_network_models.py | dnikishov/fuel-web | 152c2072cf585fc61d7e157ccf9a7ea1d0377daa | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from mock import patch
from oslo_serialization import jsonutils
import yaml
from nailgun.objects import Cluster
from nailgun.objects.serializers.network_configuration \
import NeutronNetworkConfigurationSerializer
from nailgun.objects.serializers.network_configuration \
import NovaNetworkConfigurationSerializer
from nailgun import consts
from nailgun.db.sqlalchemy.models import NeutronConfig
from nailgun.db.sqlalchemy.models import NovaNetworkConfig
from nailgun.test.base import BaseIntegrationTest
from nailgun.utils import reverse
class TestNetworkModels(BaseIntegrationTest):
    """Integration tests for the nova/neutron network configuration models
    and their REST handlers (locking, updates after deployment,
    serialization of networking parameters)."""

    # neutron networking parameters template shared by the neutron tests
    network_config = {
        "net_l23_provider": consts.NEUTRON_L23_PROVIDERS.ovs,
        "segmentation_type": consts.NEUTRON_SEGMENT_TYPES.gre,
        "vlan_range": [1000, 1030],
        "gre_id_range": [2, 65534],
        "base_mac": "fa:16:3e:00:00:00",
        "internal_cidr": "192.168.111.0/24",
        "internal_gateway": "192.168.111.1",
        "internal_name": "my_internal_name",
        "floating_name": "my_floating_name",
        "floating_ranges": [
            ["172.16.0.130", "172.16.0.150"],
            ["172.16.0.160", "172.16.0.254"]
        ],
        "dns_nameservers": ["8.8.4.4", "8.8.8.8"],
        "configuration_template": {}
    }

    def tearDown(self):
        # make sure no background threads outlive the test
        self._wait_for_threads()
        super(TestNetworkModels, self).tearDown()

    def create_env_using_statuses(self, cluster_status, node_status):
        # build a 3-node neutron/gre cluster with the given statuses
        self.env.create(
            cluster_kwargs={
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.gre,
                'status': cluster_status
            },
            nodes_kwargs=[
                {'pending_addition': False, 'status': node_status},
                {'pending_addition': False, 'status': node_status},
                {'pending_deletion': False, 'status': node_status}])

    def test_cluster_locking_during_deployment(self):
        # while a deployment is running, network and attribute changes
        # must be rejected
        self.create_env_using_statuses(consts.CLUSTER_STATUSES.deployment,
                                       consts.NODE_STATUSES.deploying)
        test_nets = self.env.neutron_networks_get(
            self.env.clusters[0].id).json_body
        resp_nova_net = self.env.nova_networks_put(
            self.env.clusters[0].id,
            test_nets,
            expect_errors=True)
        resp_neutron_net = self.env.neutron_networks_put(
            self.env.clusters[0].id,
            test_nets,
            expect_errors=True)
        resp_cluster = self.app.put(
            reverse('ClusterAttributesHandler',
                    kwargs={'cluster_id': self.env.clusters[0].id}),
            jsonutils.dumps({
                'editable': {
                    "foo": {"bar": None}
                }
            }),
            headers=self.default_headers,
            expect_errors=True)
        self.assertEqual(resp_nova_net.status_code, 400)
        # it's 400 because we used Nova network
        self.assertEqual(resp_neutron_net.status_code, 403)
        self.assertEqual(resp_cluster.status_code, 403)

    def test_networks_update_after_deployment(self):
        self.create_env_using_statuses(consts.CLUSTER_STATUSES.operational,
                                       consts.NODE_STATUSES.ready)
        test_nets = self.env.neutron_networks_get(
            self.env.clusters[0].id).json_body
        test_network_params = copy.deepcopy(test_nets['networking_parameters'])
        # change something from 'networking_parameters'
        test_nets['networking_parameters']['dns_nameservers'] = \
            ['8.8.8.8', '8.8.4.4']
        # let's change for example management network
        test_network_name = consts.NETWORKS.management
        mgmt_net = filter(lambda x: x['name'] == test_network_name,
                          test_nets['networks'])[0]
        # a CIDR that does not contain the already allocated IPs is rejected
        mgmt_net['cidr'] = u'1.1.1.0/24'
        resp_neutron_net = self.env.neutron_networks_put(
            self.env.clusters[0].id, test_nets, expect_errors=True)
        self.assertEqual(400, resp_neutron_net.status_code)
        self.assertEqual(
            "New IP ranges for network '{0}'({1}) do not cover already "
            "allocated IPs.".format(test_network_name, mgmt_net['id']),
            resp_neutron_net.json_body['message'])
        # a CIDR covering the allocated IPs is accepted
        mgmt_net['cidr'] = u'192.168.0.0/30'
        resp_neutron_net = self.env.neutron_networks_put(
            self.env.clusters[0].id, test_nets)
        self.assertEqual(200, resp_neutron_net.status_code)
        new_nets = self.env.neutron_networks_get(
            self.env.clusters[0].id).json_body
        # test that network was changed
        modified_net = filter(lambda x: x['name'] == test_network_name,
                              new_nets['networks'])[0]
        self.assertEqual(u'192.168.0.0/30', modified_net['cidr'])
        # test that networking_parameters were not changed
        self.assertDictEqual(test_network_params,
                             new_nets['networking_parameters'])

    def test_admin_network_update_after_deployment(self):
        self.create_env_using_statuses(consts.CLUSTER_STATUSES.operational,
                                       consts.NODE_STATUSES.ready)
        test_nets = self.env.neutron_networks_get(
            self.env.clusters[0].id).json_body
        admin_net = filter(
            lambda x: x['name'] == consts.NETWORKS.fuelweb_admin,
            test_nets['networks'])[0]
        admin_net['cidr'] = u'191.111.0.0/26'
        admin_net['ip_ranges'] = [[u'191.111.0.5', u'191.111.0.62']]
        # rejected while the nodes still hold addresses in the old range
        resp_neutron_net = self.env.neutron_networks_put(
            self.env.clusters[0].id, test_nets, expect_errors=True)
        self.assertEqual(400, resp_neutron_net.status_code)
        self.assertEqual(
            "New IP ranges for network '{0}'({1}) do not cover already "
            "allocated IPs.".format(admin_net['name'], admin_net['id']),
            resp_neutron_net.json_body['message'])
        # after removing the nodes the update goes through
        for node in self.env.nodes:
            self.db.delete(node)
        self.db.commit()
        with patch('task.task.rpc.cast'):
            resp_neutron_net = self.env.neutron_networks_put(
                self.env.clusters[0].id, test_nets)
        self.assertEqual(200, resp_neutron_net.status_code)

    def test_nova_net_networking_parameters(self):
        cluster = self.env.create_cluster(api=False)
        self.db.delete(cluster.network_config)
        kw = {
            "net_manager": consts.NOVA_NET_MANAGERS.VlanManager,
            "fixed_networks_cidr": "10.0.0.0/16",
            "fixed_networks_vlan_start": 103,
            "fixed_network_size": 256,
            "fixed_networks_amount": 16,
            "floating_ranges": [["172.16.0.128", "172.16.0.254"]],
            "dns_nameservers": ["8.8.4.4", "8.8.8.8"],
            "cluster_id": cluster.id
        }
        nc = NovaNetworkConfig(**kw)
        self.db.add(nc)
        self.db.flush()
        self.db.refresh(cluster)
        nw_params = NovaNetworkConfigurationSerializer.\
            serialize_network_params(cluster)
        # cluster_id is internal and must not be serialized
        kw.pop("cluster_id")
        self.assertEqual(nw_params, kw)

    def check_neutron_networking_parameters(self, floating_ranges):
        # helper: round-trip self.network_config through a NeutronConfig
        # row and compare with the serialized networking parameters
        cluster = self.env.create_cluster(
            api=False,
            net_provider=consts.CLUSTER_NET_PROVIDERS.neutron)
        self.db.delete(cluster.network_config)
        self.network_config['floating_ranges'] = floating_ranges
        self.network_config['cluster_id'] = cluster.id
        nc = NeutronConfig(**self.network_config)
        self.db.add(nc)
        self.db.flush()
        self.db.refresh(cluster)
        nw_params = NeutronNetworkConfigurationSerializer. \
            serialize_network_params(cluster)
        self.network_config.pop("cluster_id")
        self.assertItemsEqual(nw_params, self.network_config)

    def test_neutron_networking_parameters_w_single_floating_ranges(self):
        floating_ranges = [["172.16.0.130", "172.16.0.150"]]
        self.check_neutron_networking_parameters(floating_ranges)

    def test_neutron_networking_parameters_w_multiple_floating_ranges(self):
        floating_ranges = [
            ["172.16.0.130", "172.16.0.150"],
            ["172.16.0.160", "172.16.0.254"]]
        self.check_neutron_networking_parameters(floating_ranges)

    def test_neutron_has_internal_and_floating_names(self):
        # default names applied when no explicit config is provided
        cluster = self.env.create_cluster(
            api=False,
            net_provider=consts.CLUSTER_NET_PROVIDERS.neutron)
        self.assertEqual(
            "admin_internal_net", cluster.network_config.internal_name)
        self.assertEqual(
            "admin_floating_net", cluster.network_config.floating_name)

    def test_neutron_networking_parameters_baremetal(self):
        attributes_metadata = """
            editable:
                additional_components:
                    ironic:
                        value: %r
                        type: "checkbox"
        """
        cluster = self.env.create_cluster(
            api=False,
            net_provider=consts.CLUSTER_NET_PROVIDERS.neutron)
        # Ensure baremetal_* fields are not serialized when Ironic disabled
        nw_params = NeutronNetworkConfigurationSerializer. \
            serialize_network_params(cluster)
        self.assertNotIn('baremetal_gateway', nw_params)
        self.assertNotIn('baremetal_range', nw_params)
        # Ensure baremetal_* fields are serialized when Ironic enabled
        Cluster.patch_attributes(
            cluster, yaml.load(attributes_metadata % True))
        self.db.refresh(cluster)
        nw_params = NeutronNetworkConfigurationSerializer. \
            serialize_network_params(cluster)
        self.assertIn('baremetal_gateway', nw_params)
        self.assertIn('baremetal_range', nw_params)
a711f095b90ab3b66ee72c8a9401fdb7df3b25c5 | 1,124 | py | Python | setup.py | lucidrains/tf-bind-transformer | 420d9382305d99de8a604a980099b634361d21d0 | [
"MIT"
] | 43 | 2021-12-08T02:20:58.000Z | 2022-03-29T18:18:10.000Z | setup.py | lucidrains/tf-bind-transformer | 420d9382305d99de8a604a980099b634361d21d0 | [
"MIT"
] | null | null | null | setup.py | lucidrains/tf-bind-transformer | 420d9382305d99de8a604a980099b634361d21d0 | [
"MIT"
] | 4 | 2021-12-24T02:10:00.000Z | 2022-01-11T19:49:10.000Z | from setuptools import setup, find_packages
# Package metadata for the tf-bind-transformer distribution.
setup(
  name = 'tf-bind-transformer',
  packages = find_packages(exclude=[]),
  version = '0.0.118',
  license='MIT',
  description = 'Transformer for Transcription Factor Binding',
  author = 'Phil Wang',
  author_email = 'lucidrains@gmail.com',
  url = 'https://github.com/lucidrains/tf-bind-transformer',
  long_description_content_type = 'text/markdown',
  # keywords shown on the PyPI page
  keywords = [
    'artificial intelligence',
    'deep learning',
    'attention mechanism',
    'transformers',
    'transcription factors',
    'gene expression'
  ],
  # runtime dependencies
  install_requires=[
    'bidirectional-cross-attention',
    'biopython',
    'click',
    'einops>=0.3',
    'enformer-pytorch>=0.5',
    'fair-esm',
    'logavgexp-pytorch',
    'polars',
    'python-dotenv',
    'sentencepiece',
    'torch>=1.6',
    'transformers>=4.0',
    'tqdm'
  ],
  classifiers=[
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'Topic :: Scientific/Engineering :: Artificial Intelligence',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 3.6',
  ],
)
| 25.545455 | 65 | 0.643238 |
a13a9214ce80a0b44a8a9fdb48e545575be22846 | 26,193 | py | Python | pyscf/dft/xcfun.py | 1QB-Information-Technologies/pyscf | 8730b90439ca68106dca54d22c0d61e7422e557f | [
"BSD-2-Clause"
] | null | null | null | pyscf/dft/xcfun.py | 1QB-Information-Technologies/pyscf | 8730b90439ca68106dca54d22c0d61e7422e557f | [
"BSD-2-Clause"
] | null | null | null | pyscf/dft/xcfun.py | 1QB-Information-Technologies/pyscf | 8730b90439ca68106dca54d22c0d61e7422e557f | [
"BSD-2-Clause"
] | 1 | 2018-12-06T03:10:50.000Z | 2018-12-06T03:10:50.000Z | #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
XC functional, the interface to xcfun (https://github.com/dftlibs/xcfun)
U. Ekstrom et al, J. Chem. Theory Comput., 6, 1971
'''
import copy
import ctypes
import math
import numpy
from pyscf import lib
# handle to the compiled xcfun interface shared library
_itrf = lib.load_library('libxcfun_itrf')
# Map of functional names (and common aliases) to xcfun functional ids.
# Alias entries whose value is a string are expanded by parse_xc();
# entries mapped to None are not available through xcfun.
XC = XC_CODES = {
'SLATERX'       :  0,  # Slater LDA exchange
'VWN5C'         :  1,  # VWN5 LDA Correlation functional
'BECKEX'        :  2,  # Becke 88 exchange
'BECKECORRX'    :  3,  # Becke 88 exchange correction
'BECKESRX'      :  4,  # Short range Becke 88 exchange
'OPTX'          :  5,  # OPTX Handy & Cohen exchange
'LYPC'          :  6,  # LYP correlation
'PBEX'          :  7,  # PBE Exchange Functional
'REVPBEX'       :  8,  # Revised PBE Exchange Functional
'RPBEX'         :  9,  # RPBE Exchange Functional
'PBEC'          : 10,  # PBE correlation functional
'SPBEC'         : 11,  # sPBE correlation functional
'VWN_PBEC'      : 12,  # PBE correlation functional using VWN LDA correlation.
#'RANGESEP_MU'  : 16,  # Error function range separation parameter (1/a0)
'KTX'           : 17,  # KT exchange GGA correction
#'TFK'          : 18,  # Thomas-Fermi Kinetic Energy Functional
'PW91X'         : 19,  # Perdew-Wang 1991 GGA Exchange Functional
#'PW91K'        : 20,  # PW91 GGA Kinetic Energy Functional
'PW92C'         : 21,  # PW92 LDA correlation
'M05X'          : 22,  # M05 exchange
'M05X2X'        : 23,  # M05-2X exchange
'M06X'          : 24,  # M06 exchange
'M06X2X'        : 25,  # M06-2X exchange
'M06LX'         : 26,  # M06-L exchange
'M06HFX'        : 27,  # M06-HF exchange
'BRX'           : 28,  # BR exchange. Becke-Roussels exchange functional.
'M05X2C'        : 29,  # M05-2X Correlation
'M05C'          : 30,  # M05 Correlation
'M06C'          : 31,  # M06 Correlation
'M06LC'         : 32,  # M06-L Correlation
'M06X2C'        : 33,  # M06-2X Correlation
'TPSSC'         : 34,  # TPSS original correlation functional
'TPSSX'         : 35,  # TPSS original exchange functional
'REVTPSSC'      : 36,  # Revised TPSS correlation functional
'REVTPSSX'      : 37,  # Reviewed TPSS exchange functional
#
# alias
#
'SLATER'        : 0,   # SLATERX
'LDA'           : 0,   # SLATERX
'VWN'           : 1,   # VWN5C
'VWN5'          : 1,   # VWN5C
'B88'           : 2,   # BECKEX
'LYP'           : 6,   # LYP correlation
'P86'           : None,
'BLYP'          : 'BECKEX + LYP',
'BP86'          : None,
'BPW91'         : 'BECKEX + PW91C',
'BPW92'         : 'BECKEX + PW92C',
'OLYP'          : '2.4832*SLATER - 1.43169*OPTX + LYP',  # CPL, 341, 319
'KT1'           : '1.006*SLATER - .006*KTX + VWN5',  # JCP, 119, 3015
'KT2'           : '1.07773*SLATER - .006*KTX + 0.576727*VWN5',  # JCP, 119, 3015
'KT3'           : '2.021452*SLATER - .004*KTX - .925452*OPTX + .864409*LYP',  # JCP, 121, 5654
'PBE0'          : '.25*HF + .75*PBEX + PBEC',  # JCP, 110, 6158
'PBE1PBE'       : 'PBE0',
'B3PW91'        : None,
'B3P86'         : None,
# Note, use VWN5 for B3LYP. It is different to the libxc default B3LYP
'B3LYP'         : 'B3LYP5',
'B3LYP5'        : '.2*HF + .08*SLATER + .72*BECKE + .81*LYP + .19*VWN5',
'B3LYPG'        : None,  # B3LYP-VWN3 used by Gaussian and libxc
'O3LYP'         : '.1161*HF + .1129*SLATER + .8133*OPTX + .81*LYP + .19*VWN5',  # Mol. Phys. 99 607
'M062X'         : 'M06X2X, M062XC',
'CAMB3LYP'      : None,
}
# functional ids grouped by rung of the density-functional ladder
LDA_IDS = set([0, 1, 13, 14, 15, 16, 18, 21])
GGA_IDS = set([2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 17, 19, 20])
MGGA_IDS = set([22, 23, 24, 25, 26, 27, 29, 30, 31, 32, 33, 34, 35, 36, 37])
MLGGA_IDS = set([28])
# compound functional names that mix in exact (HF) exchange
HYB_XC = set(('PBE0'    , 'PBE1PBE'  , 'B3PW91'   , 'B3P86'    , 'B3LYP'    ,
              'B3LYPG'  , 'O3LYP'    , 'M062X'    , 'CAMB3LYP' ,))
# highest functional derivative order exposed through this interface
MAX_DERIV_ORDER = 3
def xc_type(xc_code):
    """Classify a functional description as 'HF', 'LDA', 'GGA' or 'MGGA'."""
    if isinstance(xc_code, str):
        fn_facs = parse_xc(xc_code)[1]
    else:
        # A bare functional id behaves like a parsed single-entry list.
        fn_facs = [(xc_code, 1)]
    if not fn_facs:
        # No density functional at all, i.e. pure exact exchange.
        return 'HF'
    ids = [xid for xid, _ in fn_facs]
    if all(xid in LDA_IDS for xid in ids):
        return 'LDA'
    if any(xid in MGGA_IDS or xid in MLGGA_IDS for xid in ids):
        return 'MGGA'
    # Remaining mixtures of GGA/LDA components (including hybrids) are GGA.
    return 'GGA'
def is_lda(xc_code):
    """True if xc_code describes a pure LDA functional."""
    family = xc_type(xc_code)
    return family == 'LDA'
def is_hybrid_xc(xc_code):
    """True if xc_code contains any exact-exchange (HF) component."""
    if isinstance(xc_code, str):
        if 'HF' in xc_code or xc_code in HYB_XC:
            return True
        return hybrid_coeff(xc_code) != 0
    if isinstance(xc_code, int):
        # Plain functional ids never carry exact exchange.
        return False
    # Sequence of descriptions: hybrid if any member is.
    return any(is_hybrid_xc(x) for x in xc_code)
def is_meta_gga(xc_code):
    """True if xc_code describes a meta-GGA functional."""
    family = xc_type(xc_code)
    return family == 'MGGA'
def is_gga(xc_code):
    """True if xc_code describes a (possibly hybrid) GGA functional."""
    family = xc_type(xc_code)
    return family == 'GGA'
def max_deriv_order(xc_code):
    """Return the highest functional-derivative order supported for xc_code.

    xcfun supports the same maximum order for every functional this wrapper
    exposes, so the answer is the module constant MAX_DERIV_ORDER.
    """
    # parse_xc is called purely for its validation side effect: it raises
    # for unknown or unsupported functional descriptions.
    parse_xc(xc_code)
    return MAX_DERIV_ORDER
def test_deriv_order(xc_code, deriv, raise_error=False):
    """Check whether derivative order `deriv` is available for xc_code.

    Optionally raises NotImplementedError instead of returning False.
    """
    supported = deriv <= max_deriv_order(xc_code)
    if raise_error and not supported:
        raise NotImplementedError('xcfun library does not support derivative '
                                  'order %d for %s' % (deriv, xc_code))
    return supported
def hybrid_coeff(xc_code, spin=0):
    """Return the fraction of exact (HF) exchange encoded in xc_code."""
    hyb, _ = parse_xc(xc_code)
    return hyb
def parse_xc_name(xc_name):
    """Resolve an 'X,C' functional name into its pair of xcfun functional ids."""
    fn_facs = parse_xc(xc_name)[1]
    x_id = fn_facs[0][0]
    c_id = fn_facs[1][0]
    return x_id, c_id
def parse_xc(description):
    '''Parse a functional description into (hybrid_coefficient, fn_facs).

    Rules to input functional description:

    * The given functional description must be a one-line string.
    * The functional description is case-insensitive.
    * The functional description string has two parts, separated by ",".  The
      first part describes the exchange functional, the second is the correlation
      functional.
      - If "," was not appeared in string, the entire string is considered as
        X functional.
      - To neglect X functional (just apply C functional), leave blank in the
        first part, eg description=',vwn' for pure VWN functional
    * The functional name can be placed in arbitrary order.  Two name needs to
      be separated by operators "+" or "-".  Blank spaces are ignored.
      NOTE the parser only reads operators "+" "-" "*".  / is not in support.
    * A functional name is associated with one factor.  If the factor is not
      given, it is assumed equaling 1.
    * String "HF" stands for exact exchange (HF K matrix).  It is allowed to
      put in C functional part.
    * Be careful with the xcfun convention on GGA functional, in which the LDA
      contribution is included.

    Returns:
        hyb: the exact-exchange coefficient.
        fn_facs: a sequence of (functional_id, factor) pairs.
    '''
    if isinstance(description, int):
        # Bare functional id: unit factor, no exact exchange.  The trailing
        # comma is essential -- callers iterate fn_facs as (id, fac) pairs,
        # so the result must be a tuple OF pairs, not a single flat pair.
        return 0, ((description, 1.),)
    elif not isinstance(description, str): #isinstance(description, (tuple,list)):
        # (x_functional, c_functional) pair: join and re-parse as a string.
        return parse_xc('%s,%s' % tuple(description))

    # Normalize: strip blanks/underscores, upper-case, split X and C parts.
    if ',' in description:
        x_code, c_code = description.replace(' ','').replace('_','').upper().split(',')
    else:
        x_code, c_code = description.replace(' ','').replace('_','').upper(), ''

    hyb = [0]       # one-element list so parse_token can accumulate into it
    fn_facs = []

    def parse_token(token, suffix):
        # Parse one "fac*NAME" (or bare "NAME") term into hyb/fn_facs.
        # suffix ('X' or 'C') disambiguates short names like 'LYP' -> 'LYPC'.
        if token:
            if '*' in token:
                fac, key = token.split('*')
                if fac[0].isalpha():
                    # "NAME*fac" form: swap so fac is the number.
                    fac, key = key, fac
                fac = float(fac)
            else:
                fac, key = 1, token
            if key == 'HF':
                hyb[0] += fac
            elif key.isdigit():
                # Raw xcfun functional id given directly.
                fn_facs.append((int(key), fac))
            else:
                if key in XC_CODES:
                    x_id = XC_CODES[key]
                elif key+suffix in XC_CODES:
                    x_id = XC_CODES[key+suffix]
                else:
                    raise KeyError('Unknown key %s' % key)
                if isinstance(x_id, str):
                    # Alias expands to another description; parse recursively.
                    hyb1, fn_facs1 = parse_xc(x_id)
                    hyb[0] += hyb1
                    fn_facs.extend(fn_facs1)
                elif x_id is None:
                    raise NotImplementedError(key)
                else:
                    fn_facs.append((x_id, fac))

    def remove_dup(fn_facs):
        # Merge repeated functional ids by summing their factors,
        # preserving first-appearance order.
        fn_ids = []
        facs = []
        n = 0
        for key, val in fn_facs:
            if key in fn_ids:
                facs[fn_ids.index(key)] += val
            else:
                fn_ids.append(key)
                facs.append(val)
                n += 1
        return list(zip(fn_ids, facs))

    # "-" marks a negative factor of the following term; turn it into a
    # separate "+" token carrying the sign, then split on "+".
    for token in x_code.replace('-', '+-').split('+'):
        parse_token(token, 'X')
    for token in c_code.replace('-', '+-').split('+'):
        parse_token(token, 'C')
    return hyb[0], remove_dup(fn_facs)
def eval_xc(xc_code, rho, spin=0, relativity=0, deriv=1, verbose=None):
    r'''Interface to call xcfun library to evaluate XC functional, potential
    and functional derivatives.
    * The given functional xc_code must be a one-line string.
    * The functional xc_code is case-insensitive.
    * The functional xc_code string has two parts, separated by ",".  The
      first part describes the exchange functional, the second is the correlation
      functional.  If "," not appeared in string, entire string is considered as
      functional.
      - If "," not appeared in string, the entire string is considered as X functional.
      - To neglect X functional (just apply C functional), leave blank in the
        first part, eg description=',vwn' for pure VWN functional
    * The functional name can be placed in arbitrary order.  Two name needs to
      be separated by operators "+" or "-".  Blank spaces are ignored.
      NOTE the parser only reads operators "+" "-" "*".  / is not in support.
    * A functional name is associated with one factor.  If the factor is not
      given, it is assumed equaling 1.
    * String "HF" stands for exact exchange (HF K matrix).  It is allowed to
      put in C functional part.
    * Be careful with the xcfun convention on GGA functional, in which the LDA
      contribution is included.

    Args:
        xc_code : str
            A string to describe the linear combination of different XC functionals.
            The X and C functional are separated by comma like '.8*LDA+.2*B86,VWN'.
            If "HF" was appeared in the string, it stands for the exact exchange.
        rho : ndarray
            Shape of ((*,N)) for electron density (and derivatives) if spin = 0;
            Shape of ((*,N),(*,N)) for alpha/beta electron density (and derivatives) if spin > 0;
            where N is number of grids.
            rho (*,N) are ordered as (den,grad_x,grad_y,grad_z,laplacian,tau)
            where grad_x = d/dx den, laplacian = \nabla^2 den, tau = 1/2(\nabla f)^2
            In spin unrestricted case,
            rho is ((den_u,grad_xu,grad_yu,grad_zu,laplacian_u,tau_u)
                    (den_d,grad_xd,grad_yd,grad_zd,laplacian_d,tau_d))

    Kwargs:
        spin : int
            spin polarized if spin > 0
        relativity : int
            No effects.
        verbose : int or object of :class:`Logger`
            No effects.

    Returns:
        ex, vxc, fxc, kxc
        where
        * vxc = (vrho, vsigma, vlapl, vtau) for restricted case
        * vxc for unrestricted case
          | vrho[:,2]   = (u, d)
          | vsigma[:,3] = (uu, ud, dd)
          | vlapl[:,2]  = (u, d)
          | vtau[:,2]   = (u, d)
        * fxc for restricted case:
          (v2rho2, v2rhosigma, v2sigma2, v2lapl2, vtau2, v2rholapl, v2rhotau, v2lapltau, v2sigmalapl, v2sigmatau)
        * fxc for unrestricted case:
          | v2rho2[:,3]     = (u_u, u_d, d_d)
          | v2rhosigma[:,6] = (u_uu, u_ud, u_dd, d_uu, d_ud, d_dd)
          | v2sigma2[:,6]   = (uu_uu, uu_ud, uu_dd, ud_ud, ud_dd, dd_dd)
          | v2lapl2[:,3]
          | vtau2[:,3]
          | v2rholapl[:,4]
          | v2rhotau[:,4]
          | v2lapltau[:,4]
          | v2sigmalapl[:,6]
          | v2sigmatau[:,6]
        * kxc for restricted case:
          v3rho3, v3rho2sigma, v3rhosigma2, v3sigma3,
          v3rho2tau, v3rhosigmatau, v3rhotau2, v3sigma2tau, v3sigmatau2, v3tau3
        * kxc for unrestricted case:
          | v3rho3[:,4]       = (u_u_u, u_u_d, u_d_d, d_d_d)
          | v3rho2sigma[:,9]  = (u_u_uu, u_u_ud, u_u_dd, u_d_uu, u_d_ud, u_d_dd, d_d_uu, d_d_ud, d_d_dd)
          | v3rhosigma2[:,12] = (u_uu_uu, u_uu_ud, u_uu_dd, u_ud_ud, u_ud_dd, u_dd_dd, d_uu_uu, d_uu_ud, d_uu_dd, d_ud_ud, d_ud_dd, d_dd_dd)
          | v3sigma3[:,10]    = (uu_uu_uu, uu_uu_ud, uu_uu_dd, uu_ud_ud, uu_ud_dd, uu_dd_dd, ud_ud_ud, ud_ud_dd, ud_dd_dd, dd_dd_dd)
          | v3rho2tau
          | v3rhosigmatau
          | v3rhotau2
          | v3sigma2tau
          | v3sigmatau2
          | v3tau3
        see also libxc_itrf.c
    '''
    # parse_xc validates xc_code; the hybrid coefficient it also returns is
    # not needed here (exact exchange is handled by the SCF driver).
    fn_facs = parse_xc(xc_code)[1]
    return _eval_xc(fn_facs, rho, spin, relativity, deriv, verbose)
# Flattened derivative-index constants: XC_Dab... names the mixed partial
# derivative order taken with respect to each input variable, and its value
# is the corresponding row offset in the XCFUN_eval_xc output buffer.
#
# One variable (restricted LDA: rho), derivatives up to 4th order.
XC_D0 = 0
XC_D1 = 1
XC_D2 = 2
XC_D3 = 3
XC_D4 = 4
# Two variables (restricted GGA: rho, sigma -- or unrestricted LDA:
# rho_a, rho_b), mixed derivatives up to total order 4.
XC_D00 = 0
XC_D10 = 1
XC_D01 = 2
XC_D20 = 3
XC_D11 = 4
XC_D02 = 5
XC_D30 = 6
XC_D21 = 7
XC_D12 = 8
XC_D03 = 9
XC_D40 = 10
XC_D31 = 11
XC_D22 = 12
XC_D13 = 13
XC_D04 = 14
# Three variables (restricted meta-GGA: rho, sigma, tau), up to total order 4.
XC_D000 = 0
XC_D100 = 1
XC_D010 = 2
XC_D001 = 3
XC_D200 = 4
XC_D110 = 5
XC_D101 = 6
XC_D020 = 7
XC_D011 = 8
XC_D002 = 9
XC_D300 = 10
XC_D210 = 11
XC_D201 = 12
XC_D120 = 13
XC_D111 = 14
XC_D102 = 15
XC_D030 = 16
XC_D021 = 17
XC_D012 = 18
XC_D003 = 19
XC_D400 = 20
XC_D310 = 21
XC_D301 = 22
XC_D220 = 23
XC_D211 = 24
XC_D202 = 25
XC_D130 = 26
XC_D121 = 27
XC_D112 = 28
XC_D103 = 29
XC_D040 = 30
XC_D031 = 31
XC_D022 = 32
XC_D013 = 33
XC_D004 = 34
# Five variables (unrestricted GGA: rho_a, rho_b, sigma_aa, sigma_ab,
# sigma_bb), mixed derivatives up to total order 4.
XC_D00000 = 0
XC_D10000 = 1
XC_D01000 = 2
XC_D00100 = 3
XC_D00010 = 4
XC_D00001 = 5
XC_D20000 = 6
XC_D11000 = 7
XC_D10100 = 8
XC_D10010 = 9
XC_D10001 = 10
XC_D02000 = 11
XC_D01100 = 12
XC_D01010 = 13
XC_D01001 = 14
XC_D00200 = 15
XC_D00110 = 16
XC_D00101 = 17
XC_D00020 = 18
XC_D00011 = 19
XC_D00002 = 20
XC_D30000 = 21
XC_D21000 = 22
XC_D20100 = 23
XC_D20010 = 24
XC_D20001 = 25
XC_D12000 = 26
XC_D11100 = 27
XC_D11010 = 28
XC_D11001 = 29
XC_D10200 = 30
XC_D10110 = 31
XC_D10101 = 32
XC_D10020 = 33
XC_D10011 = 34
XC_D10002 = 35
XC_D03000 = 36
XC_D02100 = 37
XC_D02010 = 38
XC_D02001 = 39
XC_D01200 = 40
XC_D01110 = 41
XC_D01101 = 42
XC_D01020 = 43
XC_D01011 = 44
XC_D01002 = 45
XC_D00300 = 46
XC_D00210 = 47
XC_D00201 = 48
XC_D00120 = 49
XC_D00111 = 50
XC_D00102 = 51
XC_D00030 = 52
XC_D00021 = 53
XC_D00012 = 54
XC_D00003 = 55
XC_D40000 = 56
XC_D31000 = 57
XC_D30100 = 58
XC_D30010 = 59
XC_D30001 = 60
XC_D22000 = 61
XC_D21100 = 62
XC_D21010 = 63
XC_D21001 = 64
XC_D20200 = 65
XC_D20110 = 66
XC_D20101 = 67
XC_D20020 = 68
XC_D20011 = 69
XC_D20002 = 70
XC_D13000 = 71
XC_D12100 = 72
XC_D12010 = 73
XC_D12001 = 74
XC_D11200 = 75
XC_D11110 = 76
XC_D11101 = 77
XC_D11020 = 78
XC_D11011 = 79
XC_D11002 = 80
XC_D10300 = 81
XC_D10210 = 82
XC_D10201 = 83
XC_D10120 = 84
XC_D10111 = 85
XC_D10102 = 86
XC_D10030 = 87
XC_D10021 = 88
XC_D10012 = 89
XC_D10003 = 90
XC_D04000 = 91
XC_D03100 = 92
XC_D03010 = 93
XC_D03001 = 94
XC_D02200 = 95
XC_D02110 = 96
XC_D02101 = 97
XC_D02020 = 98
XC_D02011 = 99
XC_D02002 = 100
XC_D01300 = 101
XC_D01210 = 102
XC_D01201 = 103
XC_D01120 = 104
XC_D01111 = 105
XC_D01102 = 106
XC_D01030 = 107
XC_D01021 = 108
XC_D01012 = 109
XC_D01003 = 110
XC_D00400 = 111
XC_D00310 = 112
XC_D00301 = 113
XC_D00220 = 114
XC_D00211 = 115
XC_D00202 = 116
XC_D00130 = 117
XC_D00121 = 118
XC_D00112 = 119
XC_D00103 = 120
XC_D00040 = 121
XC_D00031 = 122
XC_D00022 = 123
XC_D00013 = 124
XC_D00004 = 125
# Seven variables (unrestricted meta-GGA: rho_a, rho_b, sigma_aa, sigma_ab,
# sigma_bb, tau_a, tau_b), mixed derivatives up to total order 3.
XC_D0000000 = 0
XC_D1000000 = 1
XC_D0100000 = 2
XC_D0010000 = 3
XC_D0001000 = 4
XC_D0000100 = 5
XC_D0000010 = 6
XC_D0000001 = 7
XC_D2000000 = 8
XC_D1100000 = 9
XC_D1010000 = 10
XC_D1001000 = 11
XC_D1000100 = 12
XC_D1000010 = 13
XC_D1000001 = 14
XC_D0200000 = 15
XC_D0110000 = 16
XC_D0101000 = 17
XC_D0100100 = 18
XC_D0100010 = 19
XC_D0100001 = 20
XC_D0020000 = 21
XC_D0011000 = 22
XC_D0010100 = 23
XC_D0010010 = 24
XC_D0010001 = 25
XC_D0002000 = 26
XC_D0001100 = 27
XC_D0001010 = 28
XC_D0001001 = 29
XC_D0000200 = 30
XC_D0000110 = 31
XC_D0000101 = 32
XC_D0000020 = 33
XC_D0000011 = 34
XC_D0000002 = 35
XC_D3000000 = 36
XC_D2100000 = 37
XC_D2010000 = 38
XC_D2001000 = 39
XC_D2000100 = 40
XC_D2000010 = 41
XC_D2000001 = 42
XC_D1200000 = 43
XC_D1110000 = 44
XC_D1101000 = 45
XC_D1100100 = 46
XC_D1100010 = 47
XC_D1100001 = 48
XC_D1020000 = 49
XC_D1011000 = 50
XC_D1010100 = 51
XC_D1010010 = 52
XC_D1010001 = 53
XC_D1002000 = 54
XC_D1001100 = 55
XC_D1001010 = 56
XC_D1001001 = 57
XC_D1000200 = 58
XC_D1000110 = 59
XC_D1000101 = 60
XC_D1000020 = 61
XC_D1000011 = 62
XC_D1000002 = 63
XC_D0300000 = 64
XC_D0210000 = 65
XC_D0201000 = 66
XC_D0200100 = 67
XC_D0200010 = 68
XC_D0200001 = 69
XC_D0120000 = 70
XC_D0111000 = 71
XC_D0110100 = 72
XC_D0110010 = 73
XC_D0110001 = 74
XC_D0102000 = 75
XC_D0101100 = 76
XC_D0101010 = 77
XC_D0101001 = 78
XC_D0100200 = 79
XC_D0100110 = 80
XC_D0100101 = 81
XC_D0100020 = 82
XC_D0100011 = 83
XC_D0100002 = 84
XC_D0030000 = 85
XC_D0021000 = 86
XC_D0020100 = 87
XC_D0020010 = 88
XC_D0020001 = 89
XC_D0012000 = 90
XC_D0011100 = 91
XC_D0011010 = 92
XC_D0011001 = 93
XC_D0010200 = 94
XC_D0010110 = 95
XC_D0010101 = 96
XC_D0010020 = 97
XC_D0010011 = 98
XC_D0010002 = 99
XC_D0003000 = 100
XC_D0002100 = 101
XC_D0002010 = 102
XC_D0002001 = 103
XC_D0001200 = 104
XC_D0001110 = 105
XC_D0001101 = 106
XC_D0001020 = 107
XC_D0001011 = 108
XC_D0001002 = 109
XC_D0000300 = 110
XC_D0000210 = 111
XC_D0000201 = 112
XC_D0000120 = 113
XC_D0000111 = 114
XC_D0000102 = 115
XC_D0000030 = 116
XC_D0000021 = 117
XC_D0000012 = 118
XC_D0000003 = 119
def _eval_xc(fn_facs, rho, spin=0, relativity=0, deriv=1, verbose=None):
    """Evaluate a parsed linear combination of xcfun functionals on a grid.

    Args:
        fn_facs: sequence of (functional_id, factor) pairs, as produced by
            :func:`parse_xc`.
        rho: density (and derivatives); a (*,N) array for spin=0, or a pair
            of such arrays for the spin-polarized case.
        spin: spin polarized if spin > 0.
        relativity: no effects (kept for libxc interface compatibility).
        deriv: order of functional derivatives to compute (0-3).
        verbose: no effects.

    Returns:
        exc, vxc, fxc, kxc in the layout documented by :func:`eval_xc`.
    """
    assert deriv < 4  # only derivatives up to 3rd order are wired up
    if spin == 0:
        rho_u = rho_d = numpy.asarray(rho, order='C')
    else:
        rho_u = numpy.asarray(rho[0], order='C')
        rho_d = numpy.asarray(rho[1], order='C')
    if rho_u.ndim == 2:
        ngrids = rho_u.shape[1]
    else:
        ngrids = len(rho_u)
    fn_ids = [x[0] for x in fn_facs]
    facs = [x[1] for x in fn_facs]
    # nvar = number of independent density variables passed to xcfun;
    # it determines the layout (and length) of the output buffer.
    if all((is_lda(x) for x in fn_ids)):  # LDA
        if spin == 0:
            nvar = 1
        else:
            nvar = 2
    elif any((is_meta_gga(x) for x in fn_ids)):
        raise RuntimeError('xcfun MGGA interface not correct')
        if spin == 0:
            nvar = 3
        else:
            nvar = 7
    else:  # GGA
        if spin == 0:
            nvar = 2
        else:
            nvar = 5
    # Number of distinct mixed partial derivatives up to order `deriv`
    # in nvar variables: C(nvar+deriv, deriv).
    outlen = (math.factorial(nvar+deriv) //
              (math.factorial(nvar) * math.factorial(deriv)))
    outbuf = numpy.empty((ngrids,outlen))
    n = len(fn_ids)
    _itrf.XCFUN_eval_xc(ctypes.c_int(n),
                        (ctypes.c_int*n)(*fn_ids), (ctypes.c_double*n)(*facs),
                        ctypes.c_int(spin),
                        ctypes.c_int(deriv), ctypes.c_int(ngrids),
                        rho_u.ctypes.data_as(ctypes.c_void_p),
                        rho_d.ctypes.data_as(ctypes.c_void_p),
                        outbuf.ctypes.data_as(ctypes.c_void_p))
    # Transpose so outbuf[i] is the i-th derivative row over all grid points.
    outbuf = outbuf.T
    exc = outbuf[0]
    vxc = fxc = kxc = None
    if nvar == 1:  # restricted LDA
        if deriv > 0:
            vxc = (outbuf[1], None, None, None)
        if deriv > 1:
            fxc = (outbuf[2],) + (None,)*9
        if deriv > 2:
            kxc = (outbuf[3], None, None, None)
    elif nvar == 2:
        if spin == 0:  # restricted GGA: variables are (rho, sigma)
            if deriv > 0:
                vxc = (outbuf[1], outbuf[2], None, None)
            if deriv > 1:
                fxc = (outbuf[3], outbuf[4], outbuf[5],) + (None,)*7
            if deriv > 2:
                kxc = outbuf[6:10]
        else:  # unrestricted LDA: variables are (rho_a, rho_b)
            if deriv > 0:
                vxc = (outbuf[1:3].T, None, None, None)
            if deriv > 1:
                fxc = (outbuf[3:6].T,) + (None,)*9
            if deriv > 2:
                kxc = (outbuf[6:10].T, None, None, None)
    elif nvar == 5:  # unrestricted GGA
        if deriv > 0:
            vxc = (outbuf[1:3].T, outbuf[3:6].T, None, None)
        if deriv > 1:
            fxc = (outbuf[[XC_D20000,XC_D11000,XC_D02000]].T,
                   outbuf[[XC_D10100,XC_D10010,XC_D10001,
                           XC_D01100,XC_D01010,XC_D01001]].T,
                   outbuf[[XC_D00200,XC_D00110,XC_D00101,XC_D00020,XC_D00011,XC_D00002]].T) + (None,)*7
        if deriv > 2:
            kxc = (outbuf[[XC_D30000,XC_D21000,XC_D12000,XC_D03000]].T,
                   outbuf[[XC_D20100,XC_D20010,XC_D20001,
                           XC_D11100,XC_D11010,XC_D11001,
                           XC_D02100,XC_D02010,XC_D02001]].T,
                   outbuf[[XC_D10200,XC_D10110,XC_D10101,XC_D10020,XC_D10011,XC_D10002,
                           XC_D01200,XC_D01110,XC_D01101,XC_D01020,XC_D01011,XC_D01002]].T,
                   outbuf[[XC_D00300,XC_D00210,XC_D00201,XC_D00120,XC_D00111,
                           XC_D00102,XC_D00030,XC_D00021,XC_D00012,XC_D00003]].T)
    # MGGA/MLGGA: Note the MLGGA interface are not implemented. MGGA only needs 3
    # input arguments. To make the interface compatible with libxc, treat MGGA as
    # MLGGA
    elif nvar == 3:  # restricted meta-GGA (currently unreachable, see raise above)
        if deriv > 0:
            vxc = (outbuf[1], outbuf[2], numpy.zeros_like(outbuf[1]), outbuf[3])
        if deriv > 1:
            fxc = (outbuf[XC_D200], outbuf[XC_D110], outbuf[XC_D020],
                   None, outbuf[XC_D002], None, outbuf[XC_D101], None, None, outbuf[XC_D011])
        if deriv > 2:
            # Fixed: these previously read from `output`, an undefined name.
            kxc = (outbuf[XC_D300], outbuf[XC_D210], outbuf[XC_D120], outbuf[XC_D030],
                   outbuf[XC_D201], outbuf[XC_D111], outbuf[XC_D102],
                   outbuf[XC_D021], outbuf[XC_D012], outbuf[XC_D003])
    elif nvar == 7:  # unrestricted meta-GGA (currently unreachable, see raise above)
        if deriv > 0:
            vxc = (outbuf[1:3].T, outbuf[3:6].T, None, outbuf[6:8].T)
        if deriv > 1:
            fxc = (outbuf[[XC_D2000000,XC_D1100000,XC_D0200000]].T,
                   outbuf[[XC_D1010000,XC_D1001000,XC_D1000100,
                           XC_D0110000,XC_D0101000,XC_D0100100]].T,
                   outbuf[[XC_D0020000,XC_D0011000,XC_D0010100,
                           XC_D0002000,XC_D0001100,XC_D0000200]].T,
                   None,
                   outbuf[[XC_D0000020,XC_D0000011,XC_D0000002]].T,
                   None,
                   outbuf[[XC_D1000010,XC_D1000001,XC_D0100010,XC_D0100001]].T,
                   None, None,
                   outbuf[[XC_D0010010,XC_D0010001,XC_D0001010,XC_D0001001,
                           XC_D0000110,XC_D0000101]].T)
        if deriv > 2:
            # Fixed: the tau-containing rows previously read from `output`,
            # an undefined name (NameError at runtime).
            kxc = (outbuf[[XC_D3000000,XC_D2100000,XC_D1200000,XC_D0300000]].T,
                   outbuf[[XC_D2010000,XC_D2001000,XC_D2000100,
                           XC_D1110000,XC_D1101000,XC_D1100100,
                           XC_D0210000,XC_D0201000,XC_D0200100]].T,
                   outbuf[[XC_D1020000,XC_D1011000,XC_D1010100,XC_D1002000,XC_D1001100,XC_D1000200,
                           XC_D0120000,XC_D0111000,XC_D0110100,XC_D0102000,XC_D0101100,XC_D0100200]].T,
                   outbuf[[XC_D0030000,XC_D0021000,XC_D0020100,XC_D0012000,XC_D0011100,
                           XC_D0010200,XC_D0003000,XC_D0002100,XC_D0001200,XC_D0000300]].T,
                   outbuf[[XC_D2000010,XC_D2000001,XC_D1100010,XC_D1100001,XC_D0200010,XC_D0200001]].T,
                   outbuf[[XC_D1010010,XC_D1010001,XC_D1001010,XC_D1001001,XC_D1000110,XC_D1000101,
                           XC_D0110010,XC_D0110001,XC_D0101010,XC_D0101001,XC_D0100110,XC_D0100101]].T,
                   outbuf[[XC_D1000020,XC_D1000011,XC_D1000002,XC_D0100020,XC_D0100011,XC_D0100002]].T,
                   outbuf[[XC_D0020010,XC_D0020001,XC_D0011010,XC_D0011001,XC_D0010110,XC_D0010101,
                           XC_D0002010,XC_D0002001,XC_D0001110,XC_D0001101,XC_D0000210,XC_D0000201]].T,
                   outbuf[[XC_D0010020,XC_D0010011,XC_D0010002,
                           XC_D0001020,XC_D0001011,XC_D0001002,
                           XC_D0000120,XC_D0000111,XC_D0000102]].T,
                   outbuf[[XC_D0000030,XC_D0000021,XC_D0000012,XC_D0000003]].T)
    return exc, vxc, fxc, kxc
def define_xc_(ni, description, xctype='LDA', hyb=0):
    '''Define XC functional on a numerical-integration object, in place.

    See :func:`eval_xc` for the rules used to parse a functional description
    string.

    Args:
        ni : an instance of :class:`_NumInt`
        description : str or callable
            Either a string describing a linear combination of XC functionals
            (X and C parts separated by comma like '.8*LDA+.2*B86,VWN'; "HF"
            stands for the exact exchange), or a function with the same
            signature as :func:`eval_xc`.
        xctype : str
            Functional family ('LDA', 'GGA', ...); only used when description
            is a callable.
        hyb : float
            Exact-exchange fraction; only used when description is a callable.

    Examples:

    >>> mol = gto.M(atom='O 0 0 0; H 0 0 1; H 0 1 0', basis='ccpvdz')
    >>> mf = dft.RKS(mol)
    >>> define_xc_(mf._numint, '.2*HF + .08*LDA + .72*B88, .81*LYP + .19*VWN')
    >>> mf.kernel()
    -76.3783361189611
    >>> def eval_xc(xc_code, rho, *args, **kwargs):
    ...     exc = 0.01 * rho**2
    ...     vrho = 0.01 * 2 * rho
    ...     return exc, (vrho, None, None, None), None, None
    >>> define_xc_(mf._numint, eval_xc, xctype='LDA')
    >>> mf.kernel()
    48.8525211046668
    '''
    if callable(description):
        # User-supplied evaluator: install it directly, with the caller's
        # declared family and exact-exchange fraction.
        ni.eval_xc = description
        ni.hybrid_coeff = lambda *args, **kwargs: hyb
        ni._xc_type = lambda *args: xctype
    elif isinstance(description, str):
        # Functional description string: defer parsing to eval_xc and the
        # module-level helpers, binding the description via closures.
        def _bound_eval_xc(xc_code, rho, *args, **kwargs):
            return eval_xc(description, rho, *args, **kwargs)
        ni.eval_xc = _bound_eval_xc
        ni.hybrid_coeff = lambda *args, **kwargs: hybrid_coeff(description)
        ni._xc_type = lambda *args: xc_type(description)
    else:
        raise RuntimeError('Unknown description %s' % description)
    return ni
def define_xc(ni, description):
    # Non-destructive variant of define_xc_: configures a shallow copy of ni
    # and returns it, leaving the original untouched.
    return define_xc_(copy.copy(ni), description)
# Reuse the in-place variant's docstring for interactive help().
define_xc.__doc__ = define_xc_.__doc__
if __name__ == '__main__':
    from pyscf import gto, dft
    # Water molecule in the 6-311G basis; hydrogens specified by nuclear charge.
    mol = gto.M(
        atom=[
            ["O", (0., 0., 0.)],
            [1, (0., -0.757, 0.587)],
            [1, (0., 0.757, 0.587)],
        ],
        basis='6311g',
    )
    mf = dft.RKS(mol)
    mf._numint.libxc = dft.xcfun
    # Each printed difference should be ~0 if this module reproduces the
    # reference energies.
    print(mf.kernel() - -75.8503877483363)
    for xc, ref in (('b88,lyp', -76.3969707800463),
                    ('b3lyp', -76.3969707800463)):
        mf.xc = xc
        print(mf.kernel() - ref)
| 31.182143 | 140 | 0.597221 |
fa870e5c9ad5f65e7bb5bfb556cdafda53687457 | 13,395 | py | Python | qiskit/algorithms/optimizers/aqgd.py | gadial/qiskit-terra | 0fc83f44a6e80969875c738b2cee7bc33223e45f | [
"Apache-2.0"
] | 1 | 2021-10-05T11:56:53.000Z | 2021-10-05T11:56:53.000Z | qiskit/algorithms/optimizers/aqgd.py | gadial/qiskit-terra | 0fc83f44a6e80969875c738b2cee7bc33223e45f | [
"Apache-2.0"
] | 24 | 2021-01-27T08:20:27.000Z | 2021-07-06T09:42:28.000Z | qiskit/algorithms/optimizers/aqgd.py | gadial/qiskit-terra | 0fc83f44a6e80969875c738b2cee7bc33223e45f | [
"Apache-2.0"
] | 4 | 2021-10-05T12:07:27.000Z | 2022-01-28T18:37:28.000Z | # This code is part of Qiskit.
#
# (C) Copyright IBM 2019, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Analytical Quantum Gradient Descent (AQGD) optimizer."""
import logging
from typing import Callable, Tuple, List, Dict, Union
import numpy as np
from qiskit.utils.validation import validate_range_exclusive_max
from .optimizer import Optimizer, OptimizerSupportLevel
from ..exceptions import AlgorithmError
logger = logging.getLogger(__name__)
class AQGD(Optimizer):
    """Analytic Quantum Gradient Descent (AQGD) with Epochs optimizer.
    Performs gradient descent optimization with a momentum term, analytic gradients,
    and customized step length schedule for parameterized quantum gates, i.e.
    Pauli Rotations. See, for example:
    * K. Mitarai, M. Negoro, M. Kitagawa, and K. Fujii. (2018).
    Quantum circuit learning. Phys. Rev. A 98, 032309.
    https://arxiv.org/abs/1803.00745
    * Maria Schuld, Ville Bergholm, Christian Gogolin, Josh Izaac, Nathan Killoran. (2019).
    Evaluating analytic gradients on quantum hardware. Phys. Rev. A 99, 032331.
    https://arxiv.org/abs/1811.11184
    for further details on analytic gradients of parameterized quantum gates.
    Gradients are computed "analytically" using the quantum circuit when evaluating
    the objective function.
    """
    # Keyword options recognized by this optimizer.
    # NOTE(review): 'disp' is listed here but not accepted by __init__ -- confirm.
    _OPTIONS = ['maxiter', 'eta', 'tol', 'disp', 'momentum', 'param_tol', 'averaging']
    def __init__(self,
                 maxiter: Union[int, List[int]] = 1000,
                 eta: Union[float, List[float]] = 1.0,
                 tol: float = 1e-6,  # objective-value convergence tolerance
                 momentum: Union[float, List[float]] = 0.25,
                 param_tol: float = 1e-6,
                 averaging: int = 10) -> None:
        """
        Performs Analytical Quantum Gradient Descent (AQGD) with Epochs.
        Args:
            maxiter: Maximum number of iterations (full gradient steps)
            eta: The coefficient of the gradient update. Increasing this value
                results in larger step sizes: param = previous_param - eta * deriv
            tol: Tolerance for change in windowed average of objective values.
                Convergence occurs when either objective tolerance is met OR parameter
                tolerance is met.
            momentum: Bias towards the previous gradient momentum in current
                update. Must be within the bounds: [0,1)
            param_tol: Tolerance for change in norm of parameters.
            averaging: Length of window over which to average objective values for objective
                convergence criterion
        Raises:
            AlgorithmError: If the length of ``maxiter``, `momentum``, and ``eta`` is not the same.
        """
        super().__init__()
        # Scalars are promoted to one-element lists: each list entry defines
        # one "epoch" with its own (maxiter, eta, momentum) schedule.
        if isinstance(maxiter, int):
            maxiter = [maxiter]
        if isinstance(eta, (int, float)):
            eta = [eta]
        if isinstance(momentum, (int, float)):
            momentum = [momentum]
        if len(maxiter) != len(eta) or len(maxiter) != len(momentum):
            raise AlgorithmError("AQGD input parameter length mismatch. Parameters `maxiter`, "
                                 "`eta`, and `momentum` must have the same length.")
        for m in momentum:
            validate_range_exclusive_max('momentum', m, 0, 1)
        self._eta = eta
        self._maxiter = maxiter
        self._momenta_coeff = momentum
        self._param_tol = param_tol
        self._tol = tol
        self._averaging = averaging
        # state
        self._avg_objval = None
        self._prev_param = None
        self._eval_count = 0    # function evaluations
        self._prev_loss = []   # type: List[float]   # window of recent objective values
        self._prev_grad = []   # type: List[List[float]]   # window of recent gradients
    def get_support_level(self) -> Dict[str, OptimizerSupportLevel]:
        """ Support level dictionary
        Returns:
            Dict[str, int]: gradient, bounds and initial point
                            support information that is ignored/required.
        """
        return {
            'gradient': OptimizerSupportLevel.ignored,
            'bounds': OptimizerSupportLevel.ignored,
            'initial_point': OptimizerSupportLevel.required
        }
    def _compute_objective_fn_and_gradient(self, params: List[float],
                                           obj: Callable) -> Tuple[float, np.ndarray]:
        """
        Obtains the objective function value for params and the analytical quantum derivatives of
        the objective function with respect to each parameter. Requires
        2*(number parameters) + 1 objective evaluations
        Args:
            params: Current value of the parameters to evaluate the objective function
            obj: Objective function of interest
        Returns:
            Tuple containing the objective value and array of gradients for the given parameter set.
        """
        num_params = len(params)
        # Stack the unshifted point plus the +pi/2 and -pi/2 shift of each
        # parameter (parameter-shift rule), so one batched call evaluates all.
        param_sets_to_eval = params + np.concatenate(
            (np.zeros((1, num_params)),  # copy of the parameters as is
             np.eye(num_params) * np.pi / 2,  # copy of the parameters with the positive shift
             -np.eye(num_params) * np.pi / 2),  # copy of the parameters with the negative shift
            axis=0)
        # Evaluate,
        # reshaping to flatten, as expected by objective function
        values = np.array(obj(param_sets_to_eval.reshape(-1)))
        # Update number of objective function evaluations
        self._eval_count += 2 * num_params + 1
        # return the objective function value
        obj_value = values[0]
        # return the gradient values
        # Parameter-shift gradient: (f(x + pi/2) - f(x - pi/2)) / 2 per parameter.
        gradient = 0.5 * (values[1:num_params + 1] - values[1 + num_params:])
        return obj_value, gradient
    def _update(self, params: np.ndarray, gradient: np.ndarray, mprev: np.ndarray,
                step_size: float, momentum_coeff: float) -> Tuple[List[float], List[float]]:
        """
        Updates full parameter array based on a step that is a convex
        combination of the gradient and previous momentum
        Args:
            params: Current value of the parameters to evaluate the objective function at
            gradient: Gradient of objective wrt parameters
            mprev: Momentum vector for each parameter
            step_size: The scaling of step to take
            momentum_coeff: Bias towards previous momentum vector when updating current
                momentum/step vector
        Returns:
            Tuple of the updated parameter and momentum vectors respectively.
        """
        # Momentum update:
        # Convex combination of previous momentum and current gradient estimate
        mnew = (1 - momentum_coeff) * gradient + momentum_coeff * mprev
        params -= step_size * mnew  # in-place descent step on the parameter array
        return params, mnew
    def _converged_objective(self, objval: float, tol: float, window_size: int) -> bool:
        """
        Tests convergence based on the change in a moving windowed average of past objective values
        Args:
            objval: Current value of the objective function
            tol: tolerance below which (average) objective function change must be
            window_size: size of averaging window
        Returns:
            Bool indicating whether or not the optimization has converged.
        """
        # If we haven't reached the required window length,
        # append the current value, but we haven't converged
        if len(self._prev_loss) < window_size:
            self._prev_loss.append(objval)
            return False
        # Update last value in list with current value
        self._prev_loss.append(objval)
        # (length now = n+1)
        # Calculate previous windowed average
        # and current windowed average of objective values
        # The two windows overlap by window_size-1 elements and differ only
        # in the oldest vs. newest value.
        prev_avg = np.mean(self._prev_loss[:window_size])
        curr_avg = np.mean(self._prev_loss[1:window_size + 1])
        self._avg_objval = curr_avg
        # Update window of objective values
        # (Remove earliest value)
        self._prev_loss.pop(0)
        if np.absolute(prev_avg - curr_avg) < tol:
            # converged
            logger.info("Previous obj avg: %f\nCurr obj avg: %f", prev_avg, curr_avg)
            return True
        return False
    def _converged_parameter(self, parameter: List[float], tol: float) -> bool:
        """
        Tests convergence based on change in parameter
        Args:
            parameter: current parameter values
            tol: tolerance for change in norm of parameters
        Returns:
            Bool indicating whether or not the optimization has converged
        """
        # First call: nothing to compare against yet.
        if self._prev_param is None:
            self._prev_param = np.copy(parameter)
            return False
        order = np.inf  # max-norm of the parameter change
        p_change = np.linalg.norm(self._prev_param - parameter, ord=order)
        if p_change < tol:
            # converged
            logger.info("Change in parameters (%f norm): %f", order, p_change)
            return True
        return False
    def _converged_alt(self, gradient: List[float], tol: float, window_size: int) -> bool:
        """
        Tests convergence from norm of windowed average of gradients
        Args:
            gradient: current gradient
            tol: tolerance for average gradient norm
            window_size: size of averaging window
        Returns:
            Bool indicating whether or not the optimization has converged
        """
        # If we haven't reached the required window length,
        # append the current value, but we haven't converged
        # (note: fill threshold is window_size-1 here, one less than in
        # _converged_objective, since no overlapping windows are needed)
        if len(self._prev_grad) < window_size - 1:
            self._prev_grad.append(gradient)
            return False
        # Update last value in list with current value
        self._prev_grad.append(gradient)
        # (length now = n)
        # Calculate previous windowed average
        # and current windowed average of objective values
        avg_grad = np.mean(self._prev_grad, axis=0)
        # Update window of values
        # (Remove earliest value)
        self._prev_grad.pop(0)
        if np.linalg.norm(avg_grad, ord=np.inf) < tol:
            # converged
            logger.info("Avg. grad. norm: %f", np.linalg.norm(avg_grad, ord=np.inf))
            return True
        return False
    def optimize(self,
                 num_vars: int,
                 objective_function: Callable,
                 gradient_function: Callable = None,
                 variable_bounds: List[Tuple[float, float]] = None,
                 initial_point: np.ndarray = None) -> Tuple[np.ndarray, float, int]:
        # See Optimizer.optimize for the argument/return contract; the base
        # call below only validates the inputs against the support levels.
        super().optimize(num_vars, objective_function, gradient_function, variable_bounds,
                         initial_point)
        params = np.array(initial_point)
        momentum = np.zeros(shape=(num_vars,))
        # empty out history of previous objectives/gradients/parameters
        # (in case this object is re-used)
        self._prev_loss = []
        self._prev_grad = []
        self._prev_param = None
        self._eval_count = 0    # function evaluations
        iter_count = 0
        logger.info("Initial Params: %s", params)
        epoch = 0
        converged = False
        # One pass per (eta, momentum) pair; each epoch gets its own slice of
        # the iteration budget from self._maxiter.
        for (eta, mom_coeff) in zip(self._eta, self._momenta_coeff):
            logger.info("Epoch: %4d | Stepsize: %6.4f | Momentum: %6.4f", epoch, eta, mom_coeff)
            # Cumulative iteration cap through the current epoch.
            sum_max_iters = sum(self._maxiter[0:epoch + 1])
            while iter_count < sum_max_iters:
                # update the iteration count
                iter_count += 1
                # Check for parameter convergence before potentially costly function evaluation
                converged = self._converged_parameter(params, self._param_tol)
                if converged:
                    break
                # Calculate objective function and estimate of analytical gradient
                if gradient_function is None:
                    objval, gradient = \
                        self._compute_objective_fn_and_gradient(params, objective_function)
                else:
                    objval = objective_function(params)
                    gradient = gradient_function(params)
                logger.info(" Iter: %4d | Obj: %11.6f | Grad Norm: %f",
                            iter_count, objval, np.linalg.norm(gradient, ord=np.inf))
                # Check for objective convergence
                converged = self._converged_objective(objval, self._tol, self._averaging)
                if converged:
                    break
                # Update parameters and momentum
                params, momentum = self._update(params, gradient, momentum, eta, mom_coeff)
            # end inner iteration
            # if converged, end iterating over epochs
            if converged:
                break
            epoch += 1
        # end epoch iteration
        # return last parameter values, objval estimate, and objective evaluation count
        return params, objval, self._eval_count
| 40.468278 | 100 | 0.617096 |
6559d35fe7c8cba23463979d48e8ff09c8d71274 | 1,380 | py | Python | main.py | jalaj-07/marx | 92191e844a09bff8c7adfb6bea1f5130ec2b4841 | [
"MIT"
] | 1 | 2022-02-07T10:52:02.000Z | 2022-02-07T10:52:02.000Z | main.py | jalaj-07/marx | 92191e844a09bff8c7adfb6bea1f5130ec2b4841 | [
"MIT"
] | null | null | null | main.py | jalaj-07/marx | 92191e844a09bff8c7adfb6bea1f5130ec2b4841 | [
"MIT"
] | null | null | null | from typing import Dict
import flask
import json
from bson.objectid import ObjectId
from pymongo import MongoClient
# Local MongoDB instance on the default port; notes live in the
# 'notes' collection of the 'marx' database.
client = MongoClient("localhost", 27017)
db = client['marx']
notes = db['notes']
# Serve the compiled front-end from ./build under the /web prefix.
app = flask.Flask(__name__, static_folder="./build", static_url_path="/web")
def _convert_obj_id(lst):
for i in range(len(lst)):
d = {}
d.update(lst[i])
if "_id" in d:
d['_id'] = str(d['_id'])
lst[i] = d
return lst
@app.route("/api/get_notes")
def get_notes():
    """Return every stored note as a JSON array."""
    all_notes = _convert_obj_id(list(notes.find()))
    return json.dumps(all_notes)
@app.route("/api/create_note", methods=["POST"])
def create_note():
    """Insert a note from the JSON request body; respond with all notes."""
    payload: Dict = json.loads(flask.request.get_data(as_text=True))
    if "title" not in payload:
        return "Title not provided", 400
    notes.insert_one({"title": payload['title'],
                      "desc": payload.get("desc", None)})
    return json.dumps(_convert_obj_id(list(notes.find())))
@app.route("/api/delete_note", methods=["POST"])
def delete_note():
    """Delete the note whose id is in the JSON body; respond with the rest."""
    payload: Dict = json.loads(flask.request.get_data(as_text=True))
    if "id" not in payload:
        return "ID not provided", 400
    notes.delete_one({"_id": ObjectId(payload['id'])})
    return json.dumps(_convert_obj_id(list(notes.find())))
def index():
return flask.send_file("./build/index.html")
if __name__ == "__main__":
app.run()
| 26.538462 | 76 | 0.634058 |
f704311c1696242df8f2316227f5b99a2b3d08b4 | 506 | py | Python | Week1/Lecture2/Fexes/l2f1.py | MorbidValkyria/MIT6.0001x | 3c80ffd50871387f560c2e820ad1fa05c61d9132 | [
"MIT"
] | null | null | null | Week1/Lecture2/Fexes/l2f1.py | MorbidValkyria/MIT6.0001x | 3c80ffd50871387f560c2e820ad1fa05c61d9132 | [
"MIT"
] | null | null | null | Week1/Lecture2/Fexes/l2f1.py | MorbidValkyria/MIT6.0001x | 3c80ffd50871387f560c2e820ad1fa05c61d9132 | [
"MIT"
] | null | null | null | """
1) "a" + "bc" -> abc
2) 3 * "bc" -> bcbcbc
3) "3" * "bc" -> error as we can't use the * operator on two strings
4) abcd"[2] -> c (Just takes the character at index 2 in the string. a has index 0 and b index 1)
5) "abcd"[0:2] -> ab (Returns the substring from index 0 all the way to index n -1 in this case b)
6) "abcd"[:2] -> ab (Not giving a starting value to slice the string we start at 0)
7) "abcd"[2:] -> cd (When we don't give an end value it goes all the way to the end of the string)
""" | 31.625 | 98 | 0.626482 |
fc0934a20ac4500a2962f86dbed63c737f72823c | 2,326 | py | Python | pyabsa/core/apc/classic/__bert__/models/aoa_bert.py | brightgems/PyABSA | f51d18da12f12759cfc972369736e46232627d4d | [
"MIT"
] | null | null | null | pyabsa/core/apc/classic/__bert__/models/aoa_bert.py | brightgems/PyABSA | f51d18da12f12759cfc972369736e46232627d4d | [
"MIT"
] | null | null | null | pyabsa/core/apc/classic/__bert__/models/aoa_bert.py | brightgems/PyABSA | f51d18da12f12759cfc972369736e46232627d4d | [
"MIT"
] | 1 | 2022-03-01T08:35:37.000Z | 2022-03-01T08:35:37.000Z | # -*- coding: utf-8 -*-
# file: aoa.py
# author: gene_zc <gene_zhangchen@163.com>
# Copyright (C) 2018. All Rights Reserved.
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..layers.dynamic_rnn import DynamicLSTM
class AOA_BERT(nn.Module):
    """Attention-over-Attention (AoA) network with a BERT embedding layer.

    Context and aspect token sequences are embedded by BERT, encoded by
    separate BiLSTMs, and combined via row/column softmaxes over their
    interaction matrix to produce an attention-weighted context vector,
    which a linear layer maps to polarity logits.
    """
    # Batch-dict keys this model consumes (the left_*/right_* fields are
    # declared but not read by forward() -- presumably used by the
    # surrounding framework; confirm before removing).
    inputs = ['text_bert_indices', 'aspect_indices', 'left_text_bert_indices', 'left_aspect_indices', 'right_text_bert_indices', 'right_aspect_indices']
    def __init__(self, bert, opt):
        """bert: pretrained embedder; opt: config with embed_dim,
        hidden_dim and polarities_dim attributes."""
        super(AOA_BERT, self).__init__()
        self.opt = opt
        self.embed = bert
        # Separate BiLSTM encoders for the context and the aspect.
        self.ctx_lstm = DynamicLSTM(opt.embed_dim, opt.hidden_dim, num_layers=1, batch_first=True, bidirectional=True)
        self.asp_lstm = DynamicLSTM(opt.embed_dim, opt.hidden_dim, num_layers=1, batch_first=True, bidirectional=True)
        self.dense = nn.Linear(2 * opt.hidden_dim, opt.polarities_dim)
    def forward(self, inputs):
        text_bert_indices = inputs['text_bert_indices'] # batch_size x seq_len
        aspect_indices = inputs['aspect_indices'] # batch_size x seq_len
        # Non-padding lengths; token id 0 is treated as padding.
        ctx_len = torch.sum(text_bert_indices != 0, dim=1)
        asp_len = torch.sum(aspect_indices != 0, dim=1)
        ctx = self.embed(text_bert_indices)['last_hidden_state'] # batch_size x seq_len x embed_dim
        asp = self.embed(aspect_indices)['last_hidden_state'] # batch_size x seq_len x embed_dim
        ctx_out, (_, _) = self.ctx_lstm(ctx, ctx_len) # batch_size x (ctx) seq_len x 2*hidden_dim
        asp_out, (_, _) = self.asp_lstm(asp, asp_len) # batch_size x (asp) seq_len x 2*hidden_dim
        # Pairwise interaction scores between context and aspect positions.
        interaction_mat = torch.matmul(ctx_out,
                                       torch.transpose(asp_out, 1, 2)) # batch_size x (ctx) seq_len x (asp) seq_len
        alpha = F.softmax(interaction_mat, dim=1) # col-wise, batch_size x (ctx) seq_len x (asp) seq_len
        beta = F.softmax(interaction_mat, dim=2) # row-wise, batch_size x (ctx) seq_len x (asp) seq_len
        beta_avg = beta.mean(dim=1, keepdim=True) # batch_size x 1 x (asp) seq_len
        # Final per-context-token attention: alpha weighted by the averaged beta.
        gamma = torch.matmul(alpha, beta_avg.transpose(1, 2)) # batch_size x (ctx) seq_len x 1
        weighted_sum = torch.matmul(torch.transpose(ctx_out, 1, 2), gamma).squeeze(-1) # batch_size x 2*hidden_dim
        out = self.dense(weighted_sum) # batch_size x polarity_dim
        return {'logits': out}
| 54.093023 | 152 | 0.679278 |
15b55ab0bfe68a444fc825fe27f989d9dc96ad17 | 426 | py | Python | inventory/urls.py | CNicox/inventory | 6a85e3155a7215182f892bbc712f49f85db5d8f8 | [
"Unlicense"
] | 1 | 2022-01-11T13:51:35.000Z | 2022-01-11T13:51:35.000Z | inventory/urls.py | CNicox/inventory | 6a85e3155a7215182f892bbc712f49f85db5d8f8 | [
"Unlicense"
] | null | null | null | inventory/urls.py | CNicox/inventory | 6a85e3155a7215182f892bbc712f49f85db5d8f8 | [
"Unlicense"
] | null | null | null | from django.urls import path
from . import views
app_name = "inventory"
urlpatterns = [
    path('index/', views.IndexView.as_view(), name="index"),
    path('registration/', views.RegistrationView.as_view(), name='registration'),
    path('change-password/', views.ChangePasswordView.as_view(), name='change-password'),
    #path("login/", views.login_request, name="login"),
    # NOTE(review): 'index/' is registered twice -- Django resolves the
    # first match (the class-based IndexView above), so the pattern below
    # is unreachable.  Confirm which view is intended and remove the other.
    path('index/', views.index, name="index"),
]
f95e1b7a7a76be5b87a819d24518fa641a926f28 | 649 | py | Python | vendor/github.com/DataDog/datadog-agent/pkg/collector/py/tests/kwargs_init_signature.py | dragon3/datadog-trace-agent | 5e69c6a432f0a9f50d4a95112e8d9861dd91243f | [
"BSD-3-Clause"
] | null | null | null | vendor/github.com/DataDog/datadog-agent/pkg/collector/py/tests/kwargs_init_signature.py | dragon3/datadog-trace-agent | 5e69c6a432f0a9f50d4a95112e8d9861dd91243f | [
"BSD-3-Clause"
] | null | null | null | vendor/github.com/DataDog/datadog-agent/pkg/collector/py/tests/kwargs_init_signature.py | dragon3/datadog-trace-agent | 5e69c6a432f0a9f50d4a95112e8d9861dd91243f | [
"BSD-3-Clause"
] | null | null | null | # Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2017 Datadog, Inc.
from checks import AgentCheck
from common import assert_init_config_init, assert_agent_config_init, assert_instance_init
class TestCheck(AgentCheck):
    """Check used by the test harness to verify that an AgentCheck
    constructed with ``*args, **kwargs`` pass-through receives the
    expected init/agent/instance configuration."""
    def __init__(self, *args, **kwargs):
        super(TestCheck, self).__init__(*args, **kwargs)
        # Assertions validate the config state populated by AgentCheck.__init__.
        assert_init_config_init(self)
        assert_agent_config_init(self, True)
        assert_instance_init(self)
    def check(self, instance):
        # No-op: only construction behavior is under test.
        pass
| 32.45 | 90 | 0.747304 |
b92be6de29f005417a8ca8424dcff728d6bc353d | 1,655 | py | Python | Code/hime_main.py | dcsozturk/hime | 07c056e48258d8e3de7c99cde9a9b1c8d073285e | [
"Apache-2.0"
] | 3 | 2020-09-02T05:21:01.000Z | 2021-03-19T06:28:18.000Z | Code/hime_main.py | dcsozturk/hime | 07c056e48258d8e3de7c99cde9a9b1c8d073285e | [
"Apache-2.0"
] | null | null | null | Code/hime_main.py | dcsozturk/hime | 07c056e48258d8e3de7c99cde9a9b1c8d073285e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Author: Dogacan S. Ozturk
# Import default Python libraries.
import os
import sys
from glob import glob
import tables
import numpy as np
import datetime as dt
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
import matplotlib.dates as mdates
import matplotlib.ticker as mtick
myFmt = mdates.DateFormatter('%H:%M')
# Import custom Python libraries.
sys.path.insert(0, '../Code/')
from spacepy.pybats import gitm
import hime_helper_functions
from downsample_data import downsample_pfisr_data
from merge_potentials import merge_pfisr_with_gitm_potentials
# Path to the PFISR 2D vector electric field (VEF) estimates (HDF5 file).
filename = '../Examples/Files/PFISR_Data/20191026.002_lp_1min-fitcal_2dVEF_001001-geo600km.h5'
# Desired grid resolution passed to the downsampler
# (units defined by downsample_pfisr_data -- TODO confirm).
gridRes = 0.75
# Downsample the PFISR grid and compute potential differences and the
# downsampled/recalculated electric-field components on the new grids.
PhiX, PhiY, Ex_downsampled, Ey_downsampled, Ex_calculated, Ey_calculated, XnewGrids, YnewGrids, experimentTimes = downsample_pfisr_data(filename, gridRes)
# Global potential simulation outputs (GITM .bin files) to merge with the
# local PFISR estimates.
weimerSimulationList = glob('../Examples/Files/Simulations/3D*.bin')
# Blending weight between local and global potentials
# (presumably in [0, 1] -- verify against merge_pfisr_with_gitm_potentials).
mergeParameter = 0.6
# Set plotPotentials to True for saving plots of the merged potentials.
plotPotentials = True
# Set savePotentials to True for saving output ASCII files.
savePotentials = True
# Merge the local and global potentials together.
phiXhime, phiYhime, himeEx, himeEy, xHimeMesh, yHimeMesh, himeTimes = merge_pfisr_with_gitm_potentials(PhiX, PhiY, XnewGrids, YnewGrids, experimentTimes, weimerSimulationList, gridRes, mergeParameter, plotPotentials, savePotentials)
550aa19a5edd30baafd7edc92de0e668a4795f15 | 8,498 | py | Python | official/recommendation/ncf_test.py | Wu-Zhe/maskgan-local | 446688d9317fea0a5cbb4bd8b1cf227df6679dc7 | [
"Apache-2.0"
] | 4 | 2018-09-18T11:27:22.000Z | 2019-10-02T01:15:46.000Z | official/recommendation/ncf_test.py | Wu-Zhe/maskgan-local | 446688d9317fea0a5cbb4bd8b1cf227df6679dc7 | [
"Apache-2.0"
] | null | null | null | official/recommendation/ncf_test.py | Wu-Zhe/maskgan-local | 446688d9317fea0a5cbb4bd8b1cf227df6679dc7 | [
"Apache-2.0"
] | 4 | 2019-03-12T09:41:01.000Z | 2019-10-01T22:49:21.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests NCF."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import mock
import numpy as np
import tensorflow as tf
from absl.testing import flagsaver
from official.recommendation import constants as rconst
from official.recommendation import data_pipeline
from official.recommendation import neumf_model
from official.recommendation import ncf_common
from official.recommendation import ncf_estimator_main
from official.recommendation import ncf_keras_main
from official.utils.testing import integration
NUM_TRAIN_NEG = 4
class NcfTest(tf.test.TestCase):
  """Tests for NCF hit-rate/NDCG metric computation and end-to-end runs."""
  @classmethod
  def setUpClass(cls): # pylint: disable=invalid-name
    super(NcfTest, cls).setUpClass()
    ncf_common.define_ncf_flags()
  def setUp(self):
    # Save the module-level constants we mutate so tearDown can restore them.
    self.top_k_old = rconst.TOP_K
    self.num_eval_negatives_old = rconst.NUM_EVAL_NEGATIVES
    rconst.NUM_EVAL_NEGATIVES = 2
  def tearDown(self):
    rconst.NUM_EVAL_NEGATIVES = self.num_eval_negatives_old
    rconst.TOP_K = self.top_k_old
  def get_hit_rate_and_ndcg(self, predicted_scores_by_user, items_by_user,
                            top_k=rconst.TOP_K, match_mlperf=False):
    """Run the model's metric ops on the given scores and return [HR, NDCG].

    The last column of items_by_user is the positive item; the rest are
    negatives.  NOTE(review): mutates rconst.TOP_K and
    rconst.NUM_EVAL_NEGATIVES and relies on tearDown to restore them.
    """
    rconst.TOP_K = top_k
    rconst.NUM_EVAL_NEGATIVES = predicted_scores_by_user.shape[1] - 1
    batch_size = items_by_user.shape[0]
    # One row of user ids per candidate item (positive + negatives).
    users = np.repeat(np.arange(batch_size)[:, np.newaxis],
                      rconst.NUM_EVAL_NEGATIVES + 1, axis=1)
    users, items, duplicate_mask = \
      data_pipeline.BaseDataConstructor._assemble_eval_batch(
          users, items_by_user[:, -1:], items_by_user[:, :-1], batch_size)
    g = tf.Graph()
    with g.as_default():
      logits = tf.convert_to_tensor(
          predicted_scores_by_user.reshape((-1, 1)), tf.float32)
      # Two-class softmax logits with a zero column for the negative class.
      softmax_logits = tf.concat([tf.zeros(logits.shape, dtype=logits.dtype),
                                  logits], axis=1)
      duplicate_mask = tf.convert_to_tensor(duplicate_mask, tf.float32)
      metric_ops = neumf_model._get_estimator_spec_with_metrics(
          logits=logits, softmax_logits=softmax_logits,
          duplicate_mask=duplicate_mask, num_training_neg=NUM_TRAIN_NEG,
          match_mlperf=match_mlperf).eval_metric_ops
      hr = metric_ops[rconst.HR_KEY]
      ndcg = metric_ops[rconst.NDCG_KEY]
      init = [tf.global_variables_initializer(),
              tf.local_variables_initializer()]
    with self.test_session(graph=g) as sess:
      sess.run(init)
      # Each metric op is an (value, update_op) pair; run the update ops.
      return sess.run([hr[1], ndcg[1]])
  def test_hit_rate_and_ndcg(self):
    # Test with no duplicate items
    predictions = np.array([
        [2., 0., 1.], # In top 2
        [1., 0., 2.], # In top 1
        [2., 1., 0.], # In top 3
        [3., 4., 2.] # In top 3
    ])
    items = np.array([
        [2, 3, 1],
        [3, 1, 2],
        [2, 1, 3],
        [1, 3, 2],
    ])
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
    self.assertAlmostEqual(hr, 1 / 4)
    self.assertAlmostEqual(ndcg, 1 / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
    self.assertAlmostEqual(hr, 2 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
    self.assertAlmostEqual(hr, 4 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
                                  2 * math.log(2) / math.log(4)) / 4)
    # Without duplicates, MLPerf mode must give identical results.
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 1 / 4)
    self.assertAlmostEqual(ndcg, 1 / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 2 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 4 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
                                  2 * math.log(2) / math.log(4)) / 4)
    # Test with duplicate items. In the MLPerf case, we treat the duplicates as
    # a single item. Otherwise, we treat the duplicates as separate items.
    predictions = np.array([
        [2., 2., 3., 1.], # In top 4. MLPerf: In top 3
        [1., 0., 2., 3.], # In top 1. MLPerf: In top 1
        [2., 3., 2., 0.], # In top 4. MLPerf: In top 3
        [2., 4., 2., 3.] # In top 2. MLPerf: In top 2
    ])
    items = np.array([
        [2, 2, 3, 1],
        [2, 3, 4, 1],
        [2, 3, 2, 1],
        [3, 2, 1, 4],
    ])
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
    self.assertAlmostEqual(hr, 1 / 4)
    self.assertAlmostEqual(ndcg, 1 / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
    self.assertAlmostEqual(hr, 2 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
    self.assertAlmostEqual(hr, 2 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4)
    self.assertAlmostEqual(hr, 4 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
                                  2 * math.log(2) / math.log(5)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 1 / 4)
    self.assertAlmostEqual(ndcg, 1 / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 2 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 4 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
                                  2 * math.log(2) / math.log(4)) / 4)
    hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4,
                                          match_mlperf=True)
    self.assertAlmostEqual(hr, 4 / 4)
    self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
                                  2 * math.log(2) / math.log(4)) / 4)
  # Shared flags for the synthetic end-to-end runs below.
  _BASE_END_TO_END_FLAGS = ['-batch_size', '1024', '-train_epochs', '1']
  @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_estimator(self):
    integration.run_synthetic(
        ncf_estimator_main.main, tmp_root=self.get_temp_dir(), max_train=None,
        extra_flags=self._BASE_END_TO_END_FLAGS)
  @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_estimator_mlperf(self):
    integration.run_synthetic(
        ncf_estimator_main.main, tmp_root=self.get_temp_dir(), max_train=None,
        extra_flags=self._BASE_END_TO_END_FLAGS + ['-ml_perf', 'True'])
  @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras(self):
    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
        extra_flags=self._BASE_END_TO_END_FLAGS +
        ['-distribution_strategy', 'off'])
  @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras_mlperf(self):
    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
        extra_flags=self._BASE_END_TO_END_FLAGS +
        ['-ml_perf', 'True', '-distribution_strategy', 'off'])
if __name__ == "__main__":
tf.logging.set_verbosity(tf.logging.INFO)
tf.test.main()
| 38.627273 | 80 | 0.63662 |
942e1960d84d97a50cfcfe8a0e311f47f1018ca9 | 4,081 | py | Python | pygs/test/unit_test/test_pypayload.py | graphserver/graphserver | 1fdce42a747df35a54ed4fa364837fb73710c226 | [
"BSD-3-Clause-Clear"
] | 58 | 2015-01-28T01:06:36.000Z | 2022-03-11T08:25:49.000Z | pygs/test/unit_test/test_pypayload.py | wlach/graphserver | 52dac7487673aa5f28bfe2342dbe93ce03880f7a | [
"BSD-3-Clause-Clear"
] | 1 | 2018-05-18T10:49:09.000Z | 2018-05-18T10:49:09.000Z | pygs/test/unit_test/test_pypayload.py | wlach/graphserver | 52dac7487673aa5f28bfe2342dbe93ce03880f7a | [
"BSD-3-Clause-Clear"
] | 12 | 2015-03-01T12:23:33.000Z | 2020-09-10T13:59:53.000Z | from graphserver.core import *
import unittest
import StringIO
import sys
class TestPyPayload(unittest.TestCase):
    """Tests for Python-defined edge payloads (GenericPyPayload subclasses)
    attached to graphserver graphs."""
    def _minimal_graph(self):
        # Two-vertex graph reused by every test.
        g = Graph()
        g.add_vertex( "Seattle" )
        g.add_vertex( "Portland" )
        return g
    def test_basic(self):
        # Construction alone must not raise.
        p = NoOpPyPayload(1.1)
    def test_cast(self):
        # A payload retrieved from an edge must compare equal to the
        # Python object that was attached.
        g = self._minimal_graph()
        e = NoOpPyPayload(1.2)
        ed = g.add_edge( "Seattle", "Portland", e )
        assert e == ed.payload
        ep = ed.payload # uses EdgePayload.from_pointer internally.
        assert e == ep
        assert ep.num == 1.2
    def test_walk(self):
        # walk()/walk_back() implemented in Python must update the state
        # and return a new State object.
        class IncTimePayload(GenericPyPayload):
            def walk_impl(self, state, walkopts):
                state.time = state.time + 10
                state.weight = 5
                return state
            def walk_back_impl(self, state, walkopts):
                state.time = state.time - 10
                state.weight = 0
                return state
        g = self._minimal_graph()
        ed = g.add_edge( "Seattle", "Portland", IncTimePayload())
        assert(isinstance(ed.payload,IncTimePayload))
        s = State(1,1)
        assert s.time == 1
        s1 = ed.walk(s, WalkOptions())
        assert s1
        # The original state is untouched; walk returns a distinct object.
        assert s.time == 1
        assert s1.soul != s.soul
        assert s1.time == 11
        assert s1.weight == 5
        s2 = ed.walk_back(s1, WalkOptions())
        assert s2
        assert s2.time == 1
        assert s2.weight == 0
        g.destroy()
    def test_failures(self):
        # An exception raised inside a Python payload is printed to stderr
        # rather than propagated through the C layer.
        class ExceptionRaiser(GenericPyPayload):
            def walk_bad_stuff(self, state, walkopts):
                raise Exception("I am designed to fail.")
            walk_impl = walk_bad_stuff
            walk_back_impl = walk_bad_stuff
        g = self._minimal_graph()
        ed = g.add_edge( "Seattle", "Portland", ExceptionRaiser())
        # save stderr so we can set it back the way we found it
        stderrsave = sys.stderr
        # get a string-file to catch things placed into stderr
        stderr_catcher = StringIO.StringIO()
        sys.stderr = stderr_catcher
        # this will barf a traceback into stderr
        ed.walk(State(1,0), WalkOptions())
        # the last line of the exception traceback just blurted out should be ...
        stderr_catcher.seek(0)
        self.assertEqual( stderr_catcher.read().split("\n")[-2] , "Exception: I am designed to fail." )
        # set up a new buffer to catch a traceback
        stderr_catcher = StringIO.StringIO()
        sys.stderr = stderr_catcher
        # blurt into it
        ed.walk_back(State(1,0), WalkOptions())
        # check that the last line of the traceback looks like we expect
        stderr_catcher.seek(0)
        self.assertEqual( stderr_catcher.read().split("\n")[-2] , "Exception: I am designed to fail." )
        g.destroy()
        sys.stderr = stderrsave
    def test_basic_graph(self):
        # A Python payload participates in shortest-path search like a
        # native payload.
        class MovingWalkway(GenericPyPayload):
            def walk_impl(self, state, walkopts):
                state.time = state.time + 10
                state.weight = 5
                return state
            def walk_back_impl(self, state, walkopts):
                state.time = state.time - 10
                state.weight = 0
                return state
        g = self._minimal_graph()
        g.add_edge( "Seattle", "Portland", MovingWalkway())
        spt = g.shortest_path_tree("Seattle", "Portland", State(0,0), WalkOptions())
        assert spt
        assert spt.__class__ == ShortestPathTree
        assert spt.get_vertex("Portland").state.weight==5
        assert spt.get_vertex("Portland").state.time==10
        spt.destroy()
        g.destroy()
# Run this module's tests with a verbose text runner.
if __name__ == '__main__':
    tl = unittest.TestLoader()
    suite = tl.loadTestsFromTestCase(TestPyPayload)
    unittest.TextTestRunner(verbosity=2).run(suite)
| 32.388889 | 103 | 0.557461 |
1b2431d5ef27a6b50153f3071f6f92d2f27f642e | 6,013 | py | Python | quran/usecase/ayah/find_ayah.py | octabytes/quran | 974d351cf5e6a12a28a5ac9f29c8d2753ae6dd86 | [
"Apache-2.0"
] | null | null | null | quran/usecase/ayah/find_ayah.py | octabytes/quran | 974d351cf5e6a12a28a5ac9f29c8d2753ae6dd86 | [
"Apache-2.0"
] | null | null | null | quran/usecase/ayah/find_ayah.py | octabytes/quran | 974d351cf5e6a12a28a5ac9f29c8d2753ae6dd86 | [
"Apache-2.0"
] | null | null | null | from quran.repository.repo_responses import AyahResponse
from quran.utils.response import Response
class FindAyah:
    """Use case: look up ayahs by various keys and optionally attach
    related resources (translation, surah, edition, audio, image).

    Each public lookup either returns a single-ayah ``AyahResponse`` or a
    paginated one (``ayah_list`` + ``cursor``).  The previously duplicated
    wrapping logic is consolidated into the _single_response /
    _stream_response helpers; all public signatures are unchanged.
    """

    def __init__(self, ayah_repo, find_translation, find_surah, find_edition, find_audio, find_image):
        self.ayah_repo = ayah_repo
        self.find_translation = find_translation
        self.find_surah = find_surah
        self.find_edition = find_edition
        self.find_audio = find_audio
        self.find_image = find_image

    def by_id(self, ayah_id, edition_id=None, parts=None):
        """Find a single ayah by its id."""
        return self._single_response(self.ayah_repo.find_by_id(ayah_id), edition_id, parts)

    def by_number(self, ayah_number, edition_id=None, parts=None):
        """Find a single ayah by its absolute number."""
        return self._single_response(self.ayah_repo.find_by_number(ayah_number), edition_id, parts)

    def by_number_in_surah(self, number_in_surah, edition_id=None, parts=None):
        """Find a single ayah by its number within its surah."""
        return self._single_response(
            self.ayah_repo.find_by_number_in_surah(number_in_surah), edition_id, parts)

    def by_surah_id(self, surah_id, edition_id=None, parts=None, limit=None, cursor=None):
        """Find the ayahs of a surah (paginated)."""
        return self._stream_response(
            self.ayah_repo.find_by_surah_id(surah_id, limit=limit, cursor=cursor),
            edition_id, parts)

    def by_juz(self, juz, edition_id=None, parts=None, limit=None, cursor=None):
        """Find the ayahs of a juz (paginated)."""
        return self._stream_response(
            self.ayah_repo.find_by_juz(juz, limit=limit, cursor=cursor), edition_id, parts)

    def by_manzil(self, manzil, edition_id=None, parts=None, limit=None, cursor=None):
        """Find the ayahs of a manzil (paginated)."""
        return self._stream_response(
            self.ayah_repo.find_by_manzil(manzil, limit=limit, cursor=cursor), edition_id, parts)

    def by_ruku(self, ruku, edition_id=None, parts=None, limit=None, cursor=None):
        """Find the ayahs of a ruku (paginated)."""
        return self._stream_response(
            self.ayah_repo.find_by_ruku(ruku, limit=limit, cursor=cursor), edition_id, parts)

    def by_hizb_quarter(self, hizb_quarter, edition_id=None, parts=None, limit=None, cursor=None):
        """Find the ayahs of a hizb quarter (paginated)."""
        return self._stream_response(
            self.ayah_repo.find_by_hizb_quarter(hizb_quarter, limit=limit, cursor=cursor),
            edition_id, parts)

    def by_sajda(self, sajda, edition_id=None, parts=None, limit=None, cursor=None):
        """Find the ayahs of a sajda (paginated)."""
        return self._stream_response(
            self.ayah_repo.find_by_sajda(sajda, limit=limit, cursor=cursor), edition_id, parts)

    def _single_response(self, ayah_res, edition_id, parts):
        # Wrap one repository result, resolving any requested parts.
        response = self._ayah_response(ayah_res.ayah, edition_id, parts)
        return AyahResponse(ayah=response, number_of_results=ayah_res.number_of_results)

    def _stream_response(self, ayah_stream, edition_id, parts):
        # Wrap a paginated repository result, resolving parts per ayah.
        ayah_list = [self._ayah_response(ayah, edition_id, parts)
                     for ayah in ayah_stream.ayah_list]
        return AyahResponse(ayah_list=ayah_list,
                            number_of_results=ayah_stream.number_of_results,
                            cursor=ayah_stream.cursor)

    def _ayah_response(self, ayah, edition_id, parts):
        """Build a Response carrying the ayah plus any requested parts."""
        response = Response()
        response.ayah = ayah
        if parts:
            self._get_ayah_parts(response, parts, ayah.id, edition_id, ayah.surah_id)
        return response

    def _get_ayah_parts(self, response, parts, ayah_id, edition_id='edition-1', surah_id=None):
        """Attach the requested optional parts to *response*.

        Recognized part names: 'Translation', 'Surah', 'Edition',
        'Arabic_Audio', 'Translation_Audio', 'Image'.
        NOTE(review): callers always pass edition_id positionally, so the
        'edition-1' default is effectively unused and a None edition_id is
        forwarded to the finders as-is -- confirm that is intended.
        """
        if 'Translation' in parts:
            translation_res = self.find_translation.filter(ayah_id=ayah_id, edition_id=edition_id)
            if translation_res:
                response.translation = translation_res.translation
        if 'Surah' in parts:
            if surah_id is None:
                # Fall back to looking the ayah up to discover its surah.
                surah_id = self.ayah_repo.find_by_id(ayah_id).ayah.surah_id
            surah_res = self.find_surah.by_id(surah_id)
            if surah_res:
                response.surah = surah_res.surah
        if 'Edition' in parts:
            edition_res = self.find_edition.by_id(edition_id)
            if edition_res:
                response.edition = edition_res.edition
        if 'Arabic_Audio' in parts:
            arabic_audio = self.find_audio.arabic_audio(ayah_id=ayah_id, edition_id=edition_id)
            if arabic_audio:
                response.arabic_audio = arabic_audio.audio
        if 'Translation_Audio' in parts:
            translation_audio = self.find_audio.translation_audio(ayah_id=ayah_id, edition_id=edition_id)
            if translation_audio:
                response.translation_audio = translation_audio.audio
        if 'Image' in parts:
            image_res = self.find_image.by_ayah_id(ayah_id)
            if image_res:
                response.ayah_image = image_res.image
| 48.491935 | 105 | 0.685515 |
194c02d96b499d398d0de7f0a574cc6a02a85d87 | 1,031 | py | Python | alipay/aop/api/response/AlipayDataAiserviceSgxGatewayQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/AlipayDataAiserviceSgxGatewayQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/response/AlipayDataAiserviceSgxGatewayQueryResponse.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayDataAiserviceSgxGatewayQueryResponse(AlipayResponse):
    """Response wrapper for the alipay.data.aiservice.sgx.gateway.query API,
    exposing the ``request_uuid`` and ``result`` response fields."""

    def __init__(self):
        super(AlipayDataAiserviceSgxGatewayQueryResponse, self).__init__()
        self._request_uuid = None
        self._result = None

    @property
    def request_uuid(self):
        # Value of the "request_uuid" field from the gateway response.
        return self._request_uuid

    @request_uuid.setter
    def request_uuid(self, value):
        self._request_uuid = value

    @property
    def result(self):
        # Value of the "result" field from the gateway response.
        return self._result

    @result.setter
    def result(self, value):
        self._result = value

    def parse_response_content(self, response_content):
        # Let the base class decode the raw payload, then copy over any
        # fields this response knows about.
        response = super(AlipayDataAiserviceSgxGatewayQueryResponse, self).parse_response_content(response_content)
        for field in ('request_uuid', 'result'):
            if field in response:
                setattr(self, field, response[field])
2b49b2087b5cd1a0e60cff7e75a70dfd649f257a | 1,942 | py | Python | src/toil/batchSystems/__init__.py | YeoLab/toil | 9837c396b946bc4a0cf97e7c2705e5892b88707b | [
"Apache-2.0"
] | null | null | null | src/toil/batchSystems/__init__.py | YeoLab/toil | 9837c396b946bc4a0cf97e7c2705e5892b88707b | [
"Apache-2.0"
] | 1 | 2017-07-31T23:47:25.000Z | 2017-07-31T23:47:25.000Z | src/toil/batchSystems/__init__.py | lexentbio/toil | 6ad3813af4450962d0899aa6c821189f86472ef9 | [
"Apache-2.0"
] | 1 | 2020-09-17T17:49:32.000Z | 2020-09-17T17:49:32.000Z | # Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
if sys.version_info >= (3, 0):
    # Python 3 removed the builtin cmp(); recreate it so __cmp__-style
    # comparisons below keep working.
    # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
    def cmp(a, b):
        return (a > b) - (a < b)
class MemoryString:
    """A memory quantity such as ``"1024"``, ``"10K"``, ``"1.5G"`` or ``"10kB"``.

    Parses the numeric value and unit suffix and exposes the total in
    bytes as ``self.bytes``.  Instances compare by byte value.

    Fixes over the previous implementation: a lowercase unit (e.g. "1kB")
    used to leave ``bytes`` as None because byteVal() only matched
    uppercase units, and "1KB" raised ValueError; units are now accepted
    in either case and normalized.  Rich comparisons are provided so
    ordering also works on Python 3, where ``__cmp__`` is ignored.
    """
    # Binary multipliers for each normalized unit letter.
    _MULTIPLIERS = {'B': 1, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3, 'T': 1024 ** 4}

    def __init__(self, string):
        units = ('K', 'k', 'M', 'm', 'G', 'g', 'T', 't')
        if string[-1] in units:  # e.g. "10K"
            unit, value = string[-1], string[:-1]
        elif len(string) >= 3 and string[-2] in units:  # e.g. "10KB" / "10kB"
            unit, value = string[-2], string[:-2]
        else:  # bare byte count, e.g. "1024"
            unit, value = 'B', string
        # Normalize case so lookups in _MULTIPLIERS always succeed.
        self.unit = unit.upper()
        self.val = float(value)
        self.bytes = self.byteVal()

    def __str__(self):
        # A bare byte count renders without a suffix, mirroring the input.
        if self.unit != 'B':
            return str(self.val) + self.unit
        else:
            return str(self.val)

    def byteVal(self):
        """Return the quantity converted to bytes."""
        return self.val * self._MULTIPLIERS[self.unit]

    def __cmp__(self, other):
        # Python 2 ordering hook; Python 3 uses the rich comparisons below.
        return cmp(self.bytes, other.bytes)

    def __eq__(self, other):
        return self.bytes == other.bytes

    def __ne__(self, other):
        return self.bytes != other.bytes

    def __lt__(self, other):
        return self.bytes < other.bytes

    def __le__(self, other):
        return self.bytes <= other.bytes

    def __gt__(self, other):
        return self.bytes > other.bytes

    def __ge__(self, other):
        return self.bytes >= other.bytes

    def __hash__(self):
        # Keep instances hashable now that __eq__ is defined.
        return hash(self.bytes)
| 34.070175 | 117 | 0.591658 |
f7640dd716bbc4e0653c1ad225bfb12ccc97959f | 1,933 | py | Python | tests/textunits/test_bosyl.py | Esukhia/botok | 9009581cc290c800e7d93a405969e10a7c9d2f51 | [
"Apache-2.0"
] | 17 | 2019-10-19T15:29:52.000Z | 2022-03-01T19:43:15.000Z | tests/textunits/test_bosyl.py | drupchen/pybo | eac38e7c574e2e99a4f43ca641782d8616bb684d | [
"Apache-2.0"
] | 29 | 2019-09-01T21:33:15.000Z | 2022-01-11T08:57:50.000Z | tests/textunits/test_bosyl.py | Esukhia/botok | 9009581cc290c800e7d93a405969e10a7c9d2f51 | [
"Apache-2.0"
] | 8 | 2020-01-14T17:45:11.000Z | 2022-03-28T09:31:35.000Z | # coding: utf8
from botok import BoSyl
bs = BoSyl()
def test_bosyl():
    """Exercise BoSyl affix handling on representative Tibetan syllables."""
    # is_affixable() Vs. SylComponents.is_thame()
    assert bs.is_thame("ཀུན") is False and bs.is_affixable("ཀུན") is False
    assert bs.is_thame("དེའིའམ") is True and bs.is_affixable("དེའིའམ") is False
    assert bs.is_thame("དེའི") is True and bs.is_affixable("དེའི") is False
    assert bs.is_thame("ང") is True and bs.is_affixable("ང") is True
    # get_all_affixed(): every (affixed form, metadata) pair for a syllable;
    # 'len' is the affix length, 'type' the particle, 'aa' whether a final
    # འ was dropped.
    affixed = bs.get_all_affixed("ང")
    assert affixed == [
        ("ངར", {"len": 1, "type": "la", "aa": False}),
        ("ངས", {"len": 1, "type": "gis", "aa": False}),
        ("ངའི", {"len": 2, "type": "gi", "aa": False}),
        ("ངའམ", {"len": 2, "type": "am", "aa": False}),
        ("ངའང", {"len": 2, "type": "ang", "aa": False}),
        ("ངའོ", {"len": 2, "type": "o", "aa": False}),
        ("ངའིའོ", {"len": 4, "type": "gi+o", "aa": False}),
        ("ངའིའམ", {"len": 4, "type": "gi+am", "aa": False}),
        ("ངའིའང", {"len": 4, "type": "gi+ang", "aa": False}),
        ("ངའོའམ", {"len": 4, "type": "o+am", "aa": False}),
        ("ངའོའང", {"len": 4, "type": "o+ang", "aa": False}),
    ]
    # Syllable ending in འ: the same affixes apply with aa=True.
    affixed = bs.get_all_affixed("མཐའ")
    assert affixed == [
        ("མཐར", {"len": 1, "type": "la", "aa": True}),
        ("མཐས", {"len": 1, "type": "gis", "aa": True}),
        ("མཐའི", {"len": 2, "type": "gi", "aa": True}),
        ("མཐའམ", {"len": 2, "type": "am", "aa": True}),
        ("མཐའང", {"len": 2, "type": "ang", "aa": True}),
        ("མཐའོ", {"len": 2, "type": "o", "aa": True}),
        ("མཐའིའོ", {"len": 4, "type": "gi+o", "aa": True}),
        ("མཐའིའམ", {"len": 4, "type": "gi+am", "aa": True}),
        ("མཐའིའང", {"len": 4, "type": "gi+ang", "aa": True}),
        ("མཐའོའམ", {"len": 4, "type": "o+am", "aa": True}),
        ("མཐའོའང", {"len": 4, "type": "o+ang", "aa": True}),
    ]
    # Non-affixable syllables yield None.
    affixed = bs.get_all_affixed("ཀུན")
    assert affixed is None
| 41.12766 | 79 | 0.457838 |
162d9e96ee831393864e3bac624b027dda45ed50 | 3,712 | py | Python | e2e/Tests/RPC/Personal/PersonalDataTest.py | kayabaNerve/Currency | 260ebc20f1704f42ad6183fee39ad58ec6d07961 | [
"CC0-1.0"
] | 66 | 2019-01-14T08:39:52.000Z | 2022-01-06T11:39:15.000Z | e2e/Tests/RPC/Personal/PersonalDataTest.py | kayabaNerve/Currency | 260ebc20f1704f42ad6183fee39ad58ec6d07961 | [
"CC0-1.0"
] | 228 | 2019-01-16T15:42:44.000Z | 2022-02-05T07:48:07.000Z | e2e/Tests/RPC/Personal/PersonalDataTest.py | kayabaNerve/Currency | 260ebc20f1704f42ad6183fee39ad58ec6d07961 | [
"CC0-1.0"
] | 19 | 2019-01-14T08:53:04.000Z | 2021-11-03T20:19:28.000Z | from time import sleep
from typing import Dict, Any
from e2e.Meros.Meros import Meros
from e2e.Meros.RPC import RPC
from e2e.Tests.Errors import TestError
from e2e.Tests.RPC.Personal.Lib import decodeAddress
def checkData(
  rpc: RPC,
  dataHash: str,
  expected: bytes
) -> str:
  """Fetch the Data TX `dataHash`, verify it matches `expected`, and return the hash of its sole input."""
  fetched: Dict[str, Any] = rpc.call("transactions", "getTransaction", {"hash": dataHash})
  inputs = fetched["inputs"]
  if len(inputs) != 1:
    raise TestError("Data had multiple inputs.")
  parent: str = inputs[0]["hash"]
  #Strip the fields which aren't part of the comparison template.
  for field in ("inputs", "signature", "proof"):
    del fetched[field]
  template = {
    "descendant": "Data",
    "outputs": [],
    "hash": dataHash,
    "data": expected.hex().upper()
  }
  if fetched != template:
    raise TestError("Data wasn't as expected.")
  return parent
def PersonalDataTest(
  rpc: RPC
) -> None:
  """Exercise the `personal`/`data` RPC route end to end.

  Covers: Data creation from string and hex payloads, the 1-256 byte size
  limits, hex validation, password-protected Wallets, and Data chain
  continuity across a node reboot.
  """
  #Create a Data.
  firstData: str = rpc.call("personal", "data", {"data": "a"})
  initial: str = checkData(rpc, firstData, b"a")
  #Meros should've also created an initial Data.
  if checkData(rpc, initial, decodeAddress(rpc.call("personal", "getAddress"))) != bytes(32).hex():
    raise TestError("Initial Data didn't have a 0-hash input.")
  #Create a Data using hex data. Also tests upper case hex.
  if checkData(rpc, rpc.call("personal", "data", {"data": "AABBCC", "hex": True}), b"\xAA\xBB\xCC") != firstData:
    raise TestError("Newly created Data wasn't a descendant of the existing Data.")
  #Should support using 256 bytes of Data. Also tests lower case hex.
  checkData(rpc, rpc.call("personal", "data", {"data": bytes([0xaa] * 256).hex(), "hex": True}), bytes([0xaa] * 256))
  #Should properly error when we input no data. All Datas must have at least 1 byte of Data.
  try:
    rpc.call("personal", "data", {"data": ""})
    raise Exception()
  except Exception as e:
    if str(e) != "-3 Data is too small or too large.":
      raise TestError("Meros didn't handle Data that was too small.")
  #Should properly error when we supply more than 256 bytes of data.
  try:
    rpc.call("personal", "data", {"data": "a" * 257})
    raise Exception()
  except Exception as e:
    if str(e) != "-3 Data is too small or too large.":
      raise TestError("Meros didn't handle Data that was too large.")
  #Should properly error when we supply non-hex data with the hex flag.
  try:
    rpc.call("personal", "data", {"data": "zz", "hex": True})
    raise Exception()
  except Exception as e:
    if str(e) != "-3 Invalid hex char `z` (ord 122).":
      raise TestError("Meros didn't properly handle invalid hex.")
  #Should properly error when we supply non-even hex data.
  try:
    rpc.call("personal", "data", {"data": "a", "hex": True})
    raise Exception()
  except Exception as e:
    if str(e) != "-3 Incorrect hex string len.":
      raise TestError("Meros didn't properly handle non-even hex.")
  #Test Datas when the Wallet has a password.
  rpc.call("personal", "setWallet", {"password": "password"})
  #Shouldn't work due to the lack of a password.
  try:
    rpc.call("personal", "data", {"data": "abc"})
    raise Exception()
  except Exception as e:
    if str(e) != "-3 Invalid password.":
      raise TestError("Meros didn't properly handle creating a Data without a password.")
  #Should work due to the existence of a password.
  lastData: str = rpc.call("personal", "data", {"data": "abc", "password": "password"})
  checkData(rpc, lastData, b"abc")
  #Reboot the node and verify we can create a new Data without issue.
  rpc.quit()
  sleep(3)
  rpc.meros = Meros(rpc.meros.db, rpc.meros.tcp, rpc.meros.rpc)
  if checkData(rpc, rpc.call("personal", "data", {"data": "def", "password": "password"}), b"def") != lastData:
    raise TestError("Couldn't create a new Data after rebooting.")
| 35.018868 | 117 | 0.656789 |
839342928073c27e67d5e3d25c523461f9cc3049 | 1,732 | py | Python | src/settings.py | gabrwagn/signerupper | 34072f2db8bbb87ce1d581cda140f15c35b52827 | [
"MIT"
] | 1 | 2021-11-10T00:10:37.000Z | 2021-11-10T00:10:37.000Z | src/settings.py | gabrwagn/signerupper | 34072f2db8bbb87ce1d581cda140f15c35b52827 | [
"MIT"
] | null | null | null | src/settings.py | gabrwagn/signerupper | 34072f2db8bbb87ce1d581cda140f15c35b52827 | [
"MIT"
] | null | null | null | # Formats
# strftime/strptime patterns used when parsing and rendering event times.
TIME_FORMAT = '%H:%M'
DATE_FORMAT = '%Y-%m-%d'
DATE_TIME_FORMAT = f'{DATE_FORMAT} {TIME_FORMAT}'
# Longest participant name shown; longer names are presumably truncated — TODO confirm at usage sites.
PARTICIPANT_MAX_NAME_LENGTH = 11
# Announcement
# Name of the channel new events are announced in.
ANNOUNCEMENT_CHANNEL_NAME = "Announcements"
class MESSAGE:
    """str.format templates for the bot's notification messages."""
    NEW_EVENT = "New raid event: {} at {} {}, go sign up now in {}!"
    REMINDER = "Hey! Dont for get you signed up for {} {} {}!"
    PLACEMENT = "Hey! You've been assigned {} in the raid {} at {} (see: {})!"
# Event settings
# Default maximum number of sign-ups per event.
DEFAULT_CAP_PARTICIPANTS = 40
# Reactions recognized as sign-up / decline votes (see INSTRUCTIONS below).
SIGNUP_REACTION = '👍'
DECLINE_REACTION = '👎'
# User-facing instructions appended to event posts.
INSTRUCTIONS = f"*Write the command **+sign** or {SIGNUP_REACTION} to attend, " \
               f"write **+decline** or {DECLINE_REACTION} if you can't attend.*"
# Roles
class ROLES:
    """Role names used by the sign-up bot."""

    # Required roles (must exist for the bot to function).
    ADMIN = "Officer"
    DECLINED = "Declined"
    BACKUP = "Backup"

    # Server-specific raid roles.
    TANK = "Tank"
    PHYSICAL = "Physical"
    CASTER = "Caster"
    HEALER = "Healer"

    # All bot-managed roles, and the subset counted as active raid roles.
    ALL = [TANK, PHYSICAL, CASTER, HEALER, DECLINED, BACKUP]
    ACTIVE = [TANK, PHYSICAL, CASTER, HEALER]

    @classmethod
    def from_identifier_default(cls, identifier):
        """Return the default raid role for a character class identifier.

        Raises KeyError for an unknown identifier (same contract as the
        original dict lookup).
        """
        physical = ("Warrior", "Rogue", "Hunter")
        healers = ("Paladin", "Shaman", "Priest", "Druid")
        casters = ("Warlock", "Mage")
        if identifier in physical:
            return ROLES.PHYSICAL
        if identifier in healers:
            return ROLES.HEALER
        if identifier in casters:
            return ROLES.CASTER
        raise KeyError(identifier)
# Identifiers
# Recognized character class identifiers (keys of ROLES.from_identifier_default).
IDENTIFIERS = [
    "Warrior",
    "Rogue",
    "Hunter",
    "Paladin",
    "Shaman",
    "Priest",
    "Warlock",
    "Mage",
    "Druid",
]
# Roles a regular user may claim; currently the class identifiers themselves.
VALID_USER_ROLES = IDENTIFIERS
| 21.121951 | 81 | 0.561778 |
8be2ee55222f31e0aded677bc2e0a4893118146b | 3,103 | py | Python | mapping/enable/geojson_overlay.py | nmichaud/enable-mapping | 421aae6c3c700406df0f2438cec190daf5074084 | [
"BSD-3-Clause"
] | 1 | 2019-04-22T16:36:06.000Z | 2019-04-22T16:36:06.000Z | mapping/enable/geojson_overlay.py | pombreda/enable-mapping | 421aae6c3c700406df0f2438cec190daf5074084 | [
"BSD-3-Clause"
] | null | null | null | mapping/enable/geojson_overlay.py | pombreda/enable-mapping | 421aae6c3c700406df0f2438cec190daf5074084 | [
"BSD-3-Clause"
] | 2 | 2015-04-14T10:06:03.000Z | 2020-10-03T03:56:47.000Z |
import geojson
import numpy as np
# Enthought library imports
from traits.api import Str, List, Instance, Array, on_trait_change
from chaco.api import AbstractOverlay
from enable.compiled_path import CompiledPath
from kiva.constants import STROKE, FILL_STROKE
class GeoJSONOverlay(AbstractOverlay):
    """Chaco overlay rendering polygons loaded from a GeoJSON file.

    Assigning `geojs_filename` (re)loads the file, converts its polygons to
    compiled Kiva paths in normalized map space, and picks a random red/blue
    fill per path.
    """

    # Path of the GeoJSON file to render.
    geojs_filename = Str
    # Raw polygon point arrays, kept so paths could be regenerated.
    _polys = List
    # Compiled Kiva paths, one per feature.
    _paths = List(CompiledPath)
    # Per-path RGB fill colors.
    _colors = Array

    def _geojs_filename_changed(self, name):
        """Trait-change handler: reload geometry for the new filename."""
        # FIX: the Python 2 `file()` builtin no longer exists; use open()
        # with a context manager so the handle is always closed.
        with open(name) as f:
            data = f.read()
        polys = process_raw(data.replace('\r\n', ''))
        # Generate compiled paths from the polygons.
        paths = []
        for poly in polys:
            path = CompiledPath()
            for p in poly:
                path.lines(p)
            paths.append(path)
        self._paths = paths
        red = np.array([202, 0, 32])/255.
        blue = np.array([5, 113, 176])/255.
        # Color each path red on a coin flip, blue otherwise (rows whose
        # random draw was 0 come out all-zero and are overwritten below).
        # FIX: np.random.random_integers was deprecated and then removed
        # from NumPy; randint(0, 2) draws the same inclusive {0, 1} values.
        colors = red * np.random.randint(0, 2, len(paths)).reshape(-1, 1)
        colors[np.sum(colors, axis=-1) == 0] = blue
        self._colors = colors
        # Store the polygons just in case we need to regenerate the path.
        self._polys = polys
        self.request_redraw()

    def overlay(self, other_component, gc, view_bounds=None, mode="default"):
        """Draw the compiled paths on top of `other_component`.

        Path coordinates are normalized [0, 1] map tiles; the graphics
        context is scaled by 256 << zoom to match the component's pixels.
        """
        x, y, width, height = view_bounds
        zoom = other_component._zoom_level
        factor = 256 << zoom
        with gc:
            gc.clip_to_rect(x, y, width, height)
            gc.set_stroke_color((1, 1, 1))
            gc.set_line_width(1)
            gc.scale_ctm(factor, factor)
            for path, color in zip(self._paths, self._colors):
                gc.begin_path()
                gc.add_path(path)
                gc.set_fill_color(color)
                gc.draw_path(FILL_STROKE)
        super(GeoJSONOverlay, self).overlay(other_component, gc, view_bounds, mode)
def process_raw(data):
    """Parse a GeoJSON string into a list of polygons (lists of point arrays).

    A FeatureCollection yields one entry per feature (an empty list when a
    feature has no geometry); a bare Feature is flattened into one list.
    Any other top-level type silently yields an empty list.
    """
    geojs = geojson.loads(data.replace('\r\n', ''))
    geotype = geojs.type
    polys = []
    if geotype == "FeatureCollection":
        # FIX: removed the unused `features` local that shadowed nothing and
        # was never read.
        for feature in geojs.features:
            p = []
            if feature.geometry:
                process_geometry(feature.geometry, p)
            polys.append(p)
    elif geotype == "Feature":
        process_geometry(geojs.geometry, polys)
    return polys
def process_geometry(obj, polys):
    """Append screen-space polygon arrays for a GeoJSON geometry to `polys`.

    Handles Polygon, MultiPolygon and (recursively) GeometryCollection;
    anything else raises.
    """
    kind = obj.type
    if kind == "GeometryCollection":
        for member in obj.geometries:
            process_geometry(member, polys)
    elif kind == "Polygon":
        polys.extend(WGS84_to_screen(np.array(obj.coordinates)))
    elif kind == "MultiPolygon":
        for ring_set in obj.coordinates:
            polys.extend(WGS84_to_screen(np.array(ring_set)))
    else:
        raise Exception("Can't handle %s geometry"%obj.type)
def WGS84_to_screen(coords):
    """Convert WGS84 (lon, lat) degrees to normalized Web-Mercator [0, 1].

    `coords` is a (rings, points, 2) array; it is modified in place and
    also returned. x maps linearly from [-180, 180] to [0, 1]; y uses the
    Mercator projection with y = 0.5 at the equator.
    """
    lat_rad = np.radians(coords[:, :, 1])
    coords[:, :, 0] = (coords[:, :, 0] + 180.0) / 360.0
    mercator = np.log(np.tan(lat_rad) + 1.0 / np.cos(lat_rad))
    coords[:, :, 1] = 1.0 - (1.0 - mercator / np.pi) / 2.0
    return coords
| 31.663265 | 83 | 0.588463 |
3012e11e4e487556925210e8d7ecb657f3941127 | 971 | py | Python | thetacontroller/ptpcam_example.py | daniego/rover-thetacontroller | 043df9966f3313dcdde2e70091460fafe904af23 | [
"Apache-2.0"
] | null | null | null | thetacontroller/ptpcam_example.py | daniego/rover-thetacontroller | 043df9966f3313dcdde2e70091460fafe904af23 | [
"Apache-2.0"
] | null | null | null | thetacontroller/ptpcam_example.py | daniego/rover-thetacontroller | 043df9966f3313dcdde2e70091460fafe904af23 | [
"Apache-2.0"
] | null | null | null | import subprocess
## example of taking a picture
def takePicture():
    """Trigger a capture on the connected camera via the ptpcam CLI.

    FIX: invoke ptpcam with an argument list and no shell, so no subshell
    is spawned and the command is not subject to shell interpretation.
    """
    subprocess.call(["ptpcam", "-c"])
takePicture()
# example of grabbing device info and using it in your python program.
ptpinfo = subprocess.Popen(["ptpcam", "--info"], stdout=subprocess.PIPE)
# although this simply prints to stdout, you can parse
# the response for your program
# NOTE(review): without text=True these stdout lines are bytes under
# Python 3; printing works, but the .split(" ") below would raise
# TypeError — this script appears to have been written for Python 2.
for line in ptpinfo.stdout.readlines():
    print(line.rstrip())
# find the last picture taken. Modify to parse for date or other
files = []
listFiles = subprocess.Popen(["ptpcam", "-L"], stdout=subprocess.PIPE)
for line in listFiles.stdout.readlines():
    files.append(line.rstrip())
print("listFiles: " + str(listFiles))
print("Files:" + str(files))
# NOTE(review): assumes the second-to-last line of `ptpcam -L` output holds
# the newest handle as "<handle>: ..." — confirm against ptpcam's format.
lastLine = files[len(files) - 2].split(" ")
lastPicture = lastLine[0][:-1]
print("The handle for the last picture taken is " + lastPicture)
# download the picture
ptpcommand = "ptpcam --get-file=" + lastPicture
subprocess.call(ptpcommand, shell=True)
| 28.558824 | 72 | 0.722966 |
cc3245c70c25aa583f637fa8c48e6806946320bb | 1,875 | py | Python | languages/python/algorithm_scrmable.py | RohitAthithya/learntosolveit | fe1df98534d3af2fb3ba87c6540d9d8fa883c244 | [
"BSD-3-Clause"
] | 136 | 2015-03-06T18:11:21.000Z | 2022-03-10T22:31:40.000Z | languages/python/algorithm_scrmable.py | RohitAthithya/learntosolveit | fe1df98534d3af2fb3ba87c6540d9d8fa883c244 | [
"BSD-3-Clause"
] | 27 | 2015-01-07T01:38:03.000Z | 2021-12-22T19:20:15.000Z | languages/python/algorithm_scrmable.py | RohitAthithya/learntosolveit | fe1df98534d3af2fb3ba87c6540d9d8fa883c244 | [
"BSD-3-Clause"
] | 1,582 | 2015-01-01T20:37:06.000Z | 2022-03-30T12:29:24.000Z | #!/usr/bin/env python
# Cphryigot: O.R.Senthil Kumaran <orsenthil@gmail.com>
#
# Inrpeisd from jwz scrmable: http://www.jwz.org/hacks/scrmable.pl
#
# Tihs pgrarom is fere sortfwae; you can rrtiestiubde it ad/onr mdfioy
# it udenr the tmers of the GNU Graneel Pbuilc Liscene as phlibsued by
# the Fere Sfwartoe Fanouiodtn; eeihtr vierosn 2 of the Liscene, or
# (at your opotin) any leatr vierosn.
#
# Tihs pgrarom is diisertbtud in the hope taht it will be uusfel,
# but WTHOIUT ANY WRAANRTY; whitout eevn the iipemld watrarny of
# MNTIBRAEAHCITLY or FNTIESS FOR A PTULACRIAR PURPSOE. See the
# GNU Graneel Pbuilc Liscene for mroe dalites.
#
# You suolhd have reievced a copy of the GNU Graneel Pbuilc Liscene
# along wtih tihs pgrarom; if not, wtire to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import random
import sys
def mxiup(ecah_wrod):
    """Scramble the interior letters of a word, keeping the first and last
    letters (and any trailing punctuation mark) in place.

    Words of one or two characters are returned unchanged.
    """
    if len(ecah_wrod) <= 2:
        return ecah_wrod
    scrambled = ecah_wrod[0]
    if ecah_wrod[-1] in ['.', ',', ':', ';', '-', '?', '!']:
        # The punctuation mark sits outside the word: shuffle everything
        # between the first and last *letters*, then re-attach both the
        # final letter and the punctuation.
        middle = ecah_wrod[1:-2]
        for ch in random.sample(list(middle), len(middle)):
            scrambled += ch
        scrambled += ecah_wrod[-2]
        # BUG FIX: the trailing punctuation used to be dropped entirely
        # ("word." came back as "wrod" with no period).
        scrambled += ecah_wrod[-1]
    else:
        middle = ecah_wrod[1:-1]
        for ch in random.sample(list(middle), len(middle)):
            scrambled += ch
        scrambled += ecah_wrod[-1]
    return scrambled
def srcambel(line):
    """Scramble every whitespace-separated word of `line` and print the result.

    Substitution uses str.replace, so a word occurring as a substring of
    another word is rewritten there too (preserved from the original).
    """
    wrods = line.split()
    mixedwrods = [mxiup(word) for word in wrods]
    for original, mixed in zip(wrods, mixedwrods):
        line = line.replace(original, mixed)
    print(line, end='')
def getgraparaph():
    """Read and return the entire paragraph from standard input."""
    return sys.stdin.read()
def mian():
    """Entry point: scramble everything on stdin; exit quietly on EOF/Ctrl-C."""
    try:
        line = getgraparaph()
        srcambel(line)
    except (EOFError, KeyboardInterrupt):
        sys.exit(0)
# Runs on import as well as execution (no __main__ guard in the original).
mian()
| 28.409091 | 76 | 0.637333 |
d5d84d8398f22729d6f762f1cb9b9ab8050692ac | 145 | py | Python | ex003.py | LeoWshington/Exercicios_CursoEmVideo_Python | 294d14d9aaab5e32aaf39d70b0cd1266f0b55a02 | [
"MIT"
] | null | null | null | ex003.py | LeoWshington/Exercicios_CursoEmVideo_Python | 294d14d9aaab5e32aaf39d70b0cd1266f0b55a02 | [
"MIT"
] | null | null | null | ex003.py | LeoWshington/Exercicios_CursoEmVideo_Python | 294d14d9aaab5e32aaf39d70b0cd1266f0b55a02 | [
"MIT"
] | null | null | null | n1 = float(input('Digite um número: '))
n2 = float(input('Digiete outro número: '))
print(f'A soma entre {n1:.0f} e {n2:.0f} é {n1 + n2 :.0f}.')
| 36.25 | 60 | 0.606897 |
de68b780c59418e92c9ba2df56c4b93e94143723 | 281 | py | Python | tests/artificial/transf_RelativeDifference/trend_MovingMedian/cycle_5/ar_12/test_artificial_32_RelativeDifference_MovingMedian_5_12_20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/artificial/transf_RelativeDifference/trend_MovingMedian/cycle_5/ar_12/test_artificial_32_RelativeDifference_MovingMedian_5_12_20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/artificial/transf_RelativeDifference/trend_MovingMedian/cycle_5/ar_12/test_artificial_32_RelativeDifference_MovingMedian_5_12_20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 5, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 12); | 40.142857 | 176 | 0.743772 |
8f1304dee60ad2c28f64c5e77852b48a999211f9 | 3,489 | py | Python | handDetector.py | ryanyen2/CS4187-Final-Report-VirtualPiano | 7f1f2e3afbc6e2db3b41c851d29ce3648277fec7 | [
"MIT"
] | null | null | null | handDetector.py | ryanyen2/CS4187-Final-Report-VirtualPiano | 7f1f2e3afbc6e2db3b41c851d29ce3648277fec7 | [
"MIT"
] | null | null | null | handDetector.py | ryanyen2/CS4187-Final-Report-VirtualPiano | 7f1f2e3afbc6e2db3b41c851d29ce3648277fec7 | [
"MIT"
] | null | null | null | import cv2
import mediapipe as mp
import math
class HandDetector:
    """Wrapper around MediaPipe Hands for detecting hands in BGR images.

    findHands() returns, per detected hand, the 21 landmark pixel
    coordinates, a bounding box, a center point and the handedness label,
    optionally drawing them onto the input image.
    """
    def __init__(self, mode=False, maxHands=2, detectionCon=0.5, minTrackCon=0.5):
        """Create the MediaPipe Hands pipeline.

        :param mode: passed as static_image_mode (treat frames independently).
        :param maxHands: maximum number of hands to detect.
        :param detectionCon: minimum detection confidence.
        :param minTrackCon: minimum tracking confidence.
        """
        self.mode = mode
        self.maxHands = maxHands
        self.detectionCon = detectionCon
        self.minTrackCon = minTrackCon
        self.mpHands = mp.solutions.hands
        self.hands = self.mpHands.Hands(static_image_mode=self.mode, max_num_hands=self.maxHands,
                                        min_detection_confidence=self.detectionCon, min_tracking_confidence = self.minTrackCon)
        self.mpDraw = mp.solutions.drawing_utils
        # Landmark indices of the five fingertips (thumb .. pinky).
        self.tipIds = [4, 8, 12, 16, 20]
        self.fingers = []
        self.lmList = []
    def findHands(self, img, draw=True, flipType=True):
        """Detect hands in a BGR image.

        Returns a list of dicts with keys "lmList" (21 [x, y] pixel points),
        "bbox" (x, y, w, h), "center" (cx, cy) and "type" ("Left"/"Right").
        When `draw` is true, also returns the annotated image.
        `flipType` swaps the handedness label — presumably for mirrored
        (selfie-view) frames; confirm against the capture pipeline.
        """
        imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        self.results = self.hands.process(imgRGB)
        allHands = []
        h, w, c = img.shape
        if self.results.multi_hand_landmarks:
            for handType,handLms in zip(self.results.multi_handedness,self.results.multi_hand_landmarks):
                myHand={}
                ## lmList: normalized landmarks scaled to pixel coordinates.
                mylmList = []
                xList = []
                yList = []
                for id, lm in enumerate(handLms.landmark):
                    px, py = int(lm.x * w), int(lm.y * h)
                    mylmList.append([px, py])
                    xList.append(px)
                    yList.append(py)
                ## bbox: tight box around all landmarks.
                xmin, xmax = min(xList), max(xList)
                ymin, ymax = min(yList), max(yList)
                boxW, boxH = xmax - xmin, ymax - ymin
                bbox = xmin, ymin, boxW, boxH
                cx, cy = bbox[0] + (bbox[2] // 2), \
                         bbox[1] + (bbox[3] // 2)
                myHand["lmList"] = mylmList
                myHand["bbox"] = bbox
                myHand["center"] = (cx, cy)
                if flipType:
                    if handType.classification[0].label =="Right":
                        myHand["type"] = "Left"
                    else:
                        myHand["type"] = "Right"
                else:
                    myHand["type"] = handType.classification[0].label
                allHands.append(myHand)
                ## draw: landmarks, 20px-padded bounding box, and label.
                if draw:
                    self.mpDraw.draw_landmarks(img, handLms,
                                               self.mpHands.HAND_CONNECTIONS)
                    cv2.rectangle(img, (bbox[0] - 20, bbox[1] - 20),
                                  (bbox[0] + bbox[2] + 20, bbox[1] + bbox[3] + 20),
                                  (255, 0, 255), 2)
                    cv2.putText(img,myHand["type"],(bbox[0] - 30, bbox[1] - 30),cv2.FONT_HERSHEY_PLAIN,
                                2,(255, 0, 255),2)
        if draw:
            return allHands,img
        else:
            return allHands
def findDistance(self,p1, p2, img=None):
x1, y1 = p1
x2, y2 = p2
cx, cy = (x1 + x2) // 2, (y1 + y2) // 2
length = math.hypot(x2 - x1, y2 - y1)
info = (x1, y1, x2, y2, cx, cy)
if img is not None:
cv2.circle(img, (x1, y1), 15, (255, 0, 255), cv2.FILLED)
cv2.circle(img, (x2, y2), 15, (255, 0, 255), cv2.FILLED)
cv2.line(img, (x1, y1), (x2, y2), (255, 0, 255), 3)
cv2.circle(img, (cx, cy), 15, (255, 0, 255), cv2.FILLED)
return length,info, img
else:
return length, info | 39.647727 | 127 | 0.472342 |
6476f7ec856777272cce560374a37f91e7658674 | 2,897 | py | Python | treedb/raw/models.py | glottolog/treedb | 4aa735632d6add5c81cc1d7be42833446e2a447a | [
"MIT"
] | 4 | 2019-07-13T14:39:01.000Z | 2021-04-17T13:38:47.000Z | treedb/raw/models.py | glottolog/treedb | 4aa735632d6add5c81cc1d7be42833446e2a447a | [
"MIT"
] | 1 | 2020-12-02T12:02:47.000Z | 2020-12-02T15:05:25.000Z | treedb/raw/models.py | glottolog/treedb | 4aa735632d6add5c81cc1d7be42833446e2a447a | [
"MIT"
] | 2 | 2020-04-11T19:46:48.000Z | 2020-04-13T19:40:24.000Z | """Raw tables schema."""
import sqlalchemy as sa
from sqlalchemy import (Column, Integer, String, Text, Boolean,
ForeignKey, CheckConstraint, UniqueConstraint)
from .._globals import REGISTRY as registry
# Public ORM classes exported by this module.
__all__ = ['File', 'Option', 'Value']
# Table-name prefix marking these as raw (pre-parse) tables.
PREFIX = '_'
@registry.mapped
class File:
    """Forward-slash-joined ids from the root to each item."""
    __tablename__ = f'{PREFIX}file'
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Eight-character code; also the last segment of `path` (enforced below).
    glottocode = Column(String(8), CheckConstraint('length(glottocode) = 8'),
                        nullable=False, unique=True)
    # Slash-joined codes from the root to this item: each segment is 8 chars
    # plus one separator, hence the (length + 1) % 9 = 0 check.
    path = Column(Text, CheckConstraint('length(path) >= 8 AND (length(path) + 1) % 9 = 0'),
                  nullable=False, unique=True)
    # File size in bytes (strictly positive).
    size = Column(Integer, CheckConstraint('size > 0'), nullable=False)
    # Hex-encoded SHA-256 digest of the file contents (64 chars).
    sha256 = Column(String(64), CheckConstraint('length(sha256) = 64'),
                    unique=True, nullable=False)
    # The path must end with this row's glottocode.
    __table_args__ = (CheckConstraint('substr(path, -length(glottocode))'
                                      ' = glottocode'),)
    @classmethod
    def path_depth(cls, label='path_depth'):
        """SQL expression for the number of path segments (tree depth)."""
        return ((sa.func.length(cls.path) + 1) / 9).label(label)
@registry.mapped
class Option:
    """Unique (section, option) key of the values with lines config."""
    __tablename__ = f'{PREFIX}option'
    id = Column(Integer, primary_key=True)
    # Config section and option names; unique together (see __table_args__).
    section = Column(Text, CheckConstraint("section != ''"), nullable=False)
    option = Column(Text, CheckConstraint("option != ''"), nullable=False)
    # Whether the value is a list of lines; NULL exactly when not `defined`.
    is_lines = Column(Boolean(create_constraint=True))
    # Whether the (section, option) pair is declared in the config.
    defined = Column(Boolean(create_constraint=True), nullable=False)
    # May only be true when `defined` is true (see check below).
    defined_any_options = Column(Boolean(create_constraint=True), nullable=False)
    # Ordering columns; NULL when the option is undefined.
    ord_section = Column(Integer, CheckConstraint('ord_section >= 1'))
    # NOTE(review): this check references ord_section — likely a copy-paste
    # slip for 'ord_option >= 0'; confirm before relying on it.
    ord_option = Column(Integer, CheckConstraint('ord_section >= 0'))
    __table_args__ = (UniqueConstraint(section, option),
                      CheckConstraint('(is_lines IS NULL) = (defined = 0)'),
                      CheckConstraint('defined = 1 OR defined_any_options = 0'),
                      CheckConstraint('(defined = 0) = (ord_section IS NULL)'),
                      CheckConstraint('ord_section IS NOT NULL'
                                      ' OR ord_option IS NULL'))
@registry.mapped
class Value:
    """Item value as (path, section, option, line, value) combination."""
    __tablename__ = f'{PREFIX}value'
    file_id = Column(ForeignKey('_file.id'), primary_key=True)
    option_id = Column(ForeignKey('_option.id'), primary_key=True)
    # 1-based line number of this value within its option.
    line = Column(Integer, CheckConstraint('line > 0'), primary_key=True)
    # TODO: consider adding version for selective updates
    value = Column(Text, CheckConstraint("value != ''"), nullable=False)
    # A file may use each line number only once, across all options.
    __table_args__ = (UniqueConstraint(file_id, line),
                      {'info': {'without_rowid': True}})
| 34.903614 | 92 | 0.63583 |
772667b6b50578328331e03863723c31cade5e47 | 1,350 | py | Python | tests/components/nut/util.py | erogleva/core | 994ae09f69afe772150a698953c0d7386a745de2 | [
"Apache-2.0"
] | 6 | 2016-11-25T06:36:27.000Z | 2021-11-16T11:20:23.000Z | tests/components/nut/util.py | erogleva/core | 994ae09f69afe772150a698953c0d7386a745de2 | [
"Apache-2.0"
] | 56 | 2020-08-03T07:30:54.000Z | 2022-03-31T06:02:04.000Z | tests/components/nut/util.py | erogleva/core | 994ae09f69afe772150a698953c0d7386a745de2 | [
"Apache-2.0"
] | 2 | 2021-07-14T20:22:04.000Z | 2021-09-22T08:56:16.000Z | """Tests for the nut integration."""
import json
from homeassistant.components.nut.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_RESOURCES
from homeassistant.core import HomeAssistant
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry, load_fixture
def _get_mock_pynutclient(list_vars=None, list_ups=None):
pynutclient = MagicMock()
type(pynutclient).list_ups = MagicMock(return_value=list_ups)
type(pynutclient).list_vars = MagicMock(return_value=list_vars)
return pynutclient
async def async_init_integration(
    hass: HomeAssistant, ups_fixture: str, resources: list
) -> MockConfigEntry:
    """Set up the nut integration in Home Assistant.

    Loads the UPS fixture nut/<ups_fixture>.json, patches PyNUTClient with a
    mock serving that data, then creates and sets up a config entry exposing
    the requested resources. Returns the entry.
    (Docstring fix: previously said "nexia" — a copy-paste from another test.)
    """
    ups_fixture = f"nut/{ups_fixture}.json"
    list_vars = json.loads(load_fixture(ups_fixture))
    mock_pynut = _get_mock_pynutclient(list_ups={"ups1": "UPS 1"}, list_vars=list_vars)
    with patch(
        "homeassistant.components.nut.PyNUTClient",
        return_value=mock_pynut,
    ):
        entry = MockConfigEntry(
            domain=DOMAIN,
            data={CONF_HOST: "mock", CONF_PORT: "mock", CONF_RESOURCES: resources},
        )
        entry.add_to_hass(hass)
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
    return entry
| 30.681818 | 87 | 0.73037 |
87eca60c100e6d585e10898064cd1514b5bf2f0e | 1,343 | py | Python | CsvCodeGen.py | f3wwrvf4/CsvCodeGen | 5acc5ceafce801a12d9b017aea93d69252953f24 | [
"MIT"
] | null | null | null | CsvCodeGen.py | f3wwrvf4/CsvCodeGen | 5acc5ceafce801a12d9b017aea93d69252953f24 | [
"MIT"
] | null | null | null | CsvCodeGen.py | f3wwrvf4/CsvCodeGen | 5acc5ceafce801a12d9b017aea93d69252953f24 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# CsvCodeGen.py
import pandas as pd
import os
import sys
from jinja2 import Template, Environment, FileSystemLoader
def Parse(header, line):
    """Render one spreadsheet row through the Jinja2 template named in its
    first cell.

    `header` supplies the variable names and `line` the values; the template
    file is looked up inside the TemplateFiles directory.
    """
    tpl_root = 'TemplateFiles'
    data = dict(zip(header, line))
    tpl = line[0]
    # FIX: dropped the unused tpl_path local (os.path.join result was never
    # read — FileSystemLoader resolves the template name itself).
    env = Environment(loader=FileSystemLoader(tpl_root))
    template = env.get_template(tpl)
    return template.render(data)
def usage():
    """Print the command-line usage summary to stdout."""
    print("usage: CsvCodeGen.py [INPUT] [OUTPUT]")
if __name__ == "__main__":
    args = sys.argv
    of = None
    # Open the input spreadsheet and, when a second argument is given, the
    # output file; any failure falls through to the usage message.
    try:
        df = pd.read_excel(args[1], header=None)
        if 2 < len(args):
            of = open(args[2], 'w')
    # NOTE(review): a bare except also swallows unrelated errors and never
    # closes `of`; consider catching (IndexError, OSError) explicitly.
    except:
        usage()
        exit(1)
    header = []
    for index, row_data in df.iterrows():
        raw_line = row_data.astype(str).values.tolist()
        # Drop empty cells (read back as the string 'nan').
        line = [i for i in raw_line if i != 'nan']
        # print("line(" + str(index) + "):" + str(line))
        if not line:
            # Blank row: reset so the next non-blank row becomes a header.
            header = []
            pass
        else:
            # Rows whose first cell starts with '#' are comments.
            if line[0][0] == '#':
                continue
            if not header:
                header = line
                pass
            else:
                result = Parse(header, raw_line)
                if of:
                    of.write(result)
                else:
                    print(result)
| 21.66129 | 58 | 0.516754 |
4a8f405ea868c18ee873802cfa153558d1d0b3fe | 546 | py | Python | server/noteapp/player/__init__.py | torniken/easynotes | be0654d1857e1975b3adb5928103be1b3b1ad7a0 | [
"MIT"
] | null | null | null | server/noteapp/player/__init__.py | torniken/easynotes | be0654d1857e1975b3adb5928103be1b3b1ad7a0 | [
"MIT"
] | 4 | 2021-03-09T10:04:58.000Z | 2022-02-18T03:40:05.000Z | server/noteapp/player/__init__.py | torniken/easynotes | be0654d1857e1975b3adb5928103be1b3b1ad7a0 | [
"MIT"
] | null | null | null | from flask import Blueprint, request, Response
import requests
player = Blueprint('player', __name__)
def generate_data_from_response(resp, chunk=2048):
    """Yield the response body in `chunk`-byte pieces for streaming."""
    yield from resp.iter_content(chunk_size=chunk)
@player.route('/play')
def play_youtube():
    """Proxy-stream a video from the local media API as video/mp4.

    Responds 400 when the required `id` query parameter is missing.
    """
    id = request.args.get('id', None)
    if not id:
        # FIX: the original returned jsonify(...), but jsonify is never
        # imported, so this path raised NameError (a 500) instead of a 400.
        # Use the already-imported Response instead.
        return Response('must supply id', status=400, mimetype='text/plain')
    r = requests.get(f"http://localhost:5000/api/v1/play?id={id}", stream=True)
    return Response(generate_data_from_response(r), mimetype='video/mp4')
e617225a2fa8e28139e906c8ad70959a8c1541bb | 16,190 | py | Python | daal4py/sklearn/neighbors/_base.py | KalyanovD/daal4py | 7b75aa795863415a1ae35e24ac4357ab7b6e2faa | [
"Apache-2.0"
] | null | null | null | daal4py/sklearn/neighbors/_base.py | KalyanovD/daal4py | 7b75aa795863415a1ae35e24ac4357ab7b6e2faa | [
"Apache-2.0"
] | null | null | null | daal4py/sklearn/neighbors/_base.py | KalyanovD/daal4py | 7b75aa795863415a1ae35e24ac4357ab7b6e2faa | [
"Apache-2.0"
] | null | null | null | #===============================================================================
# Copyright 2020-2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
# daal4py KNN scikit-learn-compatible base classes
import numpy as np
import numbers
import daal4py as d4p
from scipy import sparse as sp
from .._utils import (
getFPType,
sklearn_check_version,
get_patch_message)
from sklearn.utils.validation import check_array, check_is_fitted, check_X_y
from sklearn.utils.multiclass import check_classification_targets
from sklearn.base import is_classifier, is_regressor
import logging
if sklearn_check_version("0.22"):
from sklearn.neighbors._base import KNeighborsMixin as BaseKNeighborsMixin
from sklearn.neighbors._base import RadiusNeighborsMixin as BaseRadiusNeighborsMixin
from sklearn.neighbors._base import NeighborsBase as BaseNeighborsBase
from sklearn.neighbors._ball_tree import BallTree
from sklearn.neighbors._kd_tree import KDTree
else:
from sklearn.neighbors.base import KNeighborsMixin as BaseKNeighborsMixin
from sklearn.neighbors.base import RadiusNeighborsMixin as BaseRadiusNeighborsMixin
from sklearn.neighbors.base import NeighborsBase as BaseNeighborsBase
from sklearn.neighbors.ball_tree import BallTree
from sklearn.neighbors.kd_tree import KDTree
def training_algorithm(method, fptype, params):
    """Return a configured daal4py KNN training algorithm.

    'brute' selects the brute-force backend, anything else the KD-tree one.
    Note: `params` is mutated — the 'fptype' entry is inserted (as before).
    """
    factory = (d4p.bf_knn_classification_training if method == 'brute'
               else d4p.kdtree_knn_classification_training)
    params['fptype'] = fptype
    return factory(**params)
def prediction_algorithm(method, fptype, params):
    """Return a configured daal4py KNN prediction algorithm.

    'brute' selects the brute-force backend, anything else the KD-tree one.
    Note: `params` is mutated — the 'fptype' entry is inserted (as before).
    """
    factory = (d4p.bf_knn_classification_prediction if method == 'brute'
               else d4p.kdtree_knn_classification_prediction)
    params['fptype'] = fptype
    return factory(**params)
def parse_auto_method(estimator, method, n_samples, n_features):
    """Resolve 'auto'/'ball_tree' to a backend daal4py supports.

    'brute' is chosen for precomputed metrics, wide data (> 11 features),
    or when n_neighbors reaches half the training set; otherwise 'kd_tree'
    when the effective metric is KD-tree compatible, else 'brute'. Any
    other `method` value is returned unchanged.
    """
    if method not in ['auto', 'ball_tree']:
        return method
    many_neighbors = estimator.n_neighbors is not None and \
        estimator.n_neighbors >= estimator.n_samples_fit_ // 2
    if estimator.metric == 'precomputed' or n_features > 11 or many_neighbors:
        return 'brute'
    if estimator.effective_metric_ in KDTree.valid_metrics:
        return 'kd_tree'
    return 'brute'
def daal4py_fit(estimator, X, fptype):
    """Train a daal4py KNN model on X and cache it on `estimator`.

    Mirrors scikit-learn fit bookkeeping (_fit_X, _fit_method, _tree,
    effective_metric_) and stores the trained model in _daal_model.
    Labels are taken from estimator._y when present (classification).
    """
    estimator._fit_X = X
    estimator._fit_method = estimator.algorithm
    estimator.effective_metric_ = 'euclidean'
    estimator._tree = None
    weight_scheme = getattr(estimator, 'weights', 'uniform')
    vote = 'voteUniform' if weight_scheme == 'uniform' else 'voteDistance'
    evaluate = 'none' if estimator._y is None else 'computeClassLabels'
    train_params = {
        'method': 'defaultDense',
        'k': estimator.n_neighbors,
        'voteWeights': vote,
        'resultsToCompute': 'computeIndicesOfNeighbors|computeDistances',
        'resultsToEvaluate': evaluate
    }
    if hasattr(estimator, 'classes_'):
        train_params['nClasses'] = len(estimator.classes_)
    labels = None if estimator._y is None else estimator._y.reshape(-1, 1)
    method = parse_auto_method(
        estimator, estimator.algorithm,
        estimator.n_samples_fit_, estimator.n_features_in_)
    estimator._fit_method = method
    train_alg = training_algorithm(method, fptype, train_params)
    estimator._daal_model = train_alg.compute(X, labels).model
def daal4py_kneighbors(estimator, X=None, n_neighbors=None,
                       return_distance=True):
    """Find the n_neighbors nearest neighbors with the daal4py backend.

    Mirrors sklearn's KNeighborsMixin.kneighbors: when X is None the
    training data itself is queried and each sample's self-match is
    removed from the results. Returns (distances, indices) or just
    indices depending on `return_distance`.
    """
    n_features = getattr(estimator, 'n_features_in_', None)
    shape = getattr(X, 'shape', None)
    if n_features and shape and len(shape) > 1 and shape[1] != n_features:
        raise ValueError(
            'Input data shape {} is inconsistent with the trained model'.format(X.shape))
    if sklearn_check_version("0.22"):
        check_is_fitted(estimator)
    else:
        check_is_fitted(estimator, [])
    if n_neighbors is None:
        n_neighbors = estimator.n_neighbors
    elif n_neighbors <= 0:
        raise ValueError(
            "Expected n_neighbors > 0. Got %d" %
            n_neighbors
        )
    else:
        if not isinstance(n_neighbors, numbers.Integral):
            raise TypeError(
                "n_neighbors does not take %s value, "
                "enter integer value" %
                type(n_neighbors))
    if X is not None:
        query_is_train = False
        X = check_array(X, accept_sparse='csr', dtype=[np.float64, np.float32])
    else:
        query_is_train = True
        X = estimator._fit_X
        # Include an extra neighbor to account for the sample itself being
        # returned, which is removed later
        n_neighbors += 1
    n_samples_fit = estimator.n_samples_fit_
    if n_neighbors > n_samples_fit:
        raise ValueError(
            "Expected n_neighbors <= n_samples, "
            " but n_samples = %d, n_neighbors = %d" %
            (n_samples_fit, n_neighbors)
        )
    chunked_results = None
    try:
        fptype = getFPType(X)
    except ValueError:
        fptype = None
    weights = getattr(estimator, 'weights', 'uniform')
    params = {
        'method': 'defaultDense',
        'k': n_neighbors,
        'voteWeights': 'voteUniform' if weights == 'uniform' else 'voteDistance',
        'resultsToCompute': 'computeIndicesOfNeighbors|computeDistances',
        'resultsToEvaluate': 'none' if estimator._y is None else 'computeClassLabels'
    }
    if hasattr(estimator, 'classes_'):
        params['nClasses'] = len(estimator.classes_)
    method = parse_auto_method(
        estimator, estimator._fit_method, estimator.n_samples_fit_, n_features)
    predict_alg = prediction_algorithm(method, fptype, params)
    prediction_result = predict_alg.compute(X, estimator._daal_model)
    distances = prediction_result.distances
    indices = prediction_result.indices
    # The KD-tree backend does not return neighbors sorted by distance;
    # sort each row here so results match the brute-force ordering.
    if method == 'kd_tree':
        for i in range(distances.shape[0]):
            seq = distances[i].argsort()
            indices[i] = indices[i][seq]
            distances[i] = distances[i][seq]
    if return_distance:
        results = distances, indices.astype(int)
    else:
        results = indices.astype(int)
    if chunked_results is not None:
        if return_distance:
            neigh_dist, neigh_ind = zip(*chunked_results)
            results = np.vstack(neigh_dist), np.vstack(neigh_ind)
        else:
            results = np.vstack(chunked_results)
    if not query_is_train:
        return results
    # If the query data is the same as the indexed data, we would like
    # to ignore the first nearest neighbor of every sample, i.e
    # the sample itself.
    if return_distance:
        neigh_dist, neigh_ind = results
    else:
        neigh_ind = results
    n_queries, _ = X.shape
    sample_range = np.arange(n_queries)[:, None]
    sample_mask = neigh_ind != sample_range
    # Corner case: When the number of duplicates are more
    # than the number of neighbors, the first NN will not
    # be the sample, but a duplicate.
    # In that case mask the first duplicate.
    dup_gr_nbrs = np.all(sample_mask, axis=1)
    sample_mask[:, 0][dup_gr_nbrs] = False
    neigh_ind = np.reshape(
        neigh_ind[sample_mask], (n_queries, n_neighbors - 1))
    if return_distance:
        neigh_dist = np.reshape(
            neigh_dist[sample_mask], (n_queries, n_neighbors - 1))
        return neigh_dist, neigh_ind
    return neigh_ind
def validate_data(estimator, X, y=None, reset=True,
                  validate_separately=False, **check_params):
    """Validate X (and optionally y) like sklearn's BaseEstimator._validate_data.

    With y=None, only X is checked (raising when the estimator's tags say a
    target is required). With validate_separately=(X_params, y_params), X and
    y are checked independently; otherwise check_X_y handles both. On
    sklearn >= 0.23 the feature count is also recorded/verified unless
    ensure_2d is disabled. Returns (X, y).
    """
    if y is None:
        try:
            needs_target = estimator._get_tags()["requires_y"]
        except KeyError:
            needs_target = False
        if needs_target:
            raise ValueError(
                f"This {estimator.__class__.__name__} estimator "
                f"requires y to be passed, but the target y is None."
            )
        X = check_array(X, **check_params)
        out = X, y
    elif validate_separately:
        # Some estimators validate X and y separately; calling check_array()
        # on each is not equivalent to check_X_y().
        check_X_params, check_y_params = validate_separately
        X = check_array(X, **check_X_params)
        y = check_array(y, **check_y_params)
        out = X, y
    else:
        X, y = check_X_y(X, y, **check_params)
        out = X, y
    if sklearn_check_version("0.23") and check_params.get('ensure_2d', True):
        estimator._check_n_features(X, reset=reset)
    return out
class NeighborsBase(BaseNeighborsBase):
    """daal4py-accelerated drop-in for sklearn's NeighborsBase.

    ``_fit`` decides per call whether the Intel oneDAL (daal4py) backend
    can serve this configuration; when it cannot, it falls back to the
    stock sklearn fit.
    """
    def __init__(self, n_neighbors=None, radius=None,
                 algorithm='auto', leaf_size=30, metric='minkowski',
                 p=2, metric_params=None, n_jobs=None):
        # Pure pass-through to the sklearn base constructor.
        super().__init__(
            n_neighbors=n_neighbors, radius=radius,
            algorithm=algorithm, leaf_size=leaf_size, metric=metric,
            p=p, metric_params=metric_params, n_jobs=n_jobs)
    def _fit(self, X, y=None):
        """Fit the model, preferring daal4py when the inputs allow it.

        Falls back to stock sklearn when X is sparse or a prebuilt tree,
        y is multi-output, the metric is non-Euclidean, weights are
        non-standard, the dtype is unsupported, or a classifier sees
        fewer than two classes.
        """
        # X may also be a prebuilt KD/Ball tree or fitted estimator; those
        # skip validation and the daal4py path entirely.
        X_incorrect_type = isinstance(
            X, (KDTree, BallTree, NeighborsBase, BaseNeighborsBase))
        single_output = True
        self._daal_model = None
        shape = None
        correct_n_classes = True
        try:
            requires_y = self._get_tags()["requires_y"]
        except KeyError:
            requires_y = False
        if y is not None or requires_y:
            if not X_incorrect_type or y is None:
                X, y = validate_data(
                    self, X, y, accept_sparse="csr", multi_output=True,
                    dtype=[np.float64, np.float32])
            single_output = False if y.ndim > 1 and y.shape[1] > 1 else True
            shape = y.shape  # remembered to restore regressor targets below
            if is_classifier(self) or is_regressor(self):
                # Normalize y to 2-D for the per-column label encoding.
                if y.ndim == 1 or y.ndim == 2 and y.shape[1] == 1:
                    self.outputs_2d_ = False
                    y = y.reshape((-1, 1))
                else:
                    self.outputs_2d_ = True
                if is_classifier(self):
                    check_classification_targets(y)
                # Encode labels column-wise; classes_ holds the decoders.
                self.classes_ = []
                self._y = np.empty(y.shape, dtype=int)
                for k in range(self._y.shape[1]):
                    classes, self._y[:, k] = np.unique(
                        y[:, k], return_inverse=True)
                    self.classes_.append(classes)
                if not self.outputs_2d_:
                    self.classes_ = self.classes_[0]
                    self._y = self._y.ravel()
                n_classes = len(self.classes_)
                if n_classes < 2:
                    # The daal4py branch below is skipped in this case.
                    correct_n_classes = False
            else:
                self._y = y
        else:
            if not X_incorrect_type:
                X, _ = validate_data(
                    self, X, accept_sparse='csr', dtype=[np.float64, np.float32])
            self._y = None
        if not X_incorrect_type:
            self.n_samples_fit_ = X.shape[0]
            self.n_features_in_ = X.shape[1]
        try:
            fptype = getFPType(X)  # None => dtype unsupported by daal4py
        except ValueError:
            fptype = None
        weights = getattr(self, 'weights', 'uniform')
        def stock_fit(self, X, y):
            # Stock sklearn fit; the 0.24 signature grew a y argument.
            if sklearn_check_version("0.24"):
                result = super(NeighborsBase, self)._fit(X, y)
            else:
                result = super(NeighborsBase, self)._fit(X)
            return result
        if self.n_neighbors is not None:
            if self.n_neighbors <= 0:
                raise ValueError(
                    "Expected n_neighbors > 0. Got %d" %
                    self.n_neighbors
                )
            if not isinstance(self.n_neighbors, numbers.Integral):
                raise TypeError(
                    "n_neighbors does not take %s value, "
                    "enter integer value" %
                    type(self.n_neighbors))
        # daal4py is only used for the Euclidean case.
        condition = (self.metric == 'minkowski' and self.p == 2) or \
            self.metric == 'euclidean'
        if not X_incorrect_type and weights in ['uniform', 'distance'] \
                and self.algorithm in ['brute', 'kd_tree', 'auto', 'ball_tree'] \
                and condition \
                and single_output and fptype is not None and not sp.issparse(X) and \
                correct_n_classes:
            try:
                logging.info(
                    "sklearn.neighbors.KNeighborsMixin."
                    "kneighbors: " + get_patch_message("daal"))
                daal4py_fit(self, X, fptype)
                result = self
            except RuntimeError:
                # daal4py refused at runtime; retry with stock sklearn.
                logging.info(
                    "sklearn.neighbors.KNeighborsMixin."
                    "kneighbors: " + get_patch_message("sklearn_after_daal"))
                result = stock_fit(self, X, y)
        else:
            logging.info(
                "sklearn.neighbors.KNeighborsMixin."
                "kneighbors: " + get_patch_message("sklearn"))
            result = stock_fit(self, X, y)
        if y is not None and is_regressor(self):
            # Regressors keep raw targets in their original shape.
            self._y = y if shape is None else y.reshape(shape)
        return result
class KNeighborsMixin(BaseKNeighborsMixin):
    """KNeighborsMixin that routes queries through daal4py when possible."""
    def kneighbors(self, X=None, n_neighbors=None, return_distance=True):
        """Find the K nearest neighbors.

        Uses the daal4py model built at fit time when the query is dense
        with a supported dtype; otherwise delegates to stock sklearn,
        first rebuilding the sklearn fit state that a daal4py fit skipped.
        """
        daal_model = getattr(self, '_daal_model', None)
        if X is not None:
            X = check_array(
                X, accept_sparse='csr', dtype=[
                    np.float64, np.float32])
        # X is None means "query the training set itself".
        x = self._fit_X if X is None else X
        try:
            fptype = getFPType(x)
        except ValueError:
            fptype = None
        if daal_model is not None and fptype is not None and not sp.issparse(
                X):
            logging.info(
                "sklearn.neighbors.KNeighborsMixin."
                "kneighbors: " + get_patch_message("daal"))
            result = daal4py_kneighbors(self, X, n_neighbors, return_distance)
        else:
            # (note: this log message lacks the space after the colon that
            # the other branches have)
            logging.info(
                "sklearn.neighbors.KNeighborsMixin."
                "kneighbors:" + get_patch_message("sklearn"))
            # Precedence: `daal_model is not None or (tree missing and
            # kd_tree)`.  A daal4py fit never built sklearn's internals,
            # so re-run the base fit before delegating.
            if daal_model is not None or getattr(self, '_tree', 0) is None and \
                    self._fit_method == 'kd_tree':
                if sklearn_check_version("0.24"):
                    BaseNeighborsBase._fit(self, self._fit_X, self._y)
                else:
                    BaseNeighborsBase._fit(self, self._fit_X)
            result = super(KNeighborsMixin, self).kneighbors(
                X, n_neighbors, return_distance)
        return result
class RadiusNeighborsMixin(BaseRadiusNeighborsMixin):
    """RadiusNeighborsMixin that always delegates the query to sklearn.

    There is no daal4py radius query here; this wrapper only makes sure
    the sklearn fit state exists (a daal4py fit skips building it).
    """
    def radius_neighbors(self, X=None, radius=None, return_distance=True,
                         sort_results=False):
        daal_model = getattr(self, '_daal_model', None)
        # Precedence: `daal_model is not None or (tree missing and kd_tree)`.
        if daal_model is not None or getattr(self, '_tree', 0) is None and \
                self._fit_method == 'kd_tree':
            if sklearn_check_version("0.24"):
                BaseNeighborsBase._fit(self, self._fit_X, self._y)
            else:
                BaseNeighborsBase._fit(self, self._fit_X)
        if sklearn_check_version("0.22"):
            # sort_results is only accepted by sklearn >= 0.22.
            result = BaseRadiusNeighborsMixin.radius_neighbors(
                self, X, radius, return_distance, sort_results)
        else:
            result = BaseRadiusNeighborsMixin.radius_neighbors(
                self, X, radius, return_distance)
        return result
| 36.628959 | 89 | 0.605435 |
fde5726f8d1251849648d860293f2cefe9f2f027 | 10,515 | py | Python | parsley/tests/tests.py | Tivix/Django-parsley | dbe1ab8b6c58168c5ae267241f0c849c9eae631b | [
"BSD-3-Clause"
] | 2 | 2015-06-02T22:49:52.000Z | 2016-09-28T23:08:09.000Z | parsley/tests/tests.py | Tivix/Django-parsley | dbe1ab8b6c58168c5ae267241f0c849c9eae631b | [
"BSD-3-Clause"
] | null | null | null | parsley/tests/tests.py | Tivix/Django-parsley | dbe1ab8b6c58168c5ae267241f0c849c9eae631b | [
"BSD-3-Clause"
] | null | null | null | import re
import six
from django import forms
from django.contrib import admin
from django.test import TestCase
from django.utils.translation import ugettext_lazy as _
from parsley.decorators import parsleyfy
from .forms import (TextForm, TextForm2, FieldTypeForm, ExtraDataForm,
ExtraDataMissingFieldForm, FormWithWidgets, StudentModelForm,
FormWithCleanField, FormWithCustomInit, FormWithCustomChoices,
FormWithMedia, FormWithoutMedia, MultiWidgetForm, CustomErrorMessageForm)
from .models import Student
from .admin import StudentAdmin
class ParsleyTestCase(TestCase):
    """TestCase with an order-insensitive, text-compared attrs assertion."""

    def assertAttrsEqual(self, a, b):
        """Assert every key of ``a`` that also exists in ``b`` maps to the
        same value, comparing both sides as text so lazy translation
        proxies and ints compare equal to their string forms.  Keys present
        in only one dict are deliberately ignored."""
        for k in a:  # ignore keys that b does not specify
            if k in b:
                # six.text_type is ``str`` on py3 and ``unicode`` on py2,
                # replacing the explicit six.PY3 branch (which referenced
                # the bare ``unicode`` builtin).
                self.assertEqual(six.text_type(a[k]), six.text_type(b[k]))
def test_basic(self):
"""
Tests that parsleyfy will add data-required for required fields,
but not for required=False fields for CharFields
"""
form = TextForm()
self.assertEqual(form.fields["name"].widget.attrs, {})
self.assertEqual(form.fields["university"].widget.attrs, {})
ParsleyForm = parsleyfy(TextForm)
form = ParsleyForm()
self.assertAttrsEqual(form.fields["name"].widget.attrs, {
"data-required": "true",
"data-required-message": _("This field is required.")
})
self.assertEqual(form.fields["university"].widget.attrs, {})
class CharFieldDecoratedTest(ParsleyTestCase):
def test_decorated(self):
"Tests that parsleyfy works as a class Decorator"
form = TextForm2()
self.assertAttrsEqual(form.fields["name"].widget.attrs, {
"data-required": "true",
"data-required-message": _("This field is required.")
})
self.assertEqual(form.fields["university"].widget.attrs, {})
class FieldTypeFormTest(ParsleyTestCase):
def test_fields(self):
"Tests that parsleyfy adds data-required for things other than CharField"
form = FieldTypeForm()
fields = form.fields
self.assertEqual(fields["url"].widget.attrs["data-required"], "true")
self.assertFalse("data-required" in fields["url2"].widget.attrs)
self.assertEqual(fields["email"].widget.attrs["data-required"], "true")
self.assertFalse("data-required" in fields["email2"].widget.attrs)
class DataTypeTest(ParsleyTestCase):
def test_data_types(self):
"Test that different field types get correct data-type"
form = FieldTypeForm()
fields = form.fields
self.assertTrue("data-type" in fields["url"].widget.attrs)
self.assertEqual(fields["url"].widget.attrs["data-type"], "url")
self.assertTrue("data-type" in fields["email"].widget.attrs)
self.assertEqual(fields["email"].widget.attrs["data-type"], "email")
self.assertEqual(fields["age"].widget.attrs["data-type"], "digits")
self.assertEqual(fields["income"].widget.attrs["data-type"], "number")
self.assertEqual(fields["income2"].widget.attrs["data-type"], "number")
self.assertEqual(fields["topnav"].widget.attrs["data-regexp"], "#[A-Fa-f0-9]{6}")
self.assertNotIn("data-regexp-flag", fields["topnav"].widget.attrs)
self.assertEqual(fields["topnav2"].widget.attrs["data-regexp"], "#[a-z]+")
self.assertEqual(fields["topnav2"].widget.attrs["data-regexp-flag"], "i")
class LengthTest(ParsleyTestCase):
def test_length(self):
form = FieldTypeForm()
fields = form.fields
name_attrs = fields["name"].widget.attrs
self.assertTrue("data-minlength" in name_attrs)
self.assertEqual(name_attrs["data-minlength"], 3)
self.assertEqual(name_attrs["data-maxlength"], 30)
class ValueTest(ParsleyTestCase):
def test_value(self):
form = FieldTypeForm()
fields = form.fields
num_attrs = fields['some_num'].widget.attrs
self.assertTrue("data-min" in num_attrs, True)
self.assertTrue("data-max" in num_attrs, True)
self.assertEqual(num_attrs["data-min"], 10)
self.assertEqual(num_attrs["data-max"], 100)
class FormWithWidgetsTest(ParsleyTestCase):
def test_widgets(self):
"Assert that @parsleyfy doesn't cloober existing attrs"
form = FormWithWidgets()
self.assertTrue(form.fields["description"].widget, forms.TextInput)
self.assertEqual("highlight", form.fields["blurb"].widget.attrs["class"])
class TestMetadata(ParsleyTestCase):
def test_docstring(self):
form1 = TextForm()
form2 = parsleyfy(TextForm)()
self.assertEqual(form1.__doc__, form2.__doc__)
def test_module(self):
form1 = TextForm()
form2 = parsleyfy(TextForm)()
self.assertEqual(form1.__module__, form2.__module__)
def test_name(self):
form1 = TextForm()
form2 = parsleyfy(TextForm)()
self.assertEqual(form1.__class__.__name__, form2.__class__.__name__)
class TestModelForm(ParsleyTestCase):
def test_model_form(self):
form = StudentModelForm()
fields = form.fields
foo_attrs = fields["name"].widget.attrs
self.assertEqual(foo_attrs["data-required"], "true")
def test_model_form_save(self):
form = StudentModelForm({"name": "Luke Skywalker"})
form.save(commit=False)
class TestCustomInit(ParsleyTestCase):
def test_custom_init(self):
form = FormWithCustomInit()
self.assertEqual(form.fields["description"].initial, "Hello")
def test_custom_choices(self):
form = FormWithCustomChoices()
self.assertNotEqual(len(form.fields['state'].choices), 0)
self.assertEqual(form.fields['state'].choices,
[("NY", "NY"), ("OH", "OH")])
class TestCleanFields(ParsleyTestCase):
def test_clean(self):
form = FormWithCleanField(data={"description": "foo"})
self.assertEqual(form.is_bound, True)
self.assertEqual(form.is_valid(), False)
self.assertTrue(hasattr(form, "clean_description"))
def test_clean_parslyfied(self):
form = parsleyfy(FormWithCleanField)(data={"description": "foo"})
self.assertEqual(form.is_bound, True)
self.assertEqual(form.is_valid(), False)
self.assertTrue(hasattr(form, "clean_description"))
class TestExtraAttributes(ParsleyTestCase):
def test_equalto(self):
form = ExtraDataForm()
attrs = form.fields["email2"].widget.attrs
self.assertAttrsEqual(attrs, {
"data-type": "email",
"data-required": "true",
"data-equalto-message": "Must match",
"data-equalto": "#id_email",
"data-required-message": _("This field is required."),
})
def test_default_data(self):
form = ExtraDataForm()
attrs = form.fields["name"].widget.attrs
self.assertAttrsEqual(attrs, {
"data-required": "true",
"data-error-message": "Name invalid",
"data-required-message": _("This field is required.")
})
def test_boolean_values(self):
form = ExtraDataForm()
attrs = form.fields["hide_errors"].widget.attrs
self.assertAttrsEqual(attrs, {
"data-required": "true",
"data-show-errors": "false",
"data-required-message": _("This field is required.")
})
def test_missing_field(self):
ExtraDataMissingFieldForm() # No error should be raised
class TestAdminMixin(ParsleyTestCase):
def test_media(self):
student_admin = StudentAdmin(Student, admin.site)
js = student_admin.media.render_js()
self.assertIn(
'<script type="text/javascript" src="/static/parsley/js/parsley-standalone.min.js"></script>',
js
)
self.assertIn(
'<script type="text/javascript" src="/static/parsley/js/parsley.django-admin.js"></script>',
js
)
class TestFormMedia(ParsleyTestCase):
def test_form_media(self):
form = FormWithoutMedia()
js = form.media.render_js()
self.assertIn(
'<script type="text/javascript" src="/static/parsley/js/parsley-standalone.min.js"></script>',
js
)
def test_existing_form_media(self):
form = FormWithMedia()
js = form.media.render_js()
self.assertIn(
'<script type="text/javascript" src="/static/jquery.min.js"></script>',
js
)
self.assertIn(
'<script type="text/javascript" src="/static/parsley/js/parsley-standalone.min.js"></script>',
js
)
class TestMultiValueField(ParsleyTestCase):
def test_parsley_attributes(self):
form = MultiWidgetForm()
fields = form.fields["ssn"].fields
self.assertAttrsEqual(fields[0].widget.attrs, {
"data-minlength": 3,
"data-maxlength": 3,
"maxlength": "3",
"data-regexp": r'^(\d)+$',
})
self.assertAttrsEqual(fields[1].widget.attrs, {
"data-minlength": 3,
"data-maxlength": 3,
"maxlength": "3",
"data-regexp": r'^(\d)+$',
})
self.assertAttrsEqual(fields[2].widget.attrs, {
"data-minlength": 4,
"data-maxlength": 4,
"maxlength": "4",
"data-regexp": r'^(\d)+$',
})
class TestCustomErrorMessages(TestCase):
def test_new_message(self):
form = CustomErrorMessageForm()
attrs = form.fields['name'].widget.attrs
self.assertEqual(attrs, {
"maxlength": '30',
"data-maxlength": 30,
"data-maxlength-message": "Please only 30 characters"
})
def test_field_type_message(self):
form = CustomErrorMessageForm()
attrs = form.fields['email'].widget.attrs
self.assertEqual(attrs, {
"data-type": "email",
"data-type-email-message": "Invalid email"
})
def test_override_default_message(self):
form = CustomErrorMessageForm()
attrs = form.fields['favorite_color'].widget.attrs
self.assertEqual(attrs, {
"data-required": "true",
"data-required-message": "Favorite color is required"
})
| 35.765306 | 106 | 0.623585 |
887b2139291e8c2dcff729503edb3683c29e043a | 2,761 | py | Python | tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2016] 1.py | gour/holidata | 89c7323f9c5345a3ecbf5cd5a835b0e08cfebc13 | [
"MIT"
] | 32 | 2019-04-12T08:01:34.000Z | 2022-02-28T04:41:50.000Z | tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2016] 1.py | gour/holidata | 89c7323f9c5345a3ecbf5cd5a835b0e08cfebc13 | [
"MIT"
] | 74 | 2019-07-09T16:35:20.000Z | 2022-03-09T16:41:34.000Z | tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[sk_SK-2016] 1.py | gour/holidata | 89c7323f9c5345a3ecbf5cd5a835b0e08cfebc13 | [
"MIT"
] | 20 | 2019-01-28T07:41:02.000Z | 2022-02-16T02:38:57.000Z | [
{
'date': '2016-01-01',
'description': 'Deň vzniku Slovenskej republiky',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2016-01-06',
'description': 'Zjavenie Pána / Traja králi',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2016-03-25',
'description': 'Veľký piatok',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2016-03-28',
'description': 'Veľkonočný pondelok',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2016-05-01',
'description': 'Sviatok práce',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2016-05-08',
'description': 'Deň víťazstva nad fašizmom',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2016-07-05',
'description': 'Sviatok svätého Cyrila a Metoda',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2016-08-29',
'description': 'Výročie SNP',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2016-09-01',
'description': 'Deň Ústavy Slovenskej republiky',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2016-09-15',
'description': 'Sedembolestná Panna Mária',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2016-11-01',
'description': 'Sviatok všetkých svätých',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2016-11-17',
'description': 'Deň boja za slobodu a demokraciu',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2016-12-24',
'description': 'Štedrý deň',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2016-12-25',
'description': 'Prvý sviatok vianočný',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2016-12-26',
'description': 'Druhý sviatok vianočný',
'locale': 'sk-SK',
'notes': '',
'region': '',
'type': 'NRF'
}
] | 22.631148 | 58 | 0.383194 |
1dafa06ee04079fada7a42be81e5d5c1ca7031b4 | 3,224 | py | Python | scripts/examples/OpenMV/22-Optical-Flow/absolute-rotation-scale.py | jiskra/openmv | a0f321836f77f94d8118910598dcdb79eb784d58 | [
"MIT"
] | 1,761 | 2015-07-10T23:14:17.000Z | 2022-03-30T07:49:49.000Z | scripts/examples/OpenMV/22-Optical-Flow/absolute-rotation-scale.py | jiskra/openmv | a0f321836f77f94d8118910598dcdb79eb784d58 | [
"MIT"
] | 487 | 2015-07-07T23:21:20.000Z | 2022-03-30T17:13:22.000Z | scripts/examples/OpenMV/22-Optical-Flow/absolute-rotation-scale.py | jiskra/openmv | a0f321836f77f94d8118910598dcdb79eb784d58 | [
"MIT"
] | 882 | 2015-08-01T08:34:19.000Z | 2022-03-30T07:36:23.000Z | # Absolute Optical Flow Rotation/Scale
#
# This example shows off using your OpenMV Cam to measure
# rotation/scale by comparing the current and a previous
# image against each other. Note that only rotation/scale is
# handled - not X and Y translation in this mode.
# To run this demo effectively please mount your OpenMV Cam on a steady
# base and SLOWLY rotate the camera around the lens and move the camera
# forward/backwards to see the numbers change.
# I.e. Z direction changes only.
import sensor, image, time, math
# NOTE!!! You have to use a small power of 2 resolution when using
# find_displacement(). This is because the algorithm is powered by
# something called phase correlation which does the image comparison
# using FFTs. A non-power of 2 resolution requires padding to a power
# of 2 which reduces the usefulness of the algorithm results. Please
# use a resolution like B64X64 or B64X32 (2x faster).
# Your OpenMV Cam supports power of 2 resolutions of 64x32, 64x64,
# 128x64, and 128x128. If you want a resolution of 32x32 you can create
# it by doing "img.pool(2, 2)" on a 64x64 image.
sensor.reset() # Reset and initialize the sensor.
sensor.set_pixformat(sensor.RGB565) # Set pixel format to RGB565 (or GRAYSCALE)
sensor.set_framesize(sensor.B64X64) # Set frame size to 64x64... (or 64x32)...
sensor.skip_frames(time = 2000) # Wait for settings take effect.
clock = time.clock() # Create a clock object to track the FPS.
# Take from the main frame buffer's RAM to allocate a second frame buffer.
# There's a lot more RAM in the frame buffer than in the MicroPython heap.
# However, after doing this you have a lot less RAM for some algorithms...
# So, be aware that it's a lot easier to get out of RAM issues now.
extra_fb = sensor.alloc_extra_fb(sensor.width(), sensor.height(), sensor.RGB565)
extra_fb.replace(sensor.snapshot())
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
# This algorithm is hard to test without a perfect jig... So, here's a cheat to see it works.
# Put in a z_rotation value below and you should see the r output be equal to that.
if(0):
expected_rotation = 20.0
img.rotation_corr(z_rotation=expected_rotation)
# This algorithm is hard to test without a perfect jig... So, here's a cheat to see it works.
# Put in a zoom value below and you should see the z output be equal to that.
if(0):
expected_zoom = 0.8
img.rotation_corr(zoom=expected_zoom)
# For this example we never update the old image to measure absolute change.
displacement = extra_fb.find_displacement(img, logpolar=True)
# Offset results are noisy without filtering so we drop some accuracy.
rotation_change = int(math.degrees(displacement.rotation()) * 5) / 5.0
zoom_amount = displacement.scale()
if(displacement.response() > 0.1): # Below 0.1 or so (YMMV) and the results are just noise.
print("{0:+f}r {1:+f}z {2} {3} FPS".format(rotation_change, zoom_amount, \
displacement.response(),
clock.fps()))
else:
print(clock.fps())
| 47.411765 | 97 | 0.71495 |
19a05eefbc8fa633dd497383f55e26bfe6b00a2a | 311 | py | Python | exercicios/Exercicios Diversos/ex027.py | Roberto-Sartore/Python | 98f91f13cf78d761893c4a1f3264ed999244d32b | [
"MIT"
] | null | null | null | exercicios/Exercicios Diversos/ex027.py | Roberto-Sartore/Python | 98f91f13cf78d761893c4a1f3264ed999244d32b | [
"MIT"
] | null | null | null | exercicios/Exercicios Diversos/ex027.py | Roberto-Sartore/Python | 98f91f13cf78d761893c4a1f3264ed999244d32b | [
"MIT"
"""Write a program that reads three numbers and shows them in descending
order.  (Exercise statement translated from Portuguese.)"""
n1 = int(input('Digite o 1º número: '))
n2 = int(input('Digite o 2º número: '))
n3 = int(input('Digite o 3º número: '))
lista = [n1, n2, n3]
lista.sort(reverse=True)  # in-place descending sort
print(f'Os números digitado em ordem descrescente são {lista}.')
| 34.555556 | 78 | 0.691318 |
f897d65e559b5ac483688db183b5ae473323ca51 | 1,307 | py | Python | src/test/parser/template/graph_tests/test_authorise_passthrough.py | narnikgamarnikus/program-y | 777b9a8a75ec787c037de9f11a8527875ff450b1 | [
"MIT"
] | null | null | null | src/test/parser/template/graph_tests/test_authorise_passthrough.py | narnikgamarnikus/program-y | 777b9a8a75ec787c037de9f11a8527875ff450b1 | [
"MIT"
] | null | null | null | src/test/parser/template/graph_tests/test_authorise_passthrough.py | narnikgamarnikus/program-y | 777b9a8a75ec787c037de9f11a8527875ff450b1 | [
"MIT"
] | null | null | null | import xml.etree.ElementTree as ET
from programy.parser.exceptions import ParserException
from programy.parser.template.nodes.base import TemplateNode
from programy.parser.template.nodes.authorise import TemplateAuthoriseNode
from programy.config.sections.brain.brain import BrainConfiguration
from test.parser.template.graph_tests.graph_test_client import TemplateGraphTestClient
class TemplateGraphAuthoriseTests(TemplateGraphTestClient):
    """Graph-parser tests for the <authorise> template node."""
    def get_brain_config(self):
        # The default brain configuration is sufficient for these tests.
        return BrainConfiguration()
    def test_authorise_with_role_as_attrib(self):
        """<authorise role="..."> parses to a TemplateAuthoriseNode whose
        role comes from the attribute and whose children still resolve."""
        template = ET.fromstring("""
            <template>
                <authorise role="root">
                    Hello
                </authorise>
            </template>
            """)
        ast = self.parser.parse_template_expression(template)
        self.assertIsNotNone(ast)
        self.assertIsInstance(ast, TemplateNode)
        self.assertIsNotNone(ast.children)
        self.assertEqual(len(ast.children), 1)
        auth_node = ast.children[0]
        self.assertIsNotNone(auth_node)
        self.assertIsInstance(auth_node, TemplateAuthoriseNode)
        self.assertIsNotNone(auth_node.role)
        self.assertEqual("root", auth_node.role)
        # Resolution ignores the role check here and yields the child text.
        result = auth_node.resolve(self.test_bot, "console")
        self.assertIsNotNone(result)
        self.assertEqual("Hello", result)
74a21dd90e9955f5c724d88c92867c87ae9fff32 | 4,021 | py | Python | perfkitbenchmarker/linux_benchmarks/openssl_speed_benchmark.py | Nowasky/PerfKitBenchmarker | cfa88e269eb373780910896ed4bdc8db09469753 | [
"Apache-2.0"
] | 3 | 2018-04-28T13:06:14.000Z | 2020-06-09T02:39:44.000Z | perfkitbenchmarker/linux_benchmarks/openssl_speed_benchmark.py | Nowasky/PerfKitBenchmarker | cfa88e269eb373780910896ed4bdc8db09469753 | [
"Apache-2.0"
] | 1 | 2018-03-15T21:01:27.000Z | 2018-03-15T21:01:27.000Z | perfkitbenchmarker/linux_benchmarks/openssl_speed_benchmark.py | Nowasky/PerfKitBenchmarker | cfa88e269eb373780910896ed4bdc8db09469753 | [
"Apache-2.0"
] | 6 | 2019-06-11T18:59:57.000Z | 2021-03-02T19:14:42.000Z | # Copyright 2021 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs openssl speed.
Manual page:
https://www.openssl.org/docs/manmaster/man1/openssl-speed.html.
"""
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
BENCHMARK_NAME = 'openssl_speed'
BENCHMARK_CONFIG = """
openssl_speed:
description: >
Runs openssl-speed.
vm_groups:
default:
vm_spec: *default_single_core
"""
FLAGS = flags.FLAGS
_OPENSSL_SPEED_DURATION = flags.DEFINE_integer(
'openssl_speed_duration', 60, 'Duration of speed test in seconds.')
_OPENSSL_SPEED_ALGORITHM = flags.DEFINE_string(
'openssl_speed_algorithm', 'aes-256-ctr',
'Use the specified cipher or message digest algorithm.')
_OPENSSL_SPEED_MULTI = flags.DEFINE_integer(
'openssl_speed_multi', None, 'Run multiple operations in parallel. '
'By default, equals to number of vCPUs available for benchmark.')
# TODO(user): Support additional options.
# Block sizes for encryption/decryption. Openssl speed loop through following
# block sizes and measure how fast system able to encrypt/decrypt.
BLOCKSIZES_IN_BYTES = [16, 64, 256, 1024, 8192, 16384]
def GetConfig(user_config):
  """Merge the user's config overrides into this benchmark's default config."""
  return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
  """No-op: this benchmark performs no per-run preparation."""
  del benchmark_spec
def ParseOpenSSLOutput(raw_result: str, version: str, parallelism: int):
  """Parse `openssl speed` output into one throughput Sample per block size.

  The 'evp' line of the report carries one "<value><unit>" field per block
  size in BLOCKSIZES_IN_BYTES, in order; each becomes a 'Throughput' Sample
  tagged with the run's flags, version and block size.
  """
  evp_fields = regex_util.ExtractExactlyOneMatch(
      r'evp\s+(.*)', raw_result).split()
  samples = []
  for position, blocksize in enumerate(BLOCKSIZES_IN_BYTES):
    value, unit = regex_util.ExtractExactlyOneMatch(
        r'([\d\.]+)(\w+)', evp_fields[position])
    metadata = {
        'duration': _OPENSSL_SPEED_DURATION.value,
        'algorithm': _OPENSSL_SPEED_ALGORITHM.value,
        'parallelism': parallelism,
        'version': version,
        'blocksize': blocksize
    }
    samples.append(
        sample.Sample('Throughput', float(value), unit, metadata))
  return samples
def Run(benchmark_spec):
  """Run openssl-speed on the target vm.
  Sample output:
  OpenSSL 1.1.1k 25 Mar 2021
  built on: Thu Mar 25 20:49:34 2021 UTC
  options:bn(64,64) rc4(16x,int) des(int) aes(partial) blowfish(ptr)
  compiler: gcc -fPIC -pthread -m64 -Wa ...
  evp 730303.56k 2506149.08k 4473725.34k 5640335.56k 6048576.31k 6107063.91k
  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
      required to run the benchmark.
  Returns:
    A list of sample.Sample object.
  """
  vms = benchmark_spec.vms
  vm = vms[0]
  # RemoteCommand returns (stdout, stderr); `openssl version` reports its
  # banner on stdout, which the original mislabeled as `stderr`.
  stdout, _ = vm.RemoteCommand('openssl version')
  version = regex_util.ExtractGroup(r'OpenSSL\s+([\w\.]+)\s+', stdout)
  # Default parallelism to the VM's available vCPUs.
  parallelism = _OPENSSL_SPEED_MULTI.value or vm.NumCpusForBenchmark()
  raw_result, _ = vm.RemoteCommand('openssl speed -elapsed '
                                   f'-seconds {_OPENSSL_SPEED_DURATION.value} '
                                   f'-evp {_OPENSSL_SPEED_ALGORITHM.value} '
                                   f'-multi {parallelism}')
  return ParseOpenSSLOutput(raw_result, version, parallelism)
def Cleanup(benchmark_spec):
  """No-op cleanup: nothing was installed, so there is nothing to remove.

  (The previous docstring claimed uninstalling happened here; it does not.)

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
      required to run the benchmark.
  """
  del benchmark_spec
| 34.367521 | 80 | 0.716737 |
edddd2446d906bfc0b93df47b6f18a45ac42bc79 | 3,026 | py | Python | imaginaire/third_party/flow_net/flownet2/networks/flownet_fusion.py | hw07216/imaginaire | 87c774114622e39488a5ea8a7728b1a20896afb9 | [
"RSA-MD"
] | 3,308 | 2020-07-15T17:50:13.000Z | 2022-03-31T14:53:31.000Z | imaginaire/third_party/flow_net/flownet2/networks/flownet_fusion.py | hw07216/imaginaire | 87c774114622e39488a5ea8a7728b1a20896afb9 | [
"RSA-MD"
] | 132 | 2020-09-20T17:36:28.000Z | 2022-03-28T12:40:03.000Z | src/imaginaire/third_party/flow_net/flownet2/networks/flownet_fusion.py | livingbio/imaginaire-fsvid2vid | d82c87aced50afd44fd162491ba5b59056b74034 | [
"RSA-MD"
] | 370 | 2020-09-29T00:34:08.000Z | 2022-03-30T04:12:48.000Z | # Copyright (C) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, check out LICENSE.md
# The file is duplicated from https://github.com/NVIDIA/flownet2-pytorch
# with some modifications.
from torch.nn import init
import torch
import torch.nn as nn
from .submodules import conv, i_conv, predict_flow, deconv
class FlowNetFusion(nn.Module):
    r"""FlowNet2 Fusion module. Check out the FlowNet2 paper for more details
    https://arxiv.org/abs/1612.01925

    A small encoder/decoder: two stride-2 conv stages contract the
    11-channel input, then flow is predicted and refined back up through
    two deconv stages with skip connections.

    Args:
        args (obj): Network initialization arguments (not read here; kept
            for constructor-signature compatibility).
        use_batch_norm (bool): Use batch norm or not. Default is true.
    """
    def __init__(self, args, use_batch_norm=True):
        super(FlowNetFusion, self).__init__()
        self.use_batch_norm = use_batch_norm
        # Encoder: 11 -> 64 -> 128 -> 128 channels (stride 2 at conv1/conv2).
        self.conv0 = conv(self.use_batch_norm, 11, 64)
        self.conv1 = conv(self.use_batch_norm, 64, 64, stride=2)
        self.conv1_1 = conv(self.use_batch_norm, 64, 128)
        self.conv2 = conv(self.use_batch_norm, 128, 128, stride=2)
        self.conv2_1 = conv(self.use_batch_norm, 128, 128)
        # Decoder channel math: 162 = 128 (skip) + 32 (deconv1) + 2 (flow);
        #                        82 =  64 (skip) + 16 (deconv0) + 2 (flow).
        self.deconv1 = deconv(128, 32)
        self.deconv0 = deconv(162, 16)
        self.inter_conv1 = i_conv(self.use_batch_norm, 162, 32)
        self.inter_conv0 = i_conv(self.use_batch_norm, 82, 16)
        # 2-channel flow prediction heads at 1/4, 1/2 and full resolution.
        self.predict_flow2 = predict_flow(128)
        self.predict_flow1 = predict_flow(32)
        self.predict_flow0 = predict_flow(16)
        # Learned 2x upsampling of the 2-channel flow between levels.
        self.upsampled_flow2_to_1 = nn.ConvTranspose2d(2, 2, 4, 2, 1)
        self.upsampled_flow1_to_0 = nn.ConvTranspose2d(2, 2, 4, 2, 1)
        # Xavier-uniform weights, uniform biases for all (de)conv layers.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                if m.bias is not None:
                    init.uniform_(m.bias)
                init.xavier_uniform_(m.weight)
            if isinstance(m, nn.ConvTranspose2d):
                if m.bias is not None:
                    init.uniform_(m.bias)
                init.xavier_uniform_(m.weight)
                # init_deconv_bilinear(m.weight)
    def forward(self, x):
        r"""
        Args:
            x (tensor): Input tensors of concatenated images.
        Returns:
            flow0 (tensor): Output flow tensor at input resolution.
                (Previously documented as flow2, but flow0 is returned.)
        """
        # Contracting path; skip tensors are kept for the decoder.
        out_conv0 = self.conv0(x)
        out_conv1 = self.conv1_1(self.conv1(out_conv0))
        out_conv2 = self.conv2_1(self.conv2(out_conv1))
        # Coarsest flow, then refine twice while upsampling.
        flow2 = self.predict_flow2(out_conv2)
        flow2_up = self.upsampled_flow2_to_1(flow2)
        out_deconv1 = self.deconv1(out_conv2)
        concat1 = torch.cat((out_conv1, out_deconv1, flow2_up), 1)
        out_interconv1 = self.inter_conv1(concat1)
        flow1 = self.predict_flow1(out_interconv1)
        flow1_up = self.upsampled_flow1_to_0(flow1)
        out_deconv0 = self.deconv0(concat1)
        concat0 = torch.cat((out_conv0, out_deconv0, flow1_up), 1)
        out_interconv0 = self.inter_conv0(concat0)
        flow0 = self.predict_flow0(out_interconv0)
        return flow0
| 36.457831 | 77 | 0.645737 |
fd609eb1c234e36a949fc7ca37f9baf61f7f61e0 | 20,550 | py | Python | flux_combined_high_binding/model_311.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | flux_combined_high_binding/model_311.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | flux_combined_high_binding/model_311.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | # exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
# Instantiate the global pysb model; every Monomer/Parameter/Rule/Observable/
# Initial declared below is registered with it via pysb's SelfExporter.
Model()
# --- Molecular species (Monomers) and their binding sites -------------------
# Suffix convention appears to be: *U unmodified, *T truncated, *A active,
# *pro pro-form, *M mitochondrial, *C cytosolic, *ub ubiquitinated —
# presumably; verify against the generating pipeline.
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
# BaxA carries two self-binding sites (BaxA_1/BaxA_2) used to chain
# subunits into a pore in the pore_formation_* rules below.
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6A', ['C8pro'])
Monomer('C6pro', ['C3A'])
# --- Kinetic rate parameters ------------------------------------------------
# All rates are placeholders (1.0); presumably they are sampled/fitted by the
# Bayesian inference driver named in the repo path. Suffixes: _2kf forward
# (bimolecular), _1kr reverse, _1kc catalytic step.
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
# NOTE(review): the _2df/_1dr and _2xf/_1xr suffixes below break the
# _2kf/_1kr naming convention used everywhere else — likely a quirk of the
# generator; confirm downstream code does not assume the kf/kr suffixes.
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2df', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1dr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2xf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1xr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
# --- Initial amounts (one *_0 parameter per species) ------------------------
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 35000.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 170000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6A_0', 0.0)
Parameter('C6pro_0', 100.0)
# --- Observables -------------------------------------------------------------
# One observable per monomer, using a pattern with no site conditions, so each
# tracks the total amount of that species regardless of binding state.
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6A_obs', C6A())
Observable('C6pro_obs', C6pro())
# --- Reaction rules ----------------------------------------------------------
# Generated catalysis is split in two rules: *_0 reversible binding of
# catalyzer and substrate, *_1 irreversible conversion to product.
# Receptor engagement and initiator caspase-8 activation (extrinsic pathway):
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
# Active caspase-8 truncates Bid (BidU -> BidT):
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
# Apoptosome assembly (ApafI + CytoC -> ApafA; ApafA + C9 -> Apop),
# caspase-3 activation by the apoptosome, and XIAP/Smac inhibition:
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
# Effector caspase-3 cleaves PARP:
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
# Bid translocation, Bax activation/self-activation, and Bcl-2 sequestration:
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2df, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1dr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2xf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1xr)
# Stepwise Bax pore assembly: dimer -> trimer -> 4-subunit ring
# (BaxA_1/BaxA_2 link successive subunits):
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
# Pore-mediated release of mitochondrial Smac and cytochrome c to the cytosol:
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
# Feedback loops: C8A activates C3, C3A activates C6, C6A activates C8:
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
# --- Initial conditions ------------------------------------------------------
# Each species starts fully unbound, at the amount given by its *_0 parameter.
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C6pro(C3A=None), C6pro_0)
| 95.138889 | 798 | 0.804136 |
9f19504c70d1e22254bc9760af753f76c8371b43 | 3,760 | py | Python | maskrcnn_benchmark/modeling/make_layers.py | megvii-model/DetNAS | aa92a90604c870fcb7e3ea9f60d16e6f107454d9 | [
"MIT"
] | 290 | 2019-10-26T03:37:41.000Z | 2022-03-07T11:16:34.000Z | maskrcnn_benchmark/modeling/make_layers.py | pawopawo/DetNAS | 49b4e458c5fe68765ec1590433114db7cda28810 | [
"MIT"
] | 37 | 2019-10-29T12:18:59.000Z | 2022-03-04T07:54:52.000Z | maskrcnn_benchmark/modeling/make_layers.py | pawopawo/DetNAS | 49b4e458c5fe68765ec1590433114db7cda28810 | [
"MIT"
] | 52 | 2019-10-26T13:13:55.000Z | 2022-01-18T00:57:08.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""
Miscellaneous utility functions
"""
import torch
from torch import nn
from torch.nn import functional as F
from maskrcnn_benchmark.config import cfg
from maskrcnn_benchmark.layers import Conv2d
from maskrcnn_benchmark.modeling.poolers import Pooler
from maskrcnn_benchmark.pytorch_distributed_syncbn.syncbn import DistributedSyncBN
def get_group_gn(dim, dim_per_gp, num_groups):
    """get number of groups used by GroupNorm, based on number of channels.

    Exactly one of `dim_per_gp` (channels per group) or `num_groups` may be
    set; the other must be -1.
    """
    assert dim_per_gp == -1 or num_groups == -1, \
        "GroupNorm: can only specify G or C/G."
    if dim_per_gp > 0:
        # Group size given: derive the group count from it.
        assert dim % dim_per_gp == 0, \
            "dim: {}, dim_per_gp: {}".format(dim, dim_per_gp)
        return dim // dim_per_gp
    # Group count given directly; it must divide the channel count.
    assert dim % num_groups == 0, \
        "dim: {}, num_groups: {}".format(dim, num_groups)
    return num_groups
def group_norm(out_channels, affine=True, divisor=1):
    """Build a torch.nn.GroupNorm layer configured from the global cfg.

    `divisor` scales down both the channel count and the configured
    group-size/group-count values before the layer is constructed.
    """
    channels = out_channels // divisor
    per_group = cfg.MODEL.GROUP_NORM.DIM_PER_GP // divisor
    n_groups = cfg.MODEL.GROUP_NORM.NUM_GROUPS // divisor
    epsilon = cfg.MODEL.GROUP_NORM.EPSILON  # default: 1e-5
    return torch.nn.GroupNorm(
        get_group_gn(channels, per_group, n_groups),
        channels,
        epsilon,
        affine,
    )
def make_conv3x3(
    in_channels,
    out_channels,
    dilation=1,
    stride=1,
    use_gn=False,
    use_relu=False,
    kaiming_init=True
):
    """Build a 3x3 convolution, optionally followed by GroupNorm and ReLU.

    Args:
        in_channels (int): number of input channels.
        out_channels (int): number of output channels.
        dilation (int): dilation rate; it is also used as padding, which
            preserves the spatial size for stride 1.
        stride (int): convolution stride.
        use_gn (bool): append a ``group_norm`` layer; the conv bias is then
            disabled because the normalization's affine shift subsumes it.
        use_relu (bool): append a ReLU activation.
        kaiming_init (bool): Kaiming-normal weight init (fan_out / relu);
            otherwise normal init with std 0.01.

    Returns:
        The bare Conv2d, or an ``nn.Sequential`` when GN/ReLU are appended.
    """
    conv = Conv2d(
        in_channels,
        out_channels,
        kernel_size=3,
        stride=stride,
        padding=dilation,
        dilation=dilation,
        bias=not use_gn,  # idiomatic form of `False if use_gn else True`
    )
    if kaiming_init:
        nn.init.kaiming_normal_(
            conv.weight, mode="fan_out", nonlinearity="relu"
        )
    else:
        torch.nn.init.normal_(conv.weight, std=0.01)
    if not use_gn:
        nn.init.constant_(conv.bias, 0)
    module = [conv]
    if use_gn:
        module.append(group_norm(out_channels))
    if use_relu:
        # inplace=False deliberately (original had inplace=True disabled).
        module.append(nn.ReLU(inplace=False))
    if len(module) > 1:
        return nn.Sequential(*module)
    return conv
def make_fc(dim_in, hidden_dim, use_gn=False):
    """Build a fully connected layer, optionally followed by GroupNorm.

    Caffe2 implementation uses XavierFill, which in fact
    corresponds to kaiming_uniform_ in PyTorch.
    """
    fc = nn.Linear(dim_in, hidden_dim, bias=not use_gn)
    nn.init.kaiming_uniform_(fc.weight, a=1)
    if use_gn:
        # Bias is omitted above; GroupNorm provides the affine shift.
        return nn.Sequential(fc, group_norm(hidden_dim))
    nn.init.constant_(fc.bias, 0)
    return fc
def conv_with_kaiming_uniform(use_gn=False, use_relu=False, use_syncbn=False):
    """Return a conv-layer factory with Caffe2-style (Kaiming-uniform) init.

    The returned callable builds a Conv2d followed, optionally, by a
    normalization layer (SyncBN takes precedence over GroupNorm) and a ReLU.
    """
    def make_conv(
        in_channels, out_channels, kernel_size, stride=1, dilation=1
    ):
        conv = Conv2d(
            in_channels,
            out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=dilation * (kernel_size - 1) // 2,  # "same" padding for stride 1
            dilation=dilation,
            bias=not use_gn,
        )
        # Caffe2 implementation uses XavierFill, which in fact
        # corresponds to kaiming_uniform_ in PyTorch
        nn.init.kaiming_uniform_(conv.weight, a=1)
        if not use_gn:
            nn.init.constant_(conv.bias, 0)
        layers = [conv]
        if use_syncbn:
            layers.append(DistributedSyncBN(out_channels))
        elif use_gn:
            layers.append(group_norm(out_channels))
        if use_relu:
            layers.append(nn.ReLU(inplace=False))
        if len(layers) > 1:
            return nn.Sequential(*layers)
        return conv
    return make_conv
| 29.84127 | 82 | 0.639362 |
5084c07b0b1f393b3863cf69f142e97dbb672dcc | 5,173 | py | Python | model_driven_method/main.py | vjhansen/IRSTD | 0470b6bd14701bfc12737f774686b84b03d48e1d | [
"MIT"
] | 2 | 2021-06-23T13:16:50.000Z | 2021-09-14T13:25:02.000Z | model_driven_method/main.py | vjhansen/IRSTD | 0470b6bd14701bfc12737f774686b84b03d48e1d | [
"MIT"
] | null | null | null | model_driven_method/main.py | vjhansen/IRSTD | 0470b6bd14701bfc12737f774686b84b03d48e1d | [
"MIT"
] | 2 | 2021-09-14T13:25:58.000Z | 2021-09-29T03:29:26.000Z | """
Model-driven approach for IR Small Target Detection
Concept based on:
C. Gao, D. Meng, Y. Yang, Y. Wang, X. Zhou and A. G. Hauptmann,
"Infrared Patch-Image Model for Small Target Detection in a Single Image,"
in IEEE Transactions on Image Processing, vol. 22, no. 12, pp. 4996-5009,
Dec. 2013, doi: 10.1109/TIP.2013.2281420.
"""
import os
import time
import cv2
import numpy as np
from matplotlib import pyplot as plt
from PIL import Image
from md_utils import get_target_loc, pts_near, read_xml
from pcp import pcp_func
# --- Paths and output directory ---------------------------------------------
cwd = os.getcwd()
TEST_DIR = cwd+"/model_driven_method/test_imgs/"
#TEST_DIR = "../dataset/dataset_images/target_test/"
img_dir = os.listdir(TEST_DIR)
SAVE_DIR = 'model_driven_method/detection_pics/'
if not os.path.exists(SAVE_DIR):
    os.makedirs(SAVE_DIR)
# Only PNG frames are processed.
filelist = [file for file in img_dir if file.endswith('.png')]
# --- Running statistics across the whole test set ---------------------------
TOTAL_TIME = 0
TRUE_POS = 0
FALSE_POS = 0
FALSE_NEG = 0
TOTAL_GT_OBJ = 0
images = []  # NOTE(review): never appended to in this script — possibly dead.
img_filename = []
total_detc = []
# --- RPCA / detection hyper-parameters ---------------------------------------
MAX_IT_PARAM = 500       # max iterations for the PCP solver
TOL_PARAM = 1e-2         # PCP convergence tolerance
METHOD_PARAM = 'ialm' # or apg
THRESH_PARAM = 150       # binarization threshold on the sparse (target) image
SLIDEWIN_STEP_SIZE = 20  # sliding-window step for patch-image construction
SLIDEWIN_PATCH_SIZE = 80 # sliding-window patch size
DELTA = 4                # passed to get_target_loc (see md_utils)
# Main evaluation loop: run PCP-based target extraction on each test image and
# score the detections against the XML ground truth.
# NOTE(review): indentation was lost in this copy; the nesting below is a
# reconstruction — confirm against the original repository.
for it, file in enumerate(filelist):
    if file.split(".")[-1] == 'png':  # redundant: filelist is already .png-only
        fullpath = TEST_DIR + file
        # Convert to grayscale and round-trip through a temp JPEG.
        tmp_img = Image.open(fullpath).convert("L")
        tmp_img.save('img.jpg')
        # Ground-truth annotations: one XML per image, same basename.
        if os.path.isfile(fullpath):
            read_xml_file = read_xml(TEST_DIR, file.split(".")[0])
            GT_OBJECTS_IN_IMG = len(read_xml_file)
        else:
            GT_OBJECTS_IN_IMG = 0
        img = plt.imread('img.jpg')
        m, n = img.shape
        im_shape = (m, n)
        # Low-rank / sparse decomposition; T is the sparse (target) component.
        start = time.time()
        T = pcp_func(
            img,
            im_shape,
            max_iter=MAX_IT_PARAM,
            tol=TOL_PARAM,
            method=METHOD_PARAM,
            sw_step_size=SLIDEWIN_STEP_SIZE,
            sw_ptch_sz=SLIDEWIN_PATCH_SIZE)
        end = time.time()
        round_time = end-start
        TOTAL_TIME = TOTAL_TIME + round_time
        print("Total time: %.2f s" % round_time)
        TOTAL_GT_OBJ = GT_OBJECTS_IN_IMG + TOTAL_GT_OBJ
        img_filename.append(file.split(".")[0])
        plt.imsave('t_img.jpg', T.reshape(im_shape), cmap='gray')
        print(str(GT_OBJECTS_IN_IMG) + ' object(s) in ' + file)
        # Threshold the target image and extract predicted target centres.
        circ_img_rgb, pcx_pos, pcy_pos = get_target_loc('t_img.jpg',
                                                        thresh=THRESH_PARAM,
                                                        delta=DELTA)
        total_detc.append(pcx_pos)
        gtcx_arr = []
        gtcy_arr = []
        status_img = []
        if GT_OBJECTS_IN_IMG != 0:  # GT objects in image
            # Draw each ground-truth centre (red circle) and collect centres.
            for iter1 in range(GT_OBJECTS_IN_IMG):
                ymin_xml = read_xml_file[iter1][2]
                xmin_xml = read_xml_file[iter1][1]
                ymax_xml = read_xml_file[iter1][4]
                xmax_xml = read_xml_file[iter1][3]
                cx_xml = int((xmax_xml + xmin_xml) // 2)
                cy_xml = int((ymax_xml + ymin_xml) // 2)
                cv2.circle(circ_img_rgb, (cx_xml, cy_xml), 10, (0, 0, 255), 2)
                gtcx_arr.append(cx_xml)
                gtcy_arr.append(cy_xml)
        if len(pcx_pos) != 0:
            # Sort predictions and GT by x so they can be paired up in order.
            p_order = np.argsort(pcx_pos)
            gt_order = np.argsort(gtcx_arr)
            # Coarse, count-based classification of this image.
            if GT_OBJECTS_IN_IMG == len(pcx_pos):
                TRUE_POS += 1
                IM_STATUS = 'TP_'
            elif GT_OBJECTS_IN_IMG - len(pcx_pos) > 0:
                FALSE_NEG += 1
                IM_STATUS = 'FN_'
            elif (len(pcx_pos) - GT_OBJECTS_IN_IMG > 0) or \
                    (GT_OBJECTS_IN_IMG == 0 and len(pcx_pos) != 0):
                FALSE_POS += 1
                IM_STATUS = 'FP_'
            # Per-detection proximity check against paired GT centres.
            # NOTE(review): TRUE_POS/FALSE_* are incremented again inside this
            # loop on top of the count-based increments above, so the totals
            # can double-count — verify this is intended.
            for it1, it2 in zip(range(len(pcx_pos)),
                                range(GT_OBJECTS_IN_IMG)):
                pred_bbx = {
                    "centre_x": pcx_pos[p_order[it1]],
                    "centre_y": pcy_pos[p_order[it1]]
                }
                gt_bbx = {
                    "centre_x": gtcx_arr[gt_order[it2]],
                    "centre_y": gtcy_arr[gt_order[it2]]
                }
                # return true if objects are within proximity
                PTS_CLOSE = pts_near(gt_bbx, pred_bbx, rad=5)
                status_img.append(PTS_CLOSE)
                if PTS_CLOSE and GT_OBJECTS_IN_IMG == len(pcx_pos):
                    TRUE_POS += 1
                    if sum(status_img) == GT_OBJECTS_IN_IMG:
                        # only if num(TRUE_POS) for this file == num(gt_obj_in_img)
                        IM_STATUS = 'TP_'
                    else:
                        FALSE_NEG += 1
                        IM_STATUS = 'FN_'
                elif not(PTS_CLOSE) and len(pcx_pos) > GT_OBJECTS_IN_IMG:
                    FALSE_POS += 1
                    # only if num(False_POS) > num(gt_obj_in_img)
                    IM_STATUS = 'FP_'
        elif GT_OBJECTS_IN_IMG == 0 and len(pcx_pos) == 0:
            # No GT objects and nothing detected: true negative image.
            IM_STATUS = 'TN_'
        elif GT_OBJECTS_IN_IMG - len(pcx_pos) > 0 and len(pcx_pos) == 0:
            # GT objects present but nothing detected: missed detections.
            FALSE_NEG += 1
            IM_STATUS = 'FN_'
        # Save the annotated image, encoding the verdict and the
        # hyper-parameters in the output filename.
        cv2.imwrite(SAVE_DIR+IM_STATUS+'_'+METHOD_PARAM+'_'+str(TOL_PARAM)+'_'+str(MAX_IT_PARAM) +
                    '_'+str(THRESH_PARAM)+'_'+file.split(".")[0]+'_target.jpg', circ_img_rgb)
# Final summary: mean runtime per image and the aggregated confusion counts.
avg_time = TOTAL_TIME/(len(filelist))
print("Avg. time per img.: %.2f s" % avg_time)
print("TP: ", TRUE_POS)
print("FP: ", FALSE_POS)
print("FN: ", FALSE_NEG)
| 31.932099 | 94 | 0.586507 |
96a6765e518e989dbb7d60315c62e33058ee2b54 | 654 | py | Python | leetcode/easy_top_interview_question/design/minstack.py | alvinctk/google-tech-dev-guide | 9d7759bea1f44673c2de4f25a94b27368928a59f | [
"Apache-2.0"
] | 26 | 2019-06-07T05:29:47.000Z | 2022-03-19T15:32:27.000Z | leetcode/easy_top_interview_question/design/minstack.py | alvinctk/google-tech-dev-guide | 9d7759bea1f44673c2de4f25a94b27368928a59f | [
"Apache-2.0"
] | null | null | null | leetcode/easy_top_interview_question/design/minstack.py | alvinctk/google-tech-dev-guide | 9d7759bea1f44673c2de4f25a94b27368928a59f | [
"Apache-2.0"
] | 6 | 2019-10-10T06:39:28.000Z | 2020-05-12T19:50:55.000Z | class MinStack:
def __init__(self):
"""
initialize your data structure here.
"""
self.min = []
self.stack = []
def push(self, x: int) -> None:
self.stack.append(x)
self.min.append(min(self.min[-1] if self.min else float("inf"), x))
def pop(self) -> int:
    """Remove and return the top element, keeping the min stack in sync.

    Note: the upstream LeetCode stub annotated this ``-> None``, but the
    method does return the popped value; the annotation is corrected here.
    """
    self.min.pop()
    return self.stack.pop()
def top(self) -> int:
return self.stack[-1]
def getMin(self) -> int:
return self.min[-1]
# Your MinStack object will be instantiated and called as such:
# obj = MinStack()
# obj.push(x)
# obj.pop()
# param_3 = obj.top()
# param_4 = obj.getMin()
| 21.096774 | 75 | 0.553517 |
fb7ea75d424ab14adb0e45d064761ffc4a60eeb3 | 2,802 | py | Python | labgraph/events/event_generator_node.py | leaflabs/labgraph | e95eb3e6fed0aef8a50f1a1bbf353cf4c46aa76e | [
"MIT"
] | 1 | 2021-08-01T06:31:08.000Z | 2021-08-01T06:31:08.000Z | labgraph/events/event_generator_node.py | VanEdward/labgraph | 9488feac59f9ef86091befdeaddb69d84e4d6fb3 | [
"MIT"
] | null | null | null | labgraph/events/event_generator_node.py | VanEdward/labgraph | 9488feac59f9ef86091befdeaddb69d84e4d6fb3 | [
"MIT"
] | 1 | 2021-12-28T18:52:58.000Z | 2021-12-28T18:52:58.000Z | #!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
from abc import abstractmethod
from time import time # TODO: Replace with LabGraph clock
from typing import Any, Dict, List, Tuple
from ..graphs.method import AsyncPublisher, get_method_metadata
from ..graphs.node import Node, NodeMeta
from ..graphs.topic import Topic
from .event_generator import BaseEventGenerator, EventPublishingHeap
from .event_messages import WaitEndMessage
# Timing constants, in seconds (the module uses time.time()).
# NOTE(review): neither constant is referenced in this portion of the module;
# presumably they are used by subclasses/other modules — verify before removal.
CHECK_FOR_WAIT_COMPLETION_DELAY = 0.1
ACCEPTABLE_PUBLISH_TIME_DIFF = 0.01
class BaseEventGeneratorNodeMeta(NodeMeta):
    """
    Metaclass for EventGeneratorNodes. This metaclass is responsible
    for dynamically populating the `publish_events` function on the
    derived event generator with @publisher decorators for all topics
    defined on the class.
    """

    # Name of the method on the class body that publishes generated events.
    # NOTE(review): fields[...] below raises KeyError if a subclass body does
    # not define `publish_events` — presumably guaranteed by the base class.
    _PUBLISH_FUNCTION_KEY = "publish_events"

    def __init__(
        cls, name: str, bases: Tuple[type, ...], fields: Dict[str, Any]
    ) -> None:
        # Pre-process topics before NodeMeta
        topics: List[Topic] = []
        for field_value in fields.values():
            if isinstance(field_value, Topic):
                # WaitEndMessage topics are subscribed to, never published;
                # all other topics become published topics of publish_events.
                if field_value.message_type is not WaitEndMessage:
                    topics.append(field_value)
        publishing_func = fields[cls._PUBLISH_FUNCTION_KEY]
        if len(topics) > 0:
            # Attach the publishable topics to publish_events' method metadata
            # (equivalent to decorating it with @publisher for each topic).
            metadata = get_method_metadata(publishing_func)
            metadata.published_topics = topics
        super(BaseEventGeneratorNodeMeta, cls).__init__(name, bases, fields)
class BaseEventGeneratorNode(Node, metaclass=BaseEventGeneratorNodeMeta):
"""
An abstract base class for an EventGeneratorNode, which publishes
messages from its event generator based on times specified for
each message.
"""
def __init__(self) -> None:
super(BaseEventGeneratorNode, self).__init__()
self._start_time: float = time()
def _time_elapsed_since_start(self) -> float:
return time() - self._start_time
def setup_generator(self, generator: BaseEventGenerator) -> None:
"""
Saves a generator to the node. Should be overridden to
perform any necessary setup for the generator.
"""
self._generator = generator
def generate_events(self) -> EventPublishingHeap:
"""
Returns the heap of events generated by the generator associated
with this node.
"""
return self._generator.generate_events()
@abstractmethod
async def publish_events(self) -> AsyncPublisher:
"""
Publishes the events returned from `generate_events` based on the time
specified for each event in the graph.
"""
raise NotImplementedError()
| 32.581395 | 78 | 0.691292 |
134c381ae095720c3b305be9d564682c13f181b3 | 749 | py | Python | app/feedreader/migrations/0006_mptt_update.py | jawsper/feedreader | b2b4d8151a786c822e1b59e93d2d8e8959cd210d | [
"MIT"
] | null | null | null | app/feedreader/migrations/0006_mptt_update.py | jawsper/feedreader | b2b4d8151a786c822e1b59e93d2d8e8959cd210d | [
"MIT"
] | 28 | 2017-03-16T14:39:53.000Z | 2022-02-10T09:52:58.000Z | app/feedreader/migrations/0006_mptt_update.py | jawsper/feedreader | b2b4d8151a786c822e1b59e93d2d8e8959cd210d | [
"MIT"
] | null | null | null | # Generated by Django 2.2.5 on 2019-09-15 06:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("feedreader", "0005_feed_quirk_fix_invalid_publication_date"),
]
operations = [
migrations.AlterField(
model_name="outline",
name="level",
field=models.PositiveIntegerField(editable=False),
),
migrations.AlterField(
model_name="outline",
name="lft",
field=models.PositiveIntegerField(editable=False),
),
migrations.AlterField(
model_name="outline",
name="rght",
field=models.PositiveIntegerField(editable=False),
),
]
| 25.827586 | 71 | 0.59279 |
6d196a6626ae5835ccc5f61584026266d4551be2 | 1,477 | py | Python | route/filter_inter_wiki_delete.py | k0000k/openNAMU | b5862a7e5a1f1a2a6bee5eec5b3d9784528f42e8 | [
"BSD-3-Clause"
] | 3 | 2018-10-06T09:02:34.000Z | 2018-10-20T02:42:31.000Z | route/filter_inter_wiki_delete.py | k0000k/openNAMU | b5862a7e5a1f1a2a6bee5eec5b3d9784528f42e8 | [
"BSD-3-Clause"
] | 42 | 2018-09-16T16:30:54.000Z | 2018-11-24T17:45:08.000Z | route/filter_inter_wiki_delete.py | k0000k/openNAMU | b5862a7e5a1f1a2a6bee5eec5b3d9784528f42e8 | [
"BSD-3-Clause"
] | 6 | 2018-09-23T12:29:19.000Z | 2018-11-24T17:31:35.000Z | from .tool.func import *
def filter_inter_wiki_delete(tool, name = 'Test'):
with get_db_connect() as conn:
curs = conn.cursor()
if admin_check(None, tool) != 1:
return re_error('/error/3')
if tool == 'del_inter_wiki':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'inter_wiki'"), [name])
elif tool == 'del_edit_filter':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'regex_filter'"), [name])
elif tool == 'del_name_filter':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'name'"), [name])
elif tool == 'del_file_filter':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'file'"), [name])
elif tool == 'del_email_filter':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'email'"), [name])
elif tool == 'del_image_license':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'image_license'"), [name])
elif tool == 'del_extension_filter':
curs.execute(db_change("delete from html_filter where html = ? and kind = 'extension'"), [name])
else:
curs.execute(db_change("delete from html_filter where html = ? and kind = 'edit_top'"), [name])
conn.commit()
return redirect('/' + re.sub(r'^del_', '', tool)) | 50.931034 | 112 | 0.607989 |
09c4da1f824b9466b954a47a4b9ac5ac4f78fc89 | 404 | py | Python | tk12.py | NomuFuga/tkinter_sample | 5e44496fb7fb96180a2060a327f8792bccdd7974 | [
"MIT"
] | null | null | null | tk12.py | NomuFuga/tkinter_sample | 5e44496fb7fb96180a2060a327f8792bccdd7974 | [
"MIT"
] | null | null | null | tk12.py | NomuFuga/tkinter_sample | 5e44496fb7fb96180a2060a327f8792bccdd7974 | [
"MIT"
] | null | null | null | import tkinter as tk
root = tk.Tk()
root.geometry("200x150")
lb_rgb = tk.Label(text="rgb",fg="#000",bg="#fff")
lb_rrggbb = tk.Label(text="rrggbb",fg="#abcdef",bg="#123456")
lb_rrrgggbbb = tk.Label(text="rrrgggbbb",fg="#123456789",bg="#987abcdef")
lb_colorname = tk.Label(text="colorname",fg="magenta",bg="yellow")
[widget.pack() for widget in (lb_rgb,lb_rrggbb,lb_rrrgggbbb,lb_colorname)]
root.mainloop() | 44.888889 | 74 | 0.720297 |
bf18057996e17e79aa81122a227102fb6ed27620 | 8,218 | py | Python | genweb/scholarship/api/scholarships.py | UPCnet/genweb.scholarship | 8661e2271dc3489934de5330ebfdcbd9df439991 | [
"MIT"
] | null | null | null | genweb/scholarship/api/scholarships.py | UPCnet/genweb.scholarship | 8661e2271dc3489934de5330ebfdcbd9df439991 | [
"MIT"
] | null | null | null | genweb/scholarship/api/scholarships.py | UPCnet/genweb.scholarship | 8661e2271dc3489934de5330ebfdcbd9df439991 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from Products.CMFPlone.interfaces import IPloneSiteRoot
from plone import api
from five import grok
from genweb.scholarship.api import ApiResponse
from genweb.scholarship.api import ObjectNotFound
from genweb.scholarship.api import REST
from genweb.scholarship.api import api_resource
from genweb.scholarship.api.root import APIRoot
class Scholarships(REST):
"""
/api/scholarships
and
/api/scholarships/sch_ID
Get all Scholarships by "X-Oauth-Username"
"""
placeholder_type = "scholarship"
placeholder_id = 'sch_id'
grok.adapts(APIRoot, IPloneSiteRoot)
@api_resource(required=[])
def GET(self):
results = []
properties = api.portal.get_tool(name='portal_properties')
sch_token = properties.scholarship_properties.sch_token
try:
lang = self.params['lang']
except:
lang = 'ca'
if self.request.get_header('Token') == sch_token:
scholarships = api.content.find(
portal_type="Scholarship",
review_state=['published'],
sort_order='descending',
sort_on='effective',
Language=lang,
)
total = len(scholarships)
items_x_page = 10 # Default items per page
pagination_page = self.params.pop('page', None)
if pagination_page:
if pagination_page == 'all':
more_items = False
else:
if pagination_page == '0':
pagination_page = 1
start = int(items_x_page) * (int(pagination_page) - 1)
end = int(items_x_page) * int(pagination_page)
scholarships = scholarships[start:end]
more_items = True if end < total else False
else:
# Don't page, return first 10 => ?page=1
scholarships = scholarships[0:items_x_page]
more_items = True if items_x_page < total else False
for item in scholarships:
obj = item.getObject()
scholarship_type = obj.scholarship_type
start_date = obj.start_date.strftime("%d/%m/%Y") if obj.start_date else ''
deadline = obj.deadline.strftime("%d/%m/%Y") if obj.deadline else ''
sch_path = '/'.join(obj.getPhysicalPath()[3:])
scholarship = dict(title=item.Title,
id=item.id,
summary=obj.summary.output if obj.summary else '',
path=item.getURL(),
sch_path=sch_path,
scholarship_type=scholarship_type,
start_date=start_date,
end_date=deadline,
)
results.append(scholarship)
values = dict(status=200,
items=results,
more_items=more_items,
total=total)
else:
values = dict(status=403,
items=results,
more_items=False,
total=0)
return ApiResponse(values)
class Scholarship(REST):
"""
/api/scholarships/{sch_id}
"""
grok.adapts(Scholarships, IPloneSiteRoot)
def __init__(self, context, request):
super(Scholarship, self).__init__(context, request)
# /api/scholarships/{obj_path_id}?sch_path={sch_path}
@api_resource(required=['sch_id'])
def GET(self):
properties = api.portal.get_tool(name='portal_properties')
sch_token = properties.scholarship_properties.sch_token
if self.request.get_header('Token') == sch_token:
root_path = '/'.join(api.portal.get().getPhysicalPath())
sch_path = self.params['sch_path']
path = root_path + '/' + sch_path
items = api.content.find(portal_type="Scholarship",
path=path)
if items:
for item in items:
obj = item.getObject()
summary = obj.summary.output if obj.summary else ''
scholarship_type = obj.scholarship_type
organism = obj.organism.output if obj.organism else ''
recipients = obj.recipients.output if obj.recipients else ''
others = obj.others.output if obj.others else ''
general = obj.general_requirements.output if obj.general_requirements else ''
academic = obj.academic_requirements.output if obj.academic_requirements else ''
economic = obj.economic_requirements.output if obj.economic_requirements else ''
incompatibilities = obj.incompatibilities.output if obj.incompatibilities else ''
start_date = obj.start_date.strftime("%d/%m/%Y") if obj.start_date else ''
deadline = obj.deadline.strftime("%d/%m/%Y") if obj.deadline else ''
submission = obj.submission.output if obj.submission else ''
documentation = obj.documentation.output if obj.documentation else ''
amount = obj.amount.output if obj.amount else ''
additional_amount = obj.additional_amount.output if obj.additional_amount else ''
duration = obj.duration.output if obj.duration else ''
payment = obj.payment.output if obj.payment else ''
beneficiaries = obj.beneficiaries.output if obj.beneficiaries else ''
criteria = obj.criteria.output if obj.criteria else ''
award_date = obj.award_date.strftime("%d/%m/%Y") if obj.award_date else ''
award_resolution = obj.award_resolution.output if obj.award_resolution else ''
allegations = obj.allegations.output if obj.allegations else ''
regulations = obj.regulations.output if obj.regulations else ''
scholarship = dict(status=200,
title=item.Title,
id=item.id,
summary=summary,
path=item.getURL(),
absolute_url=obj.absolute_url_path(),
organism=organism,
recipients=recipients,
others=others,
general=general,
academic=academic,
economic=economic,
incompatibilities=incompatibilities,
scholarship_type=scholarship_type,
start_date=start_date,
end_date=deadline,
submission=submission,
documentation=documentation,
amount=amount,
additional_amount=additional_amount,
duration=duration,
payment=payment,
beneficiaries=beneficiaries,
criteria=criteria,
award_date=award_date,
award_resolution=award_resolution,
allegations=allegations,
regulations=regulations,
)
else:
raise ObjectNotFound('Scholarship not found')
else:
scholarship = dict(status=403)
return ApiResponse(scholarship)
| 48.627219 | 101 | 0.503285 |
b98f84d6311b2fec5d422f2e5ddd73fcd66c84e1 | 1,693 | py | Python | examples/07-filter/06-mask.py | pepsipepsi/nodebox_opengl_python3 | cfb2633df1055a028672b11311603cc2241a1378 | [
"BSD-3-Clause"
] | 1 | 2017-03-19T16:56:46.000Z | 2017-03-19T16:56:46.000Z | examples/07-filter/06-mask.py | pepsipepsi/nodebox_opengl_python3 | cfb2633df1055a028672b11311603cc2241a1378 | [
"BSD-3-Clause"
] | null | null | null | examples/07-filter/06-mask.py | pepsipepsi/nodebox_opengl_python3 | cfb2633df1055a028672b11311603cc2241a1378 | [
"BSD-3-Clause"
] | null | null | null | import os, sys
sys.path.insert(0, os.path.join("..",".."))
from nodebox.graphics.context import *
from nodebox.graphics import *
from nodebox.graphics.shader import gradient, RADIAL, mask, invert
# Render a radial gradient image.
# Without additional parameters, the gradient will be grayscale.
g = gradient(350, 350, type=RADIAL)
# The mask() filter covers one image with another (grayscale) image.
# You can use the grayscale() filter to make image black & white.
# The mask will hide the source image where the mask is black.
# We use the radial gradient as a mask.
# The radial gradient is white at the edges and black at the center.
# We invert it so we get black edges.
# The result is that the source image will gradually fade away at the edges.
img = Image("dendrite.png")
img = mask(img, invert(g))
# Crop the source image to the size of the mask.
# Our mask is smaller than the source image, so beyond it is still pixel data
# but we no longer need it.
img = crop(img, x=0, y=0, width=350, height=350)
def draw(canvas):
#canvas.clear()
# Each frame, paint a new image to the canvas.
# Since its edges are transparent, all images blend into each other.
# This is a useful technique if you want to create random,
# procedural textures (e.g. tree back, rust & dirt, clouded sky, ...)
translate(random(450), random(450))
rotate(random(360))
translate(-img.width/2, -img.height/2) # Rotate from image center.
image(img)
# Start the application:
canvas.fps = 5 # Slow framerate so we can observe what is happening.
canvas.size = 500, 500 # This is a bad idea since keyboard events
canvas.run(draw) # are now logged very slowly. | 37.622222 | 77 | 0.712936 |
66bd3f180e0a61bc7431f53f899e717cadde6a04 | 385 | py | Python | examples/data/infos.py | axju/dogsbody | 05a95b8925e0c560040727e603e4591fff1b9bc3 | [
"MIT"
] | null | null | null | examples/data/infos.py | axju/dogsbody | 05a95b8925e0c560040727e603e4591fff1b9bc3 | [
"MIT"
] | null | null | null | examples/data/infos.py | axju/dogsbody | 05a95b8925e0c560040727e603e4591fff1b9bc3 | [
"MIT"
] | null | null | null | from pkg_resources import working_set
from dogsbody import runtime
packages = {}
for dist in list(working_set):
packages[dist.project_name] = dist.version
length = max([len(name) for name in packages])
with open(runtime.SOURCE.parent / 'infos.txt', 'w') as target:
for name, version in packages.items():
target.write('{1:<{0}s} {2}\n'.format(length, name, version))
| 29.615385 | 69 | 0.703896 |
e8386111baf7cb71746a978637dabc9a0155d8cd | 12,173 | py | Python | assignment2/cs231n/solver.py | lalithnag/cs231n | ed540c4ed06a6ee01966314e4106b8c44f58546b | [
"MIT"
] | null | null | null | assignment2/cs231n/solver.py | lalithnag/cs231n | ed540c4ed06a6ee01966314e4106b8c44f58546b | [
"MIT"
] | null | null | null | assignment2/cs231n/solver.py | lalithnag/cs231n | ed540c4ed06a6ee01966314e4106b8c44f58546b | [
"MIT"
] | null | null | null | from __future__ import print_function, division
from future import standard_library
standard_library.install_aliases()
from builtins import range
from builtins import object
import os
import pickle as pickle
import numpy as np
from cs231n import optim
class Solver(object):
"""
A Solver encapsulates all the logic necessary for training classification
models. The Solver performs stochastic gradient descent using different
update rules defined in optim.py.
The solver accepts both training and validataion data and labels so it can
periodically check classification accuracy on both training and validation
data to watch out for overfitting.
To train a model, you will first construct a Solver instance, passing the
model, dataset, and various options (learning rate, batch size, etc) to the
constructor. You will then call the train() method to run the optimization
procedure and train the model.
After the train() method returns, model.params will contain the parameters
that performed best on the validation set over the course of training.
In addition, the instance variable solver.loss_history will contain a list
of all losses encountered during training and the instance variables
solver.train_acc_history and solver.val_acc_history will be lists of the
accuracies of the model on the training and validation set at each epoch.
Example usage might look something like this:
data = {
'X_train': # training data
'y_train': # training labels
'X_val': # validation data
'y_val': # validation labels
}
model = MyAwesomeModel(hidden_size=100, reg=10)
solver = Solver(model, data,
update_rule='sgd',
optim_config={
'learning_rate': 1e-3,
},
lr_decay=0.95,
num_epochs=10, batch_size=100,
print_every=100)
solver.train()
A Solver works on a model object that must conform to the following API:
- model.params must be a dictionary mapping string parameter names to numpy
arrays containing parameter values.
- model.loss(X, y) must be a function that computes training-time loss and
gradients, and test-time classification scores, with the following inputs
and outputs:
Inputs:
- X: Array giving a minibatch of input data of shape (N, d_1, ..., d_k)
- y: Array of labels, of shape (N,) giving labels for X where y[i] is the
label for X[i].
Returns:
If y is None, run a test-time forward pass and return:
- scores: Array of shape (N, C) giving classification scores for X where
scores[i, c] gives the score of class c for X[i].
If y is not None, run a training time forward and backward pass and
return a tuple of:
- loss: Scalar giving the loss
- grads: Dictionary with the same keys as self.params mapping parameter
names to gradients of the loss with respect to those parameters.
"""
def __init__(self, model, data, **kwargs):
"""
Construct a new Solver instance.
Required arguments:
- model: A model object conforming to the API described above
- data: A dictionary of training and validation data containing:
'X_train': Array, shape (N_train, d_1, ..., d_k) of training images
'X_val': Array, shape (N_val, d_1, ..., d_k) of validation images
'y_train': Array, shape (N_train,) of labels for training images
'y_val': Array, shape (N_val,) of labels for validation images
Optional arguments:
- update_rule: A string giving the name of an update rule in optim.py.
Default is 'sgd'.
- optim_config: A dictionary containing hyperparameters that will be
passed to the chosen update rule. Each update rule requires different
hyperparameters (see optim.py) but all update rules require a
'learning_rate' parameter so that should always be present.
- lr_decay: A scalar for learning rate decay; after each epoch the
learning rate is multiplied by this value.
- batch_size: Size of minibatches used to compute loss and gradient
during training.
- num_epochs: The number of epochs to run for during training.
- print_every: Integer; training losses will be printed every
print_every iterations.
- verbose: Boolean; if set to false then no output will be printed
during training.
- num_train_samples: Number of training samples used to check training
accuracy; default is 1000; set to None to use entire training set.
- num_val_samples: Number of validation samples to use to check val
accuracy; default is None, which uses the entire validation set.
- checkpoint_name: If not None, then save model checkpoints here every
epoch.
"""
self.model = model
self.X_train = data['X_train']
self.y_train = data['y_train']
self.X_val = data['X_val']
self.y_val = data['y_val']
# Unpack keyword arguments
self.update_rule = kwargs.pop('update_rule', 'sgd')
self.optim_config = kwargs.pop('optim_config', {})
self.lr_decay = kwargs.pop('lr_decay', 1.0)
self.batch_size = kwargs.pop('batch_size', 100)
self.num_epochs = kwargs.pop('num_epochs', 10)
self.num_train_samples = kwargs.pop('num_train_samples', 1000)
self.num_val_samples = kwargs.pop('num_val_samples', None)
self.checkpoint_name = kwargs.pop('checkpoint_name', None)
self.print_every = kwargs.pop('print_every', 10)
self.verbose = kwargs.pop('verbose', True)
# Throw an error if there are extra keyword arguments (That's why pop is used)
if len(kwargs) > 0:
extra = ', '.join('"%s"' % k for k in list(kwargs.keys()))
raise ValueError('Unrecognized arguments %s' % extra)
# Make sure the update rule exists, then replace the string
# name with the actual function
if not hasattr(optim, self.update_rule):
raise ValueError('Invalid update_rule "%s"' % self.update_rule)
self.update_rule = getattr(optim, self.update_rule)
self._reset()
def _reset(self):
"""
Set up some book-keeping variables for optimization. Don't call this
manually.
"""
# Set up some variables for book-keeping
self.epoch = 0
self.best_val_acc = 0
self.best_params = {}
self.loss_history = []
self.train_acc_history = []
self.val_acc_history = []
# Make a deep copy of the optim_config for each parameter
self.optim_configs = {}
for p in self.model.params:
d = {k: v for k, v in self.optim_config.items()}
self.optim_configs[p] = d
def _step(self):
"""
Make a single gradient update. This is called by train() and should not
be called manually.
"""
# Make a minibatch of training data
num_train = self.X_train.shape[0]
batch_mask = np.random.choice(num_train, self.batch_size)
X_batch = self.X_train[batch_mask]
y_batch = self.y_train[batch_mask]
# Compute loss and gradient
loss, grads = self.model.loss(X_batch, y_batch)
self.loss_history.append(loss)
# Perform a parameter update
for p, w in self.model.params.items():
dw = grads[p]
config = self.optim_configs[p]
next_w, next_config = self.update_rule(w, dw, config)
self.model.params[p] = next_w
self.optim_configs[p] = next_config
def _save_checkpoint(self):
if self.checkpoint_name is None: return
checkpoint = {
'model': self.model,
'update_rule': self.update_rule,
'lr_decay': self.lr_decay,
'optim_config': self.optim_config,
'batch_size': self.batch_size,
'num_train_samples': self.num_train_samples,
'num_val_samples': self.num_val_samples,
'epoch': self.epoch,
'loss_history': self.loss_history,
'train_acc_history': self.train_acc_history,
'val_acc_history': self.val_acc_history,
}
filename = '%s_epoch_%d.pkl' % (self.checkpoint_name, self.epoch)
if self.verbose:
print('Saving checkpoint to "%s"' % filename)
with open(filename, 'wb') as f:
pickle.dump(checkpoint, f)
def check_accuracy(self, X, y, num_samples=None, batch_size=100):
"""
Check accuracy of the model on the provided data.
Inputs:
- X: Array of data, of shape (N, d_1, ..., d_k)
- y: Array of labels, of shape (N,)
- num_samples: If not None, subsample the data and only test the model
on num_samples datapoints.
- batch_size: Split X and y into batches of this size to avoid using
too much memory.
Returns:
- acc: Scalar giving the fraction of instances that were correctly
classified by the model.
"""
# Maybe subsample the data
N = X.shape[0]
if num_samples is not None and N > num_samples:
mask = np.random.choice(N, num_samples)
N = num_samples
X = X[mask]
y = y[mask]
# Compute predictions in batches
num_batches = N // batch_size
if N % batch_size != 0:
num_batches += 1
y_pred = []
for i in range(num_batches):
start = i * batch_size
end = (i + 1) * batch_size
scores = self.model.loss(X[start:end])
y_pred.append(np.argmax(scores, axis=1))
y_pred = np.hstack(y_pred)
acc = np.mean(y_pred == y)
return acc
def train(self):
"""
Run optimization to train the model.
"""
num_train = self.X_train.shape[0]
iterations_per_epoch = max(num_train // self.batch_size, 1)
num_iterations = self.num_epochs * iterations_per_epoch
for t in range(num_iterations):
self._step()
# Maybe print training loss
if self.verbose and t % self.print_every == 0:
print('(Iteration %d / %d) loss: %f' % (
t + 1, num_iterations, self.loss_history[-1]))
# At the end of every epoch, increment the epoch counter and decay
# the learning rate.
epoch_end = (t + 1) % iterations_per_epoch == 0
if epoch_end:
self.epoch += 1
for k in self.optim_configs:
self.optim_configs[k]['learning_rate'] *= self.lr_decay
# Check train and val accuracy on the first iteration, the last
# iteration, and at the end of each epoch.
first_it = (t == 0)
last_it = (t == num_iterations - 1)
if first_it or last_it or epoch_end:
train_acc = self.check_accuracy(self.X_train, self.y_train,
num_samples=self.num_train_samples)
val_acc = self.check_accuracy(self.X_val, self.y_val,
num_samples=self.num_val_samples)
self.train_acc_history.append(train_acc)
self.val_acc_history.append(val_acc)
self._save_checkpoint()
if self.verbose:
print('(Epoch %d / %d) train acc: %f; val_acc: %f' % (
self.epoch, self.num_epochs, train_acc, val_acc))
# Keep track of the best model
if val_acc > self.best_val_acc:
self.best_val_acc = val_acc
self.best_params = {}
for k, v in self.model.params.items():
self.best_params[k] = v.copy()
# At the end of training swap the best params into the model
self.model.params = self.best_params
| 39.651466 | 86 | 0.615789 |
5005cd0032ff0461de7aef786b598e5a5c927d9f | 532 | py | Python | Python3.6.5/Classic_Algorithms/closest_pair/demo2.py | huioo/Mega-Project-List | e17fb5b0bdff54e4d6feb59fead520e44803548d | [
"MIT"
] | null | null | null | Python3.6.5/Classic_Algorithms/closest_pair/demo2.py | huioo/Mega-Project-List | e17fb5b0bdff54e4d6feb59fead520e44803548d | [
"MIT"
] | null | null | null | Python3.6.5/Classic_Algorithms/closest_pair/demo2.py | huioo/Mega-Project-List | e17fb5b0bdff54e4d6feb59fead520e44803548d | [
"MIT"
] | null | null | null | # Closest pair problem
"""
The closest pair of points problem or closest pair problem is a problem of computational geometry:
given n points in metric space, find a pair of points with the smallest distance between them.
最接近的点问题或最接近的一对问题是计算几何的问题:
在度量(矩阵)空间中给定n个点,找到一对点,它们之间的距离最小。
"""
import random
# test code
def gen_random_matrix(m, n):
# m行,n列
return [[random.randint(1, n) for j in range(n)] for i in range(m)]
def closest_pair():
pass
if __name__ == '__main__':
ptlst = gen_random_matrix(5, 6)
| 20.461538 | 99 | 0.712406 |
58ec3addf0388e5b48df96984d2730a140e35aa8 | 6,799 | py | Python | blog/app/controller/admin/user.py | henrY2Young/flask-jwt | f1c47efee7fd7f271c02172371c2d9cec8adde5d | [
"MIT"
] | null | null | null | blog/app/controller/admin/user.py | henrY2Young/flask-jwt | f1c47efee7fd7f271c02172371c2d9cec8adde5d | [
"MIT"
] | null | null | null | blog/app/controller/admin/user.py | henrY2Young/flask-jwt | f1c47efee7fd7f271c02172371c2d9cec8adde5d | [
"MIT"
] | null | null | null | from time import time
import time
from flask import request, jsonify, Flask, render_template, session, redirect, url_for, g
from app import db
from app.controller.admin import user
from app.controller.home import home
from app.models.Users import Users
from function import Common
from app.controller.auth import Auth
from app.models.Categories import Categories
from app.models.Permission import Permission
@user.route('/index')
def index():
return '111'
@user.route('/register', methods=['POST'])
def register():
if request.method != 'POST':
Common.to_json('error', [])
username = request.form.get('username')
password = request.form.get('password')
userExit = db.session.query(Users).filter(Users.name == username).first()
if userExit is not None:
return Common.to_json('error', dict(msg='用户名已经存在'))
else:
dateTime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
item = Users(name=username, password=password, address='1', tel='132111111', add_time=dateTime)
db.session.add(item)
res = db.session.commit()
return Common.to_json('success', dict(msg='注册成功'))
@user.route('/login', methods=['POST'])
def login():
if request.method != 'POST':
return Common.to_json('error', [])
username = request.json.get('username')
password = request.json.get('password')
userExit = db.session.query(Users).filter(Users.name == username).first()
if userExit is None:
return Common.to_json('error', dict(msg='用户不存在'))
if userExit and userExit.check_login_password(password):
login_time = int(time.time())
token = Auth.generate_jwt(userExit.id, login_time)
return Common.to_json('success', dict(
userinfo=dict(username=userExit.name), token=token['jwt'], refresh_token=token['refresh_token']))
else:
return Common.to_json('error', dict(msg='密码错误'))
@user.route('/refreshCode', methods=['post'])
@Auth.require_jwt
def refresh_token():
refresh_token = request.form.get('refresh_token')
return jsonify(Auth.get_jwt_by_refresh_code(refresh_token))
@user.route('/getInfo', methods=['post'])
@Auth.require_jwt
def getInfo():
return jsonify(Auth.decode_jwt(g.authorization))
@user.route('/authenticateUrl', methods=['post'])
@Auth.require_jwt
def authenticatePermission():
url = request.json.get('url')
if Auth.authenticatePermission(url):
return Common.to_json('success')
return Common.to_json('error')
@user.route('/getMenu', methods=['post'])
@Auth.require_jwt
def getMenuList():
res = Auth.permission()
return jsonify(res)
@user.route('/getRouter', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def getRouter():
categoriesList = db.session.query(Categories).all()
res = []
for index, items in enumerate(categoriesList):
item = dict(id=items.id, date=str(items.create_time), icon=items.icon, url=items.url, parent_id=items.parent_id,
name=items.name)
res.append(item)
res = Common.to_json('success', res)
return res
@user.route('/addRouter', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def addRouter():
name = request.json.get('name')
url = request.json.get('url')
parent_id = request.json.get('parent_id')
icon = request.json.get('icon')
create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
router = Categories(name=name, url=url, parent_id=parent_id, icon=icon, create_time=create_time)
db.session.add(router)
result = db.session.commit()
return Common.to_json('success')
@user.route('/getRouterByid', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def getRouterByid():
id = request.json.get('id')
router = db.session.query(Categories).filter(Categories.id == id).first()
res = dict(id=router.id, name=router.name, url=router.url, icon=router.icon, parent_id=router.parent_id)
return Common.to_json('success', res)
@user.route('/delRouterByid', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def delRouterByid():
id = request.json.get('id')
# item = db.session.query()
result = Categories.query.filter_by(id=id).delete()
str(result)
# if()
return Common.to_json('success')
# db.session.query(Categories.id == id)
@user.route('/updateRouter', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def updateRouter():
id = request.json.get('id')
name = request.json.get('name')
url = request.json.get('url')
parent_id = request.json.get('parent_id')
icon = request.json.get('icon')
create_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
item = db.session.query(Categories).filter_by(id=id).first()
item.name = name
item.url = url
item.parent_id = parent_id
item.icon = icon
item.create_time = create_time
db.session.add(item)
result = db.session.commit()
return Common.to_json('success')
@user.route('/getPermissionList', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def getPermissionList():
res = db.session.query(Permission).all()
response = []
for i in res:
item = dict(id=i.id, permission=i.permission, user_id=i.user_id, create_time=str(i.create_time),
creator=i.creator)
response.append(item)
return Common.to_json('success', response)
@user.route('/getPermissionByid', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def getPermissionByid():
id = request.json.get('id')
res = db.session.query(Permission).filter(Permission.id == id).first()
item = dict(id=res.id, user_id=res.user_id, permission=res.permission, create_time=str(res.create_time))
return Common.to_json('success', item)
@user.route('/addPermission', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def addPermission():
userId = request.json.get('user_id')
permission = request.json.get('permission')
uId = Auth.getID()
creat_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
creator = db.session.query(Users).filter(Users.id == uId).first()
creatorname = creator.name
item = Permission(user_id=userId, permission=permission, create_time=creat_time, creator=creatorname)
db.session.add(item)
res = db.session.commit()
return Common.to_json('success')
@user.route('/updatePermission', methods=['post'])
@Auth.require_jwt
@Auth.require_root
def updatePermission():
    """Change the ``permission`` string of an existing record by id."""
    record_id = request.json.get('id')
    new_permission = request.json.get('permission')
    record = Permission.query.filter_by(id=record_id).first()
    record.permission = new_permission
    db.session.add(record)
    result = db.session.commit()
    # NOTE(review): commit() returns None, so ``data`` is always the string
    # 'None'; presumably clients ignore it — confirm before changing the payload.
    return Common.to_json('success', dict(data=str(result)))
| 32.222749 | 120 | 0.683924 |
6b7f40b80127e7303b509b6a47676381137259f8 | 825 | py | Python | Autocoders/Python/src/fprime_ac/generators/visitors/TestImplVisitorBase.py | chrisdonlan/fprime | 0cab90e238cff1b50c20f1e148a44cf8827a5bf8 | [
"Apache-2.0"
] | 5 | 2019-10-22T03:41:02.000Z | 2022-01-16T12:48:31.000Z | Autocoders/Python/src/fprime_ac/generators/visitors/TestImplVisitorBase.py | chrisdonlan/fprime | 0cab90e238cff1b50c20f1e148a44cf8827a5bf8 | [
"Apache-2.0"
] | 27 | 2019-02-07T17:58:58.000Z | 2019-08-13T00:46:24.000Z | Autocoders/Python/src/fprime_ac/generators/visitors/TestImplVisitorBase.py | chrisdonlan/fprime | 0cab90e238cff1b50c20f1e148a44cf8827a5bf8 | [
"Apache-2.0"
] | 3 | 2019-01-01T18:44:37.000Z | 2019-08-01T01:19:39.000Z | #!/bin/env python
#===============================================================================
# NAME: TestImplVisitorBase.py
#
# DESCRIPTION: A base class for TestImpl visitors
#
# AUTHOR: bocchino
# EMAIL: bocchino@jpl.nasa.gov
# DATE CREATED: November 14, 2015
#
# Copyright 2015, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
#===============================================================================
from fprime_ac.generators.visitors import ComponentVisitorBase
class TestImplVisitorBase(ComponentVisitorBase.ComponentVisitorBase):
    """Shared behavior for visitors that generate TestImpl output."""
    def initTestImpl(self, obj, c):
        """Initialize *c* and derive the component/gtest base class names."""
        self.init(obj, c)
        base = c.name()
        c.component_base = "%sComponentBase" % base
        c.gtest_base = "%sGTestBase" % base
| 30.555556 | 80 | 0.579394 |
43a57be4c2845edba3718c78bc596fbb6e3b5100 | 1,234 | py | Python | tests/conftest.py | njdister/njdister-github3.py | 7a714ad0c9d9ddfda9c3e20d76f94ec992661edc | [
"BSD-3-Clause"
] | null | null | null | tests/conftest.py | njdister/njdister-github3.py | 7a714ad0c9d9ddfda9c3e20d76f94ec992661edc | [
"BSD-3-Clause"
] | null | null | null | tests/conftest.py | njdister/njdister-github3.py | 7a714ad0c9d9ddfda9c3e20d76f94ec992661edc | [
"BSD-3-Clause"
] | 1 | 2021-09-13T09:01:57.000Z | 2021-09-13T09:01:57.000Z | import base64
import betamax
import os
import pytest
from betamax_matchers import json_body
# Basic-auth credentials for recording cassettes; harmless fallback values
# are used when the environment variables are absent (e.g. on replay-only CI).
credentials = [os.environ.get('GH_USER', 'foo').encode(),
               os.environ.get('GH_PASSWORD', 'bar').encode()]
betamax.Betamax.register_request_matcher(json_body.JSONBodyMatcher)
with betamax.Betamax.configure() as config:
    config.cassette_library_dir = 'tests/cassettes'
    # Never touch the network on Travis; record new cassettes once locally.
    record_mode = 'never' if os.environ.get('TRAVIS_GH3') else 'once'
    config.default_cassette_options['record_mode'] = record_mode
    # Scrub secrets out of recorded cassettes before they are written to disk.
    config.define_cassette_placeholder(
        '<AUTH_TOKEN>',
        os.environ.get('GH_AUTH', 'x' * 20)
    )
    config.default_cassette_options['match_requests_on'].append('json-body')
    config.define_cassette_placeholder(
        '<BASIC_AUTH>',
        base64.b64encode(b':'.join(credentials)).decode()
    )
@pytest.fixture
def betamax_simple_body(request):
    """Return configuration to match cassette on uri, method and body."""
    options = {'match_requests_on': ['uri', 'method', 'body']}
    request.cls.betamax_simple_body = options
@pytest.fixture
def enterprise_url(request):
    """Configure class with enterprise url."""
    # Attach the GitHub Enterprise base URL to the requesting test class so
    # class-based tests can reference ``self.enterprise_url``.
    request.cls.enterprise_url = 'https://enterprise.github3.com'
| 27.422222 | 76 | 0.700162 |
b495e591de75149419c7ff59863ad756c08a6f7d | 428 | py | Python | packages/python/plotly/plotly/validators/histogram/marker/pattern/_shapesrc.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/histogram/marker/pattern/_shapesrc.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/histogram/marker/pattern/_shapesrc.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | import _plotly_utils.basevalidators
class ShapesrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``histogram.marker.pattern.shapesrc`` property."""

    def __init__(
        self, plotly_name="shapesrc", parent_name="histogram.marker.pattern", **kwargs
    ):
        # default edit_type is "none" unless the caller overrides it
        edit_type = kwargs.pop("edit_type", "none")
        super(ShapesrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| 30.571429 | 86 | 0.658879 |
9dfe6718aeb68b0e70dfdefc7df04fe49d48184b | 3,317 | py | Python | chatbot/corpus/cornelldata.py | HarshitBagla/Chatbot | 09b41b3bb48b0ba0f0532adbac0331799d53ca60 | [
"Apache-2.0"
] | null | null | null | chatbot/corpus/cornelldata.py | HarshitBagla/Chatbot | 09b41b3bb48b0ba0f0532adbac0331799d53ca60 | [
"Apache-2.0"
] | null | null | null | chatbot/corpus/cornelldata.py | HarshitBagla/Chatbot | 09b41b3bb48b0ba0f0532adbac0331799d53ca60 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Conchylicultor. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import ast
"""
Load the cornell movie dialog corpus.
Available from here:
http://www.cs.cornell.edu/~cristian/Cornell_Movie-Dialogs_Corpus.html
"""
class CornellData:
    """Loader for the Cornell Movie-Dialogs Corpus.

    Parses ``movie_lines.txt`` and ``movie_conversations.txt`` from the
    given directory and exposes the conversations with their full line
    records attached.  (This merges the previously copy-pasted parsing
    loops into one shared helper, per the old TODO.)
    """
    # Field separator used throughout the corpus files.
    _SEPARATOR = " +++$+++ "
    def __init__(self, dirName):
        """
        Args:
            dirName (string): directory where to load the corpus
        """
        self.lines = {}
        self.conversations = []
        MOVIE_LINES_FIELDS = ["lineID", "characterID", "movieID", "character", "text"]
        MOVIE_CONVERSATIONS_FIELDS = ["character1ID", "character2ID", "movieID", "utteranceIDs"]
        self.lines = self.loadLines(os.path.join(dirName, "movie_lines.txt"), MOVIE_LINES_FIELDS)
        self.conversations = self.loadConversations(os.path.join(dirName, "movie_conversations.txt"), MOVIE_CONVERSATIONS_FIELDS)
    def _parseRecords(self, fileName, fields):
        """Yield one dict per file line, mapping each name in *fields* to the
        corresponding ' +++$+++ '-separated value (last field keeps its
        trailing newline, matching the original behavior)."""
        # The corpus ships as iso-8859-1 (see the original encoding note).
        with open(fileName, 'r', encoding='iso-8859-1') as f:
            for line in f:
                values = line.split(self._SEPARATOR)
                yield {field: values[i] for i, field in enumerate(fields)}
    def loadLines(self, fileName, fields):
        """
        Args:
            fileName (str): file to load
            fields (set<str>): fields to extract
        Return:
            dict<dict<str>>: the extracted fields for each line, keyed by lineID
        """
        return {record['lineID']: record for record in self._parseRecords(fileName, fields)}
    def loadConversations(self, fileName, fields):
        """
        Args:
            fileName (str): file to load
            fields (set<str>): fields to extract
        Return:
            list<dict>: one dict per conversation, with a "lines" list of
            the full line records in utterance order
        """
        conversations = []
        for convObj in self._parseRecords(fileName, fields):
            # utteranceIDs is a python-literal list, e.g. "['L598485', 'L598486']"
            lineIds = ast.literal_eval(convObj["utteranceIDs"])
            # Reassemble the full line records in order.
            convObj["lines"] = [self.lines[lineId] for lineId in lineIds]
            conversations.append(convObj)
        return conversations
    def getConversations(self):
        """Return the list of parsed conversations."""
        return self.conversations
4e38b47c8088ead45820b7fc0afd212c32199875 | 189 | py | Python | app_python/conftest.py | SmirnovaMarina/devops | 63badf302d809bfc20a1dab990938e4b4c201997 | [
"MIT"
] | null | null | null | app_python/conftest.py | SmirnovaMarina/devops | 63badf302d809bfc20a1dab990938e4b4c201997 | [
"MIT"
] | null | null | null | app_python/conftest.py | SmirnovaMarina/devops | 63badf302d809bfc20a1dab990938e4b4c201997 | [
"MIT"
] | 1 | 2021-08-23T08:23:17.000Z | 2021-08-23T08:23:17.000Z | import pytest
from main import create_app
@pytest.fixture
def app():
    # Build the Flask application in testing mode; yielding (rather than
    # returning) keeps the fixture alive for the duration of each test.
    app = create_app({'TESTING': True})
    yield app
@pytest.fixture
def client(app):
    # Test client bound to the ``app`` fixture's application.
    return app.test_client()
| 13.5 | 39 | 0.698413 |
558f76aafd7960785b77e82d3a3acc05e04c4ac6 | 23,873 | py | Python | hydrus/client/gui/ClientGUIControls.py | ReAnzu/hydrus | 069f77e1941d13b3bdd969aeeffd7ae003fcb71e | [
"WTFPL"
] | 1 | 2021-02-24T22:12:30.000Z | 2021-02-24T22:12:30.000Z | hydrus/client/gui/ClientGUIControls.py | ReAnzu/hydrus | 069f77e1941d13b3bdd969aeeffd7ae003fcb71e | [
"WTFPL"
] | null | null | null | hydrus/client/gui/ClientGUIControls.py | ReAnzu/hydrus | 069f77e1941d13b3bdd969aeeffd7ae003fcb71e | [
"WTFPL"
] | null | null | null | import typing
from qtpy import QtCore as QC
from qtpy import QtWidgets as QW
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusNetworking
from hydrus.core import HydrusText
from hydrus.client import ClientConstants as CC
from hydrus.client.gui import ClientGUICommon
from hydrus.client.gui import ClientGUICore as CGC
from hydrus.client.gui import ClientGUIFunctions
from hydrus.client.gui import ClientGUIMenus
from hydrus.client.gui import ClientGUIScrolledPanels
from hydrus.client.gui import ClientGUITime
from hydrus.client.gui import ClientGUITopLevelWindowsPanels
from hydrus.client.gui import QtPorting as QP
from hydrus.client.gui.lists import ClientGUIListConstants as CGLC
from hydrus.client.gui.lists import ClientGUIListCtrl
class BandwidthRulesCtrl( ClientGUICommon.StaticBox ):
    """A 'bandwidth rules' static box: a sortable list of
    (bandwidth type, time delta, max allowed) rules with add/edit/delete
    buttons.  GetValue() packs the rows back into a
    HydrusNetworking.BandwidthRules object."""
    def __init__( self, parent, bandwidth_rules ):
        ClientGUICommon.StaticBox.__init__( self, parent, 'bandwidth rules' )
        listctrl_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
        # example for later:
        '''
        def sort_call( desired_columns, rule ):
            ( bandwidth_type, time_delta, max_allowed ) = rule
            sort_time_delta = SafeNoneInt( time_delta )
            result = {}
            result[ CGLC.COLUMN_LIST_BANDWIDTH_RULES.MAX_ALLOWED ] = max_allowed
            result[ CGLC.COLUMN_LIST_BANDWIDTH_RULES.EVERY ] = sort_time_delta
            return result
        def display_call( desired_columns, rule ):
            ( bandwidth_type, time_delta, max_allowed ) = rule
            if bandwidth_type == HC.BANDWIDTH_TYPE_DATA:
                pretty_max_allowed = HydrusData.ToHumanBytes( max_allowed )
            elif bandwidth_type == HC.BANDWIDTH_TYPE_REQUESTS:
                pretty_max_allowed = '{} requests'.format( HydrusData.ToHumanInt( max_allowed ) )
            pretty_time_delta = HydrusData.TimeDeltaToPrettyTimeDelta( time_delta )
            result = {}
            result[ CGLC.COLUMN_LIST_BANDWIDTH_RULES.MAX_ALLOWED ] = pretty_max_allowed
            result[ CGLC.COLUMN_LIST_BANDWIDTH_RULES.EVERY ] = pretty_time_delta
            return result
        '''
        self._listctrl = ClientGUIListCtrl.BetterListCtrl( listctrl_panel, CGLC.COLUMN_LIST_BANDWIDTH_RULES.ID, 8, self._ConvertRuleToListCtrlTuples, use_simple_delete = True, activation_callback = self._Edit )
        listctrl_panel.SetListCtrl( self._listctrl )
        listctrl_panel.AddButton( 'add', self._Add )
        listctrl_panel.AddButton( 'edit', self._Edit, enabled_only_on_selection = True )
        listctrl_panel.AddDeleteButton()
        #
        # populate from the incoming rules object
        self._listctrl.AddDatas( bandwidth_rules.GetRules() )
        self._listctrl.Sort()
        #
        self.Add( listctrl_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
    def _Add( self ):
        # default new rule: 100MB of data, no time delta ('monthly'/unbounded)
        rule = ( HC.BANDWIDTH_TYPE_DATA, None, 1024 * 1024 * 100 )
        with ClientGUITopLevelWindowsPanels.DialogEdit( self, 'edit rule' ) as dlg:
            panel = self._EditPanel( dlg, rule )
            dlg.SetPanel( panel )
            if dlg.exec() == QW.QDialog.Accepted:
                new_rule = panel.GetValue()
                self._listctrl.AddDatas( ( new_rule, ) )
                self._listctrl.Sort()
    def _ConvertRuleToListCtrlTuples( self, rule ):
        # produce (display, sort) tuples for one list row
        ( bandwidth_type, time_delta, max_allowed ) = rule
        pretty_time_delta = HydrusData.TimeDeltaToPrettyTimeDelta( time_delta )
        if bandwidth_type == HC.BANDWIDTH_TYPE_DATA:
            pretty_max_allowed = HydrusData.ToHumanBytes( max_allowed )
        elif bandwidth_type == HC.BANDWIDTH_TYPE_REQUESTS:
            pretty_max_allowed = HydrusData.ToHumanInt( max_allowed ) + ' requests'
        # None time deltas sort safely via SafeNoneInt
        sort_time_delta = ClientGUIListCtrl.SafeNoneInt( time_delta )
        sort_tuple = ( max_allowed, sort_time_delta )
        display_tuple = ( pretty_max_allowed, pretty_time_delta )
        return ( display_tuple, sort_tuple )
    def _Edit( self ):
        selected_rules = self._listctrl.GetData( only_selected = True )
        for rule in selected_rules:
            with ClientGUITopLevelWindowsPanels.DialogEdit( self, 'edit rule' ) as dlg:
                panel = self._EditPanel( dlg, rule )
                dlg.SetPanel( panel )
                if dlg.exec() == QW.QDialog.Accepted:
                    edited_rule = panel.GetValue()
                    # replace the old row with the edited one
                    self._listctrl.DeleteDatas( ( rule, ) )
                    self._listctrl.AddDatas( ( edited_rule, ) )
                else:
                    # user cancelled: stop editing the remaining selection
                    break
        self._listctrl.Sort()
    def GetValue( self ):
        # rebuild a BandwidthRules object from the current list rows
        bandwidth_rules = HydrusNetworking.BandwidthRules()
        for rule in self._listctrl.GetData():
            ( bandwidth_type, time_delta, max_allowed ) = rule
            bandwidth_rules.AddRule( bandwidth_type, time_delta, max_allowed )
        return bandwidth_rules
    class _EditPanel( ClientGUIScrolledPanels.EditPanel ):
        """Dialog panel for editing a single rule: a type choice, a
        bytes-or-requests amount and a time delta."""
        def __init__( self, parent, rule ):
            ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
            self._bandwidth_type = ClientGUICommon.BetterChoice( self )
            self._bandwidth_type.addItem( 'data', HC.BANDWIDTH_TYPE_DATA )
            self._bandwidth_type.addItem( 'requests', HC.BANDWIDTH_TYPE_REQUESTS )
            # switching the type swaps which amount widget is visible
            self._bandwidth_type.currentIndexChanged.connect( self._UpdateEnabled )
            self._max_allowed_bytes = BytesControl( self )
            self._max_allowed_requests = QP.MakeQSpinBox( self, min=1, max=1048576 )
            self._time_delta = ClientGUITime.TimeDeltaButton( self, min = 1, days = True, hours = True, minutes = True, seconds = True, monthly_allowed = True )
            #
            # seed the widgets from the incoming rule
            ( bandwidth_type, time_delta, max_allowed ) = rule
            self._bandwidth_type.SetValue( bandwidth_type )
            self._time_delta.SetValue( time_delta )
            if bandwidth_type == HC.BANDWIDTH_TYPE_DATA:
                self._max_allowed_bytes.SetValue( max_allowed )
            else:
                self._max_allowed_requests.setValue( max_allowed )
            self._UpdateEnabled()
            #
            hbox = QP.HBoxLayout()
            QP.AddToLayout( hbox, self._max_allowed_bytes, CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._max_allowed_requests, CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._bandwidth_type, CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, ClientGUICommon.BetterStaticText(self,' every '), CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._time_delta, CC.FLAGS_CENTER_PERPENDICULAR )
            self.widget().setLayout( hbox )
        def _UpdateEnabled( self ):
            # show only the amount widget matching the selected type
            bandwidth_type = self._bandwidth_type.GetValue()
            if bandwidth_type == HC.BANDWIDTH_TYPE_DATA:
                self._max_allowed_bytes.show()
                self._max_allowed_requests.hide()
            elif bandwidth_type == HC.BANDWIDTH_TYPE_REQUESTS:
                self._max_allowed_bytes.hide()
                self._max_allowed_requests.show()
        def GetValue( self ):
            # returns ( bandwidth_type, time_delta, max_allowed )
            bandwidth_type = self._bandwidth_type.GetValue()
            time_delta = self._time_delta.GetValue()
            if bandwidth_type == HC.BANDWIDTH_TYPE_DATA:
                max_allowed = self._max_allowed_bytes.GetValue()
            elif bandwidth_type == HC.BANDWIDTH_TYPE_REQUESTS:
                max_allowed = self._max_allowed_requests.value()
            return ( bandwidth_type, time_delta, max_allowed )
class BytesControl( QW.QWidget ):
    """Spinbox plus unit dropdown (B/KB/MB/GB) for entering a byte count.

    GetValue/SetValue deal in raw bytes; valueChanged is emitted whenever
    either the number or the unit changes.
    """
    valueChanged = QC.Signal()
    def __init__( self, parent, initial_value = 65536 ):
        QW.QWidget.__init__( self, parent )
        self._spin = QP.MakeQSpinBox( self, min=0, max=1048576 )
        self._unit = ClientGUICommon.BetterChoice( self )
        self._unit.addItem( 'B', 1 )
        self._unit.addItem( 'KB', 1024 )
        self._unit.addItem( 'MB', 1024 * 1024 )
        self._unit.addItem( 'GB', 1024 * 1024 * 1024 )
        #
        self.SetValue( initial_value )
        #
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, self._spin, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._unit, CC.FLAGS_CENTER_PERPENDICULAR )
        self.setLayout( hbox )
        # re-emit either child widget's change as our own valueChanged
        self._spin.valueChanged.connect( self._HandleValueChanged )
        self._unit.currentIndexChanged.connect( self._HandleValueChanged )
    def _HandleValueChanged( self, val ):
        self.valueChanged.emit()
    def GetSeparatedValue( self ):
        # (count, unit multiplier) exactly as shown, e.g. (5, 1048576) for 5MB
        return (self._spin.value(), self._unit.GetValue())
    def GetValue( self ):
        # total size in bytes
        return self._spin.value() * self._unit.GetValue()
    def SetSeparatedValue( self, value, unit ):
        return (self._spin.setValue( value ), self._unit.SetValue( unit ))
    def SetValue( self, value: int ):
        max_unit = 1024 * 1024 * 1024
        # pick the largest unit (up to GB) that divides the value exactly
        unit = 1
        while value % 1024 == 0 and unit < max_unit:
            value //= 1024
            unit *= 1024
        self._spin.setValue( value )
        self._unit.SetValue( unit )
class NoneableBytesControl( QW.QWidget ):
    """A BytesControl paired with a checkbox meaning 'no limit' (None).

    GetValue returns None while the checkbox is ticked, otherwise the byte
    count from the embedded BytesControl.
    """
    valueChanged = QC.Signal()
    def __init__( self, parent, initial_value = 65536, none_label = 'no limit' ):
        QW.QWidget.__init__( self, parent )
        self._bytes = BytesControl( self )
        self._none_checkbox = QW.QCheckBox( none_label, self )
        #
        self.SetValue( initial_value )
        #
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, self._bytes, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._none_checkbox, CC.FLAGS_CENTER_PERPENDICULAR )
        self.setLayout( hbox )
        #
        self._none_checkbox.clicked.connect( self._UpdateEnabled )
        self._bytes.valueChanged.connect( self._HandleValueChanged )
        self._none_checkbox.clicked.connect( self._HandleValueChanged )
    def _UpdateEnabled( self ):
        # grey out the bytes widget while 'no limit' is ticked
        if self._none_checkbox.isChecked():
            self._bytes.setEnabled( False )
        else:
            self._bytes.setEnabled( True )
    def _HandleValueChanged( self ):
        self.valueChanged.emit()
    def GetValue( self ):
        if self._none_checkbox.isChecked():
            return None
        else:
            return self._bytes.GetValue()
    def setToolTip( self, text ):
        QW.QWidget.setToolTip( self, text )
        # propagate the tooltip to every child widget as well
        for c in self.children():
            if isinstance( c, QW.QWidget ):
                c.setToolTip( text )
    def SetValue( self, value ):
        if value is None:
            self._none_checkbox.setChecked( True )
        else:
            self._none_checkbox.setChecked( False )
            self._bytes.SetValue( value )
        self._UpdateEnabled()
class NetworkJobControl( QW.QFrame ):
    """Status widget for a single network job: status text, transfer speed,
    a progress gauge, a cog menu of override actions and a cancel button.

    The widget registers with the gui's UI-update timer while it holds a
    job and unregisters when the job is cleared.
    """
    def __init__( self, parent ):
        QW.QFrame.__init__( self, parent )
        self.setFrameStyle( QW.QFrame.Box | QW.QFrame.Raised )
        self._network_job = None
        self._download_started = False
        self._auto_override_bandwidth_rules = False
        self._left_text = ClientGUICommon.BetterStaticText( self, ellipsize_end = True )
        self._right_text = ClientGUICommon.BetterStaticText( self )
        self._right_text.setAlignment( QC.Qt.AlignRight | QC.Qt.AlignVCenter )
        # cached minimum width of the speed label, to avoid jittery resizes
        self._last_right_min_width = 0
        self._gauge = ClientGUICommon.Gauge( self )
        self._cog_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().cog, self._ShowCogMenu )
        self._cancel_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().stop, self.Cancel )
        #
        self._Update()
        #
        st_hbox = QP.HBoxLayout()
        QP.AddToLayout( st_hbox, self._left_text, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( st_hbox, self._right_text, CC.FLAGS_CENTER_PERPENDICULAR )
        left_vbox = QP.VBoxLayout()
        QP.AddToLayout( left_vbox, st_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( left_vbox, self._gauge, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, left_vbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        QP.AddToLayout( hbox, self._cog_button, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._cancel_button, CC.FLAGS_CENTER_PERPENDICULAR )
        self.setLayout( hbox )
    def _ShowCogMenu( self ):
        # build the per-job override menu; entries only appear when the job
        # is actually in the matching waiting state
        menu = QW.QMenu()
        if self._network_job is not None:
            if self._network_job.CurrentlyWaitingOnConnectionError():
                ClientGUIMenus.AppendMenuItem( menu, 'reattempt connection now', 'Stop waiting on a connection error and reattempt the job now.', self._network_job.OverrideConnectionErrorWait )
            if self._network_job.CurrentlyWaitingOnServersideBandwidth():
                ClientGUIMenus.AppendMenuItem( menu, 'reattempt request now (server reports low bandwidth)', 'Stop waiting on a serverside bandwidth delay and reattempt the job now.', self._network_job.OverrideServersideBandwidthWait )
            if self._network_job.ObeysBandwidth():
                ClientGUIMenus.AppendMenuItem( menu, 'override bandwidth rules for this job', 'Tell the current job to ignore existing bandwidth rules and go ahead anyway.', self._network_job.OverrideBandwidth )
            if not self._network_job.TokensOK():
                ClientGUIMenus.AppendMenuItem( menu, 'override gallery slot requirements for this job', 'Force-allow this download to proceed, ignoring the normal gallery wait times.', self._network_job.OverrideToken )
            ClientGUIMenus.AppendSeparator( menu )
        ClientGUIMenus.AppendMenuCheckItem( menu, 'auto-override bandwidth rules for all jobs here after five seconds', 'Ignore existing bandwidth rules for all jobs under this control, instead waiting a flat five seconds.', self._auto_override_bandwidth_rules, self.FlipAutoOverrideBandwidth )
        CGC.core().PopupMenu( self._cog_button, menu )
    def _OverrideBandwidthIfAppropriate( self ):
        if self._network_job is None or self._network_job.NoEngineYet():
            return
        else:
            # with auto-override on, skip bandwidth rules once the job is 5s old
            if self._auto_override_bandwidth_rules and HydrusData.TimeHasPassed( self._network_job.GetCreationTime() + 5 ):
                self._network_job.OverrideBandwidth()
    def _Update( self ):
        # refresh the text, gauge and cancel-button state from the job
        if self._network_job is None or self._network_job.NoEngineYet():
            self._left_text.clear()
            self._right_text.clear()
            self._gauge.SetRange( 1 )
            self._gauge.SetValue( 0 )
            can_cancel = False
        else:
            if self._network_job.IsDone():
                can_cancel = False
            else:
                can_cancel = True
            ( status_text, current_speed, bytes_read, bytes_to_read ) = self._network_job.GetStatus()
            self._left_text.setText( status_text )
            if not self._download_started and current_speed > 0:
                self._download_started = True
            speed_text = ''
            if self._download_started and not self._network_job.HasError():
                if bytes_read is not None:
                    if bytes_to_read is not None and bytes_read != bytes_to_read:
                        speed_text += HydrusData.ConvertValueRangeToBytes( bytes_read, bytes_to_read )
                    else:
                        speed_text += HydrusData.ToHumanBytes( bytes_read )
                if current_speed != bytes_to_read: # if it is a real quick download, just say its size
                    speed_text += ' ' + HydrusData.ToHumanBytes( current_speed ) + '/s'
            self._right_text.setText( speed_text )
            right_width = ClientGUIFunctions.ConvertTextToPixelWidth( self._right_text, len( speed_text ) )
            right_min_width = right_width
            # only touch the minimum width when it actually changed
            if right_min_width != self._last_right_min_width:
                self._last_right_min_width = right_min_width
                self._right_text.setMinimumWidth( right_min_width )
            self._gauge.SetRange( bytes_to_read )
            self._gauge.SetValue( bytes_read )
        if can_cancel:
            if not self._cancel_button.isEnabled():
                self._cancel_button.setEnabled( True )
        else:
            if self._cancel_button.isEnabled():
                self._cancel_button.setEnabled( False )
    def Cancel( self ):
        if self._network_job is not None:
            self._network_job.Cancel( 'Cancelled by user.' )
    def ClearNetworkJob( self ):
        self.SetNetworkJob( None )
    def FlipAutoOverrideBandwidth( self ):
        self._auto_override_bandwidth_rules = not self._auto_override_bandwidth_rules
    def SetNetworkJob( self, network_job ):
        # swap in a new job (or None); (un)register with the UI update timer
        if network_job is None:
            if self._network_job is not None:
                self._network_job = None
                self._Update()
                HG.client_controller.gui.UnregisterUIUpdateWindow( self )
        else:
            if self._network_job != network_job:
                self._network_job = network_job
                self._download_started = False
                HG.client_controller.gui.RegisterUIUpdateWindow( self )
    def TIMERUIUpdate( self ):
        # called periodically by the gui while registered
        self._OverrideBandwidthIfAppropriate()
        if HG.client_controller.gui.IShouldRegularlyUpdate( self ):
            self._Update()
class TextAndPasteCtrl( QW.QWidget ):
    """A line edit with a paste button.  Enter submits the single entered
    text, the paste button submits every newline-separated clipboard line;
    both feed ``add_callable`` with a sequence of strings.
    """
    def __init__( self, parent, add_callable, allow_empty_input = False ):
        self._add_callable = add_callable
        self._allow_empty_input = allow_empty_input
        QW.QWidget.__init__( self, parent )
        self._text_input = QW.QLineEdit( self )
        # catch Enter in the line edit and route it to EnterText
        self._text_input.installEventFilter( ClientGUICommon.TextCatchEnterEventFilter( self._text_input, self.EnterText ) )
        self._paste_button = ClientGUICommon.BetterBitmapButton( self, CC.global_pixmaps().paste, self._Paste )
        self._paste_button.setToolTip( 'Paste multiple inputs from the clipboard. Assumes the texts are newline-separated.' )
        self.setFocusProxy( self._text_input )
        #
        hbox = QP.HBoxLayout()
        QP.AddToLayout( hbox, self._text_input, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( hbox, self._paste_button, CC.FLAGS_CENTER_PERPENDICULAR )
        self.setLayout( hbox )
    def _Paste( self ):
        try:
            raw_text = HG.client_controller.GetClipboardText()
        except HydrusExceptions.DataMissing as e:
            QW.QMessageBox.critical( self, 'Error', str(e) )
            return
        try:
            texts = [ text for text in HydrusText.DeserialiseNewlinedTexts( raw_text ) ]
            # unless empty input is explicitly allowed, drop blank lines
            if not self._allow_empty_input:
                texts = [ text for text in texts if text != '' ]
            if len( texts ) > 0:
                self._add_callable( texts )
        except:
            QW.QMessageBox.critical( self, 'Error', 'I could not understand what was in the clipboard' )
    def EnterText( self ):
        text = self._text_input.text()
        text = HydrusText.StripIOInputLine( text )
        if text == '' and not self._allow_empty_input:
            return
        # submit as a one-element sequence, matching the paste path
        self._add_callable( ( text, ) )
        self._text_input.clear()
    def GetValue( self ):
        return self._text_input.text()
    def setPlaceholderText( self, text ):
        self._text_input.setPlaceholderText( text )
    def SetValue( self, text ):
        self._text_input.setText( text )
| 31.746011 | 294 | 0.536799 |
d40d7e83fee4185eacdcae4287e23baf582632ad | 2,489 | py | Python | python_challenge/tests.py | predator4hack/JdeRobot_Programming_tests | b9bb5bf2d3afec3815d50933882a03df90b293e2 | [
"MIT"
] | null | null | null | python_challenge/tests.py | predator4hack/JdeRobot_Programming_tests | b9bb5bf2d3afec3815d50933882a03df90b293e2 | [
"MIT"
] | null | null | null | python_challenge/tests.py | predator4hack/JdeRobot_Programming_tests | b9bb5bf2d3afec3815d50933882a03df90b293e2 | [
"MIT"
] | null | null | null | from game_of_life import GameOfLife
import numpy as np
import json
import unittest
from sample_app import configurations
class TestCgolMethods(unittest.TestCase):
    """Unit tests for GameOfLife: grid creation, pattern placement and one
    update step for the classic still-life / oscillator / spaceship patterns."""
    def setUp(self):
        # Fresh game and pattern configurations per test; grid size comes
        # from config.json, clamped to [8, 30] exactly as the app does.
        self.g = GameOfLife()
        self.conf = configurations()
        with open('config.json') as config_fd:
            config = json.load(config_fd)
        self.grid_width = np.clip(config['width'], 8, 30)
        self.grid_height = np.clip(config['height'], 8, 30)
    def test_created_grid(self):
        # A new game starts with an all-dead grid.
        np.testing.assert_array_equal(self.g.get_grid(), np.zeros((self.grid_width, self.grid_height)))
    def test_pattern_placement(self):
        # Patterns are stamped into the grid at the given (row, col) offsets.
        self.g.add_object(self.conf.Beacon, 0, 0)
        self.g.add_object(self.conf.Block, 10, 10)
        test_grid = np.zeros((self.grid_width, self.grid_height))
        test_grid[0:4, 0:4] = np.array([[1, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 1]])
        test_grid[10:12, 10:12] = np.array([[1, 1], [1, 1]])
        np.testing.assert_array_equal(self.g.get_grid(), test_grid)
    def test_still_life(self):
        # Still lifes must be unchanged after one update step.
        self.g.add_object(self.conf.Block, 0, 0)
        self.g.add_object(self.conf.Beehive, 6, 0)
        self.g.add_object(self.conf.Tub, 6, 12)
        self.g.update_grid()
        test_grid = np.zeros((self.grid_width, self.grid_height))
        test_grid[0:2, 0:2] = np.array([[1, 1], [1, 1]])
        test_grid[6:9, 0:4] = np.array([[0, 1, 1, 0], [1, 0, 0, 1], [0, 1, 1, 0]])
        test_grid[6:9,12:15] = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]])
        np.testing.assert_array_equal(self.g.get_grid(), test_grid)
    def test_oscillators(self):
        # Period-2 oscillators must be in their second phase after one step.
        self.g.add_object(self.conf.Blinker, 0, 0)
        self.g.add_object(self.conf.Toad, 6, 0)
        self.g.add_object(self.conf.Beacon, 6, 12)
        self.g.update_grid()
        test_grid = np.zeros((self.grid_width, self.grid_height))
        test_grid[0:3, 0:3] = np.array([[0, 1, 0], [0, 1, 0], [0, 1, 0]])
        test_grid[6:10, 0:4] = np.array([[0, 0, 1, 0], [1, 0, 0, 1], [1, 0, 0, 1], [0, 1, 0, 0]])
        test_grid[6:10,12:16] = np.array([[1, 1, 0, 0], [1, 1, 0, 0], [0, 0, 1, 1], [0, 0, 1, 1]])
        np.testing.assert_array_equal(self.g.get_grid(), test_grid)
    def test_spaceships(self):
        # Spaceships must have advanced one generation after one step.
        self.g.add_object(self.conf.Glider, 0, 0)
        self.g.add_object(self.conf.LWSpaceship, 12, 5)
        self.g.update_grid()
        test_grid = np.zeros((self.grid_width, self.grid_height))
        test_grid[0:3, 1:4] = np.array([[1, 0, 0], [0, 1, 1], [1, 1, 0]])
        test_grid[12:16, 5:10] = np.array([[0, 1, 1, 1, 1], [1, 0, 0, 0, 1], [0, 0, 0, 0, 1], [1, 0, 0, 1, 0]])
        np.testing.assert_array_equal(self.g.get_grid(), test_grid)
if __name__ == '__main__':
    unittest.main()
98eb93a6121bd498ef038fc512f075acef371977 | 10,063 | py | Python | scripts/replication_studies.py | gregstarr/trough_stats | 4e8229eb55e016d4910420ede035ace1b1b52d38 | [
"MIT"
] | null | null | null | scripts/replication_studies.py | gregstarr/trough_stats | 4e8229eb55e016d4910420ede035ace1b1b52d38 | [
"MIT"
] | null | null | null | scripts/replication_studies.py | gregstarr/trough_stats | 4e8229eb55e016d4910420ede035ace1b1b52d38 | [
"MIT"
] | null | null | null | import pandas
import pathlib
import numpy as np
from scipy.stats import binned_statistic, linregress
from matplotlib import pyplot as plt
from ttools import io, utils, config, plotting
from get_dataset import get_tec_dataset
# Borrow the ggplot color cycle, then revert to the default style so only
# the palette (not the whole theme) is used in the figures below.
plt.style.use('ggplot')
colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
plt.style.use('default')
# Map season name -> month-of-year indices (0 = January) used to split the
# datasets into winter / equinox / summer panels.
MONTH_INDEX = {
    'winter': [0, 1, 10, 11],
    'equinox': [2, 3, 8, 9],
    'summer': [4, 5, 6, 7],
}
def get_swarm_troughs(threshold):
    """Load all SWARM trough-candidate files and reduce them to one row per
    satellite pass.

    A pass's candidates with ``min_dne <= threshold`` count as detections;
    one detection row is kept per pass.  Passes with exactly one candidate
    row and no detection are kept as explicit "no trough" rows.

    threshold: detection cutoff on the minimum delta-Ne (e.g. -0.2).
    Returns a pandas DataFrame sorted by original index.
    """
    swarm_trough_dir = pathlib.Path("E:\\ttools\\swarm\\trough candidates")
    swarm_troughs = []
    for p in swarm_trough_dir.glob("*.h5"):
        print(p)
        all_troughs = pandas.read_hdf(p, 'troughs')
        swarm_troughs.append(all_troughs)
    swarm_troughs = pandas.concat(swarm_troughs, ignore_index=True)
    # Encode direction as bool (True == 'up') and satellite letter as 0/1/2.
    swarm_troughs.direction = swarm_troughs.direction == 'up'
    sat = np.zeros(swarm_troughs.shape[0], dtype=int)
    for i, s in enumerate(['A', 'B', 'C']):
        m = swarm_troughs.sat == s
        sat[m] = i
    swarm_troughs.sat = sat
    swarm_troughs.tec_time = utils.datetime64_to_timestamp(swarm_troughs.tec_time)
    # Build a unique integer pass id from (day, tec map index, sat, direction).
    # 144 slots/day — presumably 24 hourly maps x 3 sats x 2 directions; confirm
    # against the trough-candidate generation code.
    d = swarm_troughs.tec_time.values.astype('datetime64[s]').astype('datetime64[D]')
    t = (d - d[0]) / np.timedelta64(1, 'D')
    swarm_troughs['sat_ind'] = (144 * t + 6 * swarm_troughs.tec_ind + 2 * swarm_troughs.sat + swarm_troughs.direction).values
    yes_troughs = swarm_troughs[swarm_troughs.min_dne <= threshold]
    all_unique_ids, all_unique_counts = np.unique(swarm_troughs.sat_ind, return_counts=True)
    # Keep a single detection row per pass id.
    yes_unique_ids, yes_unique_idx = np.unique(yes_troughs.sat_ind, axis=0, return_index=True)
    yes_troughs = yes_troughs.iloc[yes_unique_idx]
    # Passes whose only candidate row exists once overall are the no-trough rows.
    no_troughs = swarm_troughs[np.isin(swarm_troughs.sat_ind, all_unique_ids[all_unique_counts == 1])]
    return pandas.concat([no_troughs, yes_troughs]).sort_index()
def aa_figure_8_swarm(ax):
    """Plot trough probability vs Kp (bar chart on ax[0]) and trough minimum
    latitude vs Kp (regression + errorbars on ax[1]) for the SWARM dataset,
    nightside only (-5 < MLT < 5).  Replicates Aa et al. 2020, Figure 8."""
    swarm_troughs = get_swarm_troughs(-.2)
    swarm_troughs = swarm_troughs[(swarm_troughs.min_mlt > -5) & (swarm_troughs.min_mlt < 5)]
    swarm_kp = io.get_kp(swarm_troughs.tec_time.values)
    yes_mask = swarm_troughs.trough
    min_mlat = swarm_troughs.min_mlat[yes_mask]
    mean_stat = binned_statistic(swarm_kp, yes_mask, 'mean', np.arange(10))
    count_stat = binned_statistic(swarm_kp, yes_mask, 'count', np.arange(10))
    # standard error of the per-bin detection proportion
    s = np.sqrt(mean_stat.statistic * (1 - mean_stat.statistic) / (count_stat.statistic - 1))
    ax[0].bar(np.arange(9) + .3, mean_stat.statistic, .4, color=colors[0])
    ax[0].errorbar(np.arange(9) + .3, mean_stat.statistic, yerr=s, fmt='.', c='k', ms=0)
    # per-Kp-bin mean/std of minimum latitude, plus a linear fit over Kp 0-9
    mean_stat = binned_statistic(swarm_kp[yes_mask], min_mlat, 'mean', np.arange(10))
    std_stat = binned_statistic(swarm_kp[yes_mask], min_mlat, 'std', np.arange(10))
    reg = linregress(swarm_kp[yes_mask], min_mlat)
    x = np.array([0, 9])
    y = reg.slope * x + reg.intercept
    ax[1].plot(x, y, '-', c=colors[0], label='Aa 2020')
    ax[1].errorbar(np.arange(9) + .4, mean_stat.statistic, yerr=std_stat.statistic, fmt='o', c=colors[0])
def aa_figure_8_tec(ax, times, tec, troughs):
    """Plot trough probability vs Kp (bars on ax[0]) and trough minimum
    latitude vs Kp (regression + errorbars on ax[1]) for the TEC-derived
    dataset, nightside only (-5 < MLT < 5).  Replicates Aa et al. 2020 Fig 8.

    times, tec, troughs: aligned arrays of map timestamps, TEC maps and
        boolean trough labels (time x mlat x mlt).
    """
    # Fix: work on a copy so the caller's TEC array is not clobbered by the
    # np.inf masking below (aa_figure_4a already copies for the same reason).
    tec = tec.copy()
    fin = np.isfinite(tec)
    tec[~troughs] = np.inf  # restrict the minimum search to labelled trough bins
    min_tec = np.min(tec, axis=1)
    min_mlat = config.mlat_vals[np.argmin(tec, axis=1)]
    kp = io.get_kp(times)
    # Nightside sector only, matching the swarm comparison; hoisted once.
    mlt_mask = (config.mlt_vals > -5) & (config.mlt_vals < 5)
    mask = mlt_mask[None, :] & np.isfinite(min_tec)
    y = np.any(troughs[:, :, mlt_mask], axis=1)
    # Require at least half the latitude bins to be finite so "no trough"
    # means "observed and absent" rather than "no data".
    f = np.mean(fin[:, :, mlt_mask], axis=1) >= .5
    x = np.broadcast_to(kp[:, None], y.shape)
    mean_stat = binned_statistic(x[f], y[f], 'mean', np.arange(10))
    count_stat = binned_statistic(x[f], y[f], 'count', np.arange(10))
    # standard error of the per-bin detection proportion
    s = np.sqrt(mean_stat.statistic * (1 - mean_stat.statistic) / (count_stat.statistic - 1))
    ax[0].bar(np.arange(9) + .7, mean_stat.statistic, .4, color=colors[1])
    ax[0].errorbar(np.arange(9) + .7, mean_stat.statistic, yerr=s, fmt='.', c='k', ms=0)
    x = np.broadcast_to(kp[:, None], min_mlat.shape)[mask]
    y = min_mlat[mask]
    mean_stat = binned_statistic(x, y, 'mean', np.arange(10))
    std_stat = binned_statistic(x, y, 'std', np.arange(10))
    reg = linregress(x, y)
    xr = np.array([0, 9])
    yr = reg.slope * xr + reg.intercept
    ax[1].plot(xr, yr, '-', c=colors[1], label='Ours')
    ax[1].errorbar(np.arange(9) + .6, mean_stat.statistic, yerr=std_stat.statistic, fmt='o', c=colors[1])
def aa_figure_2ghi_swarm(ax):
    """Add the Swarm-based seasonal curves to figure 2 panels g-i.

    For each season in MONTH_INDEX, plots mean min-trough-mlat vs MLT
    (with std error bars) from Swarm trough detections at Kp <= 3.
    One panel per season is drawn into ax[i].
    """
    swarm_troughs = get_swarm_troughs(-.2)
    swarm_troughs = swarm_troughs[swarm_troughs.trough]
    swarm_kp = io.get_kp(swarm_troughs.tec_time.values)
    swarm_troughs = swarm_troughs[swarm_kp <= 3]
    x = swarm_troughs.min_mlt
    y = swarm_troughs.min_mlat
    time = swarm_troughs.tec_time.values.astype('datetime64[s]')
    # Month number within the year (0..11).
    months = (time.astype('datetime64[M]') - time.astype('datetime64[Y]')).astype(int)
    be = np.arange(-12, 14) - .5
    bc = np.arange(-12, 13)
    for i, (season, mo) in enumerate(MONTH_INDEX.items()):
        # Select the samples whose month falls in this season (was a manual
        # `mask |= months == m` loop; np.isin is the idiomatic equivalent).
        mask = np.isin(months, mo)
        mean_result = binned_statistic(x[mask], y[mask], 'mean', be)
        std_result = binned_statistic(x[mask], y[mask], 'std', be)
        ax[i].errorbar(bc - .2, mean_result.statistic, yerr=std_result.statistic, fmt='-', c=colors[0], errorevery=2)
        ax[i].set_title(season)
def aa_figure_2ghi_tec(ax, times, tec, troughs):
    """Add the TEC-based seasonal curves to figure 2 panels g-i.

    For each season in MONTH_INDEX, plots mean min-trough-mlat vs MLT
    (with std error bars) from TEC trough detections at Kp <= 3.

    NOTE(review): mutates ``tec`` in place (non-trough bins set to inf) --
    callers are expected to pass copies (see aa_figure_2ghi).
    """
    tec[~troughs] = np.inf
    min_tec = np.min(tec, axis=1)
    kp = io.get_kp(times)
    mask = np.isfinite(min_tec) & (kp <= 3)[:, None]
    x = np.broadcast_to(config.mlt_vals[None, :], mask.shape)[mask]
    t = np.broadcast_to(times[:, None], mask.shape)[mask].astype('datetime64[s]')
    y = config.mlat_vals[np.argmin(tec, axis=1)]
    y = y[mask]
    # Month number within the year (0..11).
    months = (t.astype('datetime64[M]') - t.astype('datetime64[Y]')).astype(int)
    be = np.arange(-12, 14) - .5
    bc = np.arange(-12, 13)
    for i, (season, mo) in enumerate(MONTH_INDEX.items()):
        # Select the samples whose month falls in this season (was a manual
        # `mask |= months == m` loop; np.isin is the idiomatic equivalent).
        mask = np.isin(months, mo)
        mean_result = binned_statistic(x[mask], y[mask], 'mean', be)
        std_result = binned_statistic(x[mask], y[mask], 'std', be)
        ax[i].errorbar(bc + .2, mean_result.statistic, yerr=std_result.statistic, fmt='-', c=colors[1], errorevery=2)
def aa_figure_4a(times, tec, troughs):
    """Plot trough occurrence probability vs day-of-year and MLT (figure 4a).

    Builds 40x40 2-D histograms over (day of year, MLT) for trough
    detections and for all finite observations, then plots their ratio,
    blanking cells with fewer than 100 observations. Inputs are copied
    and restricted to Kp <= 3.
    """
    times = times.copy()
    tec = tec.copy()
    troughs = troughs.copy()
    kp = io.get_kp(times)
    times = times[kp <= 3]
    tec_troughs = troughs[kp <= 3]
    tec = tec[kp <= 3]
    # Fractional day of year for each sample.
    x = (times.astype('datetime64[s]') - times.astype('datetime64[Y]')).astype('timedelta64[s]').astype(float) / (60 * 60 * 24)
    x = np.broadcast_to(x[:, None], (tec_troughs.shape[0], tec_troughs.shape[2]))
    y = np.broadcast_to(config.mlt_vals[None, :], (tec_troughs.shape[0], tec_troughs.shape[2]))
    # Small random jitter so identical MLT values don't stack in one bin.
    y = y + np.random.randn(*y.shape) * .02
    trough_mask = np.any((tec_troughs * np.isfinite(tec)), axis=1)
    obs_mask = np.any(np.isfinite(tec), axis=1)
    total_counts, *_ = np.histogram2d(x[obs_mask], y[obs_mask], bins=[40, 40], range=[(0, 365), [-12, 12]])
    fig, ax = plt.subplots(dpi=300)
    counts, xe, ye, pcm = ax.hist2d(x[trough_mask], y[trough_mask], bins=[40, 40], range=[(0, 365), [-12, 12]], cmap='jet')
    fig, ax = plt.subplots(dpi=300)
    # Occurrence probability; sparse cells are blanked after the division.
    prob = counts / total_counts
    prob[total_counts < 100] = np.nan
    pcm = ax.pcolormesh(xe, ye, prob.T, cmap='jet')
    # X ticks at every other month boundary, expressed as fractional day of year.
    l = np.datetime64('2010-01-01T00:00:00') + np.arange(6).astype('timedelta64[M]').astype('timedelta64[s]') * 2
    l = (l.astype('datetime64[s]') - l.astype('datetime64[Y]')).astype('timedelta64[s]').astype(float) / (60 * 60 * 24)
    ax.set_xticks(l)
    plt.colorbar(pcm)
def aa_figure_2abc(times, tec, troughs):
    """Draw figure 2 panels a-c: polar maps of trough occurrence probability.

    One polar figure per season in MONTH_INDEX, restricted to Kp <= 3.
    Inputs are copied so the caller's arrays are never modified.
    """
    times = times.copy()
    tec = tec.copy()
    troughs = troughs.copy()
    kp = io.get_kp(times)
    times = times[kp <= 3]
    tec_troughs = troughs[kp <= 3]
    tec = tec[kp <= 3]
    fin = np.isfinite(tec)
    trough = tec_troughs & fin
    # Month number within the year (0..11).
    months = (times.astype('datetime64[M]') - times.astype('datetime64[Y]')).astype(int)
    # enumerate() was used here but the index was unused; iterate directly.
    for season, mo in MONTH_INDEX.items():
        fig = plt.figure(dpi=300)
        ax = fig.add_subplot(polar=True)
        ax.set_title(season)
        # Select the samples whose month falls in this season (was a manual
        # `mask |= months == m` loop; np.isin is the idiomatic equivalent).
        mask = np.isin(months, mo)
        trough_sum = np.sum(trough[mask], axis=0)
        all_sum = np.sum(fin[mask], axis=0)
        # Occurrence probability: trough detections / valid observations per cell.
        p = trough_sum / all_sum
        pcm = plotting.polar_pcolormesh(ax, config.mlat_grid, config.mlt_grid, p, cmap='jet', vmin=0)
        plt.colorbar(pcm)
        plotting.format_polar_mag_ax(ax)
def aa_figure_2ghi(times, tec, troughs):
    """Render figure 2 panels g-i: Swarm curves overlaid with TEC-based curves.

    Uses the ggplot style for this figure only, restoring the default
    style afterwards. Copies are handed to the TEC helper so the caller's
    arrays are never modified.
    """
    plt.style.use('ggplot')
    fig, ax = plt.subplots(1, 3, figsize=(18, 6), dpi=300)
    aa_figure_2ghi_swarm(ax)
    aa_figure_2ghi_tec(ax, times.copy(), tec.copy(), troughs.copy())
    plt.style.use('default')
def aa_figure_8(times, tec, troughs):
    """Render figure 8: Swarm-based and TEC-based Kp statistics side by side.

    Uses the ggplot style for this figure only, restoring the default
    style afterwards. Copies are handed to the TEC helper so the caller's
    arrays are never modified.
    """
    plt.style.use('ggplot')
    fig, ax = plt.subplots(1, 2, figsize=(12, 6), dpi=300)
    aa_figure_8_swarm(ax)
    aa_figure_8_tec(ax, times.copy(), tec.copy(), troughs.copy())
    ax[0].set_ylim(0, 1)
    ax[1].set_ylim(40, 80)
    plt.style.use('default')
if __name__ == "__main__":
    # The two dataset directories were processed by two copy-pasted loops
    # differing only in path and threshold list; drive both from one table.
    runs = [
        (pathlib.Path("E:\\ttools\\tec\\score\\l2_3"), [1.5, 2.0, 2.5]),
        (pathlib.Path("E:\\ttools\\tec\\score\\l2_9"), [0.5, 1.0, 1.5]),
    ]
    for score_dir, thresholds in runs:
        for threshold in thresholds:
            times, tec, troughs = get_tec_dataset(score_dir, threshold)
            aa_figure_2abc(times, tec, troughs)
            aa_figure_2ghi(times, tec, troughs)
            aa_figure_4a(times, tec, troughs)
            aa_figure_8(times, tec, troughs)
    plt.show()
| 41.073469 | 127 | 0.643744 |
2ba7ad2883a3d2116f1b47a199a29f943d3d52c0 | 333 | py | Python | dora/tests/test_share.py | fairinternal/dora | 817c4763057bc8238bedfbf59ca1cdf8c3de7ae7 | [
"MIT"
] | 98 | 2021-09-21T14:27:21.000Z | 2022-03-18T17:46:45.000Z | dora/tests/test_share.py | fairinternal/dora | 817c4763057bc8238bedfbf59ca1cdf8c3de7ae7 | [
"MIT"
] | 6 | 2021-09-22T13:29:48.000Z | 2022-03-14T16:45:30.000Z | dora/tests/test_share.py | fairinternal/dora | 817c4763057bc8238bedfbf59ca1cdf8c3de7ae7 | [
"MIT"
] | 5 | 2021-09-21T12:42:01.000Z | 2022-01-27T17:22:17.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
from dora.share import dump, load
def test_dump_load():
    """Round-trip a nested structure through dump/load; it must come back equal."""
    payload = [1, 2, 4, {'youpi': 'test', 'b': 56.3}]
    assert load(dump(payload)) == payload
| 25.615385 | 61 | 0.675676 |
bddfb4aa8d6ed85cbd7e99359e5e2c9a4a05e397 | 1,111 | py | Python | examples/scikit-learn/logger.py | altescy/logexp | 19389c884c686ca42f691500e82e8963bd039f0c | [
"MIT"
] | 14 | 2020-01-19T08:07:14.000Z | 2021-01-18T19:06:23.000Z | examples/scikit-learn/logger.py | altescy/logexp | 19389c884c686ca42f691500e82e8963bd039f0c | [
"MIT"
] | null | null | null | examples/scikit-learn/logger.py | altescy/logexp | 19389c884c686ca42f691500e82e8963bd039f0c | [
"MIT"
] | null | null | null | from __future__ import annotations
import typing as tp
import logging
import time
from datetime import timedelta
class LogFormatter(logging.Formatter):
    """Formatter that prefixes records with level, wall-clock time and elapsed time.

    The elapsed column counts whole seconds since this formatter was
    constructed. Continuation lines of multi-line messages are indented so
    they align under the message column; empty messages format to "".
    """

    def __init__(self) -> None:
        super().__init__()
        # Reference point for the elapsed-time column.
        self.start_time = time.time()

    def format(self, record):
        elapsed = round(record.created - self.start_time)
        header = "%s - %s - %s" % (
            record.levelname,
            time.strftime("%x %X"),
            timedelta(seconds=elapsed),
        )
        body = record.getMessage()
        if not body:
            return ""
        # Indent continuation lines to line up under the message column
        # (header plus the " - " separator).
        continuation = "\n" + " " * (len(header) + 3)
        return "%s - %s" % (header, body.replace("\n", continuation))
def create_logger(name: tp.Optional[str] = None) -> logging.Logger:
    """Return a logger wired to a single INFO-level console handler.

    Any previously installed handlers on the named logger are discarded;
    the logger itself is set to DEBUG and does not propagate to ancestors.
    """
    handler = logging.StreamHandler()
    handler.setLevel(logging.INFO)
    handler.setFormatter(LogFormatter())
    logger = logging.getLogger(name)
    logger.handlers = []  # drop anything installed by earlier configuration
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    logger.addHandler(handler)
    return logger
| 27.097561 | 71 | 0.649865 |
98e4e29d1b9497117e98ef9dcc0faa8c818d5214 | 29,747 | py | Python | github/NamedUser.py | aantr/WindowsHostManager | 75d248fc8991d471c6802fa79e7dee44a5708c65 | [
"CNRI-Python-GPL-Compatible"
] | 1 | 2021-06-25T09:13:12.000Z | 2021-06-25T09:13:12.000Z | venv/lib/python3.6/site-packages/github/NamedUser.py | rongshaoshuai/blogs | dafeb789428436c1ec8069e605400612b776b8f2 | [
"MIT"
] | 3 | 2021-03-30T23:03:03.000Z | 2021-03-30T23:06:57.000Z | lib/github/NamedUser.py | Corionis/Knobs-And-Scripts | 81a954fd0ed697e5759359ec0383a3f16a841143 | [
"MIT"
] | null | null | null | ############################ Copyrights and license ############################
# #
# Copyright 2012 Steve English <steve.english@navetas.com> #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2014 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2016 Jannis Gebauer <ja.geb@me.com> #
# Copyright 2016 Peter Buckley <dx-pbuckley@users.noreply.github.com> #
# Copyright 2017 Simon <spam@esemi.ru> #
# Copyright 2018 Iraquitan Cordeiro Filho <iraquitanfilho@gmail.com> #
# Copyright 2018 Steve Kowalik <steven@wedontsleep.org> #
# Copyright 2018 Victor Granic <vmg@boreal321.com> #
# Copyright 2018 Wan Liuyang <tsfdye@gmail.com> #
# Copyright 2018 namc <namratachaudhary@users.noreply.github.com> #
# Copyright 2018 sfdye <tsfdye@gmail.com> #
# Copyright 2018 itsbruce <it.is.bruce@gmail.com> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import datetime
import github.Event
import github.Gist
import github.GithubObject
import github.NamedUser
import github.Organization
import github.PaginatedList
import github.Permissions
import github.Plan
import github.Repository
from . import Consts
class NamedUser(github.GithubObject.CompletableGithubObject):
    """
    This class represents NamedUsers. The reference can be found here https://developer.github.com/v3/users/#get-a-single-user

    Attributes are loaded lazily: every property first calls
    ``_completeIfNotSet`` which, for a partially-populated object, issues a
    follow-up API request before returning the backing value. Raw API
    payloads are mapped onto attributes by ``_useAttributes``.
    """
    def __repr__(self):
        return self.get__repr__({"login": self._login.value})
    @property
    def node_id(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._node_id)
        return self._node_id.value
    @property
    def twitter_username(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._twitter_username)
        return self._twitter_username.value
    # Identity is the (id, login) pair; __hash__ and __eq__ are kept consistent.
    def __hash__(self):
        return hash((self.id, self.login))
    def __eq__(self, other):
        return (
            isinstance(other, type(self))
            and self.login == other.login
            and self.id == other.id
        )
    @property
    def avatar_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._avatar_url)
        return self._avatar_url.value
    @property
    def bio(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._bio)
        return self._bio.value
    @property
    def blog(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._blog)
        return self._blog.value
    @property
    def collaborators(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._collaborators)
        return self._collaborators.value
    @property
    def company(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._company)
        return self._company.value
    @property
    def contributions(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._contributions)
        return self._contributions.value
    @property
    def created_at(self):
        """
        :type: datetime.datetime
        """
        self._completeIfNotSet(self._created_at)
        return self._created_at.value
    @property
    def disk_usage(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._disk_usage)
        return self._disk_usage.value
    @property
    def email(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._email)
        return self._email.value
    @property
    def events_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._events_url)
        return self._events_url.value
    @property
    def followers(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._followers)
        return self._followers.value
    @property
    def followers_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._followers_url)
        return self._followers_url.value
    @property
    def following(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._following)
        return self._following.value
    @property
    def following_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._following_url)
        return self._following_url.value
    @property
    def gists_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._gists_url)
        return self._gists_url.value
    @property
    def gravatar_id(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._gravatar_id)
        return self._gravatar_id.value
    @property
    def hireable(self):
        """
        :type: bool
        """
        self._completeIfNotSet(self._hireable)
        return self._hireable.value
    @property
    def html_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._html_url)
        return self._html_url.value
    @property
    def id(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._id)
        return self._id.value
    @property
    def invitation_teams_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._invitation_teams_url)
        return self._invitation_teams_url.value
    @property
    def inviter(self):
        """
        :type: github.NamedUser.NamedUser
        """
        self._completeIfNotSet(self._inviter)
        return self._inviter.value
    @property
    def location(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._location)
        return self._location.value
    @property
    def login(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._login)
        return self._login.value
    @property
    def name(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._name)
        return self._name.value
    @property
    def organizations_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._organizations_url)
        return self._organizations_url.value
    @property
    def owned_private_repos(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._owned_private_repos)
        return self._owned_private_repos.value
    @property
    def permissions(self):
        """
        :type: :class:`github.Permissions.Permissions`
        """
        self._completeIfNotSet(self._permissions)
        return self._permissions.value
    @property
    def plan(self):
        """
        :type: :class:`github.Plan.Plan`
        """
        self._completeIfNotSet(self._plan)
        return self._plan.value
    @property
    def private_gists(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._private_gists)
        return self._private_gists.value
    @property
    def public_gists(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._public_gists)
        return self._public_gists.value
    @property
    def public_repos(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._public_repos)
        return self._public_repos.value
    @property
    def received_events_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._received_events_url)
        return self._received_events_url.value
    @property
    def repos_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._repos_url)
        return self._repos_url.value
    @property
    def role(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._role)
        return self._role.value
    @property
    def site_admin(self):
        """
        :type: bool
        """
        self._completeIfNotSet(self._site_admin)
        return self._site_admin.value
    @property
    def starred_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._starred_url)
        return self._starred_url.value
    @property
    def subscriptions_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._subscriptions_url)
        return self._subscriptions_url.value
    @property
    def suspended_at(self):
        """
        :type: datetime.datetime
        """
        self._completeIfNotSet(self._suspended_at)
        return self._suspended_at.value
    @property
    def team_count(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._team_count)
        return self._team_count.value
    @property
    def total_private_repos(self):
        """
        :type: integer
        """
        self._completeIfNotSet(self._total_private_repos)
        return self._total_private_repos.value
    @property
    def type(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._type)
        return self._type.value
    @property
    def updated_at(self):
        """
        :type: datetime.datetime
        """
        self._completeIfNotSet(self._updated_at)
        return self._updated_at.value
    @property
    def url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._url)
        return self._url.value
    def get_events(self):
        """
        :calls: `GET /users/:user/events <http://developer.github.com/v3/activity/events>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
        """
        return github.PaginatedList.PaginatedList(
            github.Event.Event, self._requester, self.url + "/events", None
        )
    def get_followers(self):
        """
        :calls: `GET /users/:user/followers <http://developer.github.com/v3/users/followers>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
        """
        return github.PaginatedList.PaginatedList(
            NamedUser, self._requester, self.url + "/followers", None
        )
    def get_following(self):
        """
        :calls: `GET /users/:user/following <http://developer.github.com/v3/users/followers>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
        """
        return github.PaginatedList.PaginatedList(
            NamedUser, self._requester, self.url + "/following", None
        )
    def get_gists(self, since=github.GithubObject.NotSet):
        """
        :calls: `GET /users/:user/gists <http://developer.github.com/v3/gists>`_
        :param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
        """
        assert since is github.GithubObject.NotSet or isinstance(
            since, datetime.datetime
        ), since
        url_parameters = dict()
        if since is not github.GithubObject.NotSet:
            url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ")
        return github.PaginatedList.PaginatedList(
            github.Gist.Gist, self._requester, self.url + "/gists", url_parameters
        )
    def get_keys(self):
        """
        :calls: `GET /users/:user/keys <http://developer.github.com/v3/users/keys>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.UserKey.UserKey`
        """
        return github.PaginatedList.PaginatedList(
            github.UserKey.UserKey, self._requester, self.url + "/keys", None
        )
    def get_orgs(self):
        """
        :calls: `GET /users/:user/orgs <http://developer.github.com/v3/orgs>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Organization.Organization`
        """
        return github.PaginatedList.PaginatedList(
            github.Organization.Organization, self._requester, self.url + "/orgs", None
        )
    def get_projects(self, state="open"):
        """
        :calls: `GET /users/:user/projects <https://developer.github.com/v3/projects/#list-user-projects>`_
        :param state: string
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Project.Project`
        """
        assert isinstance(state, str), state
        url_parameters = {"state": state}
        return github.PaginatedList.PaginatedList(
            github.Project.Project,
            self._requester,
            self.url + "/projects",
            url_parameters,
            # Projects API requires an opt-in preview media type.
            headers={"Accept": Consts.mediaTypeProjectsPreview},
        )
    def get_public_events(self):
        """
        :calls: `GET /users/:user/events/public <http://developer.github.com/v3/activity/events>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
        """
        return github.PaginatedList.PaginatedList(
            github.Event.Event, self._requester, self.url + "/events/public", None
        )
    def get_public_received_events(self):
        """
        :calls: `GET /users/:user/received_events/public <http://developer.github.com/v3/activity/events>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
        """
        return github.PaginatedList.PaginatedList(
            github.Event.Event,
            self._requester,
            self.url + "/received_events/public",
            None,
        )
    def get_received_events(self):
        """
        :calls: `GET /users/:user/received_events <http://developer.github.com/v3/activity/events>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
        """
        return github.PaginatedList.PaginatedList(
            github.Event.Event, self._requester, self.url + "/received_events", None
        )
    def get_repo(self, name):
        """
        :calls: `GET /repos/:owner/:repo <http://developer.github.com/v3/repos>`_
        :param name: string
        :rtype: :class:`github.Repository.Repository`
        """
        assert isinstance(name, str), name
        headers, data = self._requester.requestJsonAndCheck(
            "GET", "/repos/" + self.login + "/" + name
        )
        return github.Repository.Repository(
            self._requester, headers, data, completed=True
        )
    def get_repos(
        self,
        type=github.GithubObject.NotSet,
        sort=github.GithubObject.NotSet,
        direction=github.GithubObject.NotSet,
    ):
        """
        :calls: `GET /users/:user/repos <http://developer.github.com/v3/repos>`_
        :param type: string
        :param sort: string
        :param direction: string
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
        """
        assert type is github.GithubObject.NotSet or isinstance(type, str), type
        assert sort is github.GithubObject.NotSet or isinstance(sort, str), sort
        assert direction is github.GithubObject.NotSet or isinstance(
            direction, str
        ), direction
        url_parameters = dict()
        if type is not github.GithubObject.NotSet:
            url_parameters["type"] = type
        if sort is not github.GithubObject.NotSet:
            url_parameters["sort"] = sort
        if direction is not github.GithubObject.NotSet:
            url_parameters["direction"] = direction
        return github.PaginatedList.PaginatedList(
            github.Repository.Repository,
            self._requester,
            self.url + "/repos",
            url_parameters,
        )
    def get_starred(self):
        """
        :calls: `GET /users/:user/starred <http://developer.github.com/v3/activity/starring>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
        """
        return github.PaginatedList.PaginatedList(
            github.Repository.Repository, self._requester, self.url + "/starred", None
        )
    def get_subscriptions(self):
        """
        :calls: `GET /users/:user/subscriptions <http://developer.github.com/v3/activity/watching>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
        """
        return github.PaginatedList.PaginatedList(
            github.Repository.Repository,
            self._requester,
            self.url + "/subscriptions",
            None,
        )
    def get_watched(self):
        """
        :calls: `GET /users/:user/watched <http://developer.github.com/v3/activity/starring>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
        """
        return github.PaginatedList.PaginatedList(
            github.Repository.Repository, self._requester, self.url + "/watched", None
        )
    def has_in_following(self, following):
        """
        :calls: `GET /users/:user/following/:target_user <http://developer.github.com/v3/users/followers/#check-if-one-user-follows-another>`_
        :param following: :class:`github.NamedUser.NamedUser`
        :rtype: bool
        """
        assert isinstance(following, github.NamedUser.NamedUser), following
        status, headers, data = self._requester.requestJson(
            "GET", self.url + "/following/" + following._identity
        )
        # 204 No Content means the relationship exists.
        return status == 204
    @property
    def _identity(self):
        # The login is used as this object's identity token when building URLs
        # (see has_in_following above).
        return self.login
    def get_organization_membership(self, org):
        """
        :calls: `GET /orgs/:org/memberships/:username <https://developer.github.com/v3/orgs/members/#get-organization-membership>`_
        :param org: string or :class:`github.Organization.Organization`
        :rtype: :class:`github.Membership.Membership`
        """
        assert isinstance(org, str) or isinstance(
            org, github.Organization.Organization
        ), org
        if isinstance(org, github.Organization.Organization):
            org = org.login
        headers, data = self._requester.requestJsonAndCheck(
            "GET", "/orgs/" + org + "/memberships/" + self.login
        )
        return github.Membership.Membership(
            self._requester, headers, data, completed=True
        )
    # Reset every backing attribute to NotSet; values are filled in later by
    # _useAttributes when an API payload is applied.
    def _initAttributes(self):
        self._avatar_url = github.GithubObject.NotSet
        self._bio = github.GithubObject.NotSet
        self._blog = github.GithubObject.NotSet
        self._collaborators = github.GithubObject.NotSet
        self._company = github.GithubObject.NotSet
        self._contributions = github.GithubObject.NotSet
        self._created_at = github.GithubObject.NotSet
        self._disk_usage = github.GithubObject.NotSet
        self._email = github.GithubObject.NotSet
        self._events_url = github.GithubObject.NotSet
        self._followers = github.GithubObject.NotSet
        self._followers_url = github.GithubObject.NotSet
        self._following = github.GithubObject.NotSet
        self._following_url = github.GithubObject.NotSet
        self._gists_url = github.GithubObject.NotSet
        self._gravatar_id = github.GithubObject.NotSet
        self._hireable = github.GithubObject.NotSet
        self._html_url = github.GithubObject.NotSet
        self._id = github.GithubObject.NotSet
        self._invitation_teams_url = github.GithubObject.NotSet
        self._inviter = github.GithubObject.NotSet
        self._location = github.GithubObject.NotSet
        self._login = github.GithubObject.NotSet
        self._name = github.GithubObject.NotSet
        self._node_id = github.GithubObject.NotSet
        self._organizations_url = github.GithubObject.NotSet
        self._owned_private_repos = github.GithubObject.NotSet
        self._permissions = github.GithubObject.NotSet
        self._plan = github.GithubObject.NotSet
        self._private_gists = github.GithubObject.NotSet
        self._public_gists = github.GithubObject.NotSet
        self._public_repos = github.GithubObject.NotSet
        self._received_events_url = github.GithubObject.NotSet
        self._repos_url = github.GithubObject.NotSet
        self._role = github.GithubObject.NotSet
        self._site_admin = github.GithubObject.NotSet
        self._starred_url = github.GithubObject.NotSet
        self._subscriptions_url = github.GithubObject.NotSet
        self._suspended_at = github.GithubObject.NotSet
        self._team_count = github.GithubObject.NotSet
        self._total_private_repos = github.GithubObject.NotSet
        self._twitter_username = github.GithubObject.NotSet
        self._type = github.GithubObject.NotSet
        self._updated_at = github.GithubObject.NotSet
        self._url = github.GithubObject.NotSet
    # Populate backing attributes from a raw API response dict; keys absent
    # from the payload leave their attribute as NotSet.
    def _useAttributes(self, attributes):
        if "avatar_url" in attributes:  # pragma no branch
            self._avatar_url = self._makeStringAttribute(attributes["avatar_url"])
        if "bio" in attributes:  # pragma no branch
            self._bio = self._makeStringAttribute(attributes["bio"])
        if "blog" in attributes:  # pragma no branch
            self._blog = self._makeStringAttribute(attributes["blog"])
        if "collaborators" in attributes:  # pragma no branch
            self._collaborators = self._makeIntAttribute(attributes["collaborators"])
        if "company" in attributes:  # pragma no branch
            self._company = self._makeStringAttribute(attributes["company"])
        if "contributions" in attributes:  # pragma no branch
            self._contributions = self._makeIntAttribute(attributes["contributions"])
        if "created_at" in attributes:  # pragma no branch
            self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
        if "disk_usage" in attributes:  # pragma no branch
            self._disk_usage = self._makeIntAttribute(attributes["disk_usage"])
        if "email" in attributes:  # pragma no branch
            self._email = self._makeStringAttribute(attributes["email"])
        if "events_url" in attributes:  # pragma no branch
            self._events_url = self._makeStringAttribute(attributes["events_url"])
        if "followers" in attributes:  # pragma no branch
            self._followers = self._makeIntAttribute(attributes["followers"])
        if "followers_url" in attributes:  # pragma no branch
            self._followers_url = self._makeStringAttribute(attributes["followers_url"])
        if "following" in attributes:  # pragma no branch
            self._following = self._makeIntAttribute(attributes["following"])
        if "following_url" in attributes:  # pragma no branch
            self._following_url = self._makeStringAttribute(attributes["following_url"])
        if "gists_url" in attributes:  # pragma no branch
            self._gists_url = self._makeStringAttribute(attributes["gists_url"])
        if "gravatar_id" in attributes:  # pragma no branch
            self._gravatar_id = self._makeStringAttribute(attributes["gravatar_id"])
        if "hireable" in attributes:  # pragma no branch
            self._hireable = self._makeBoolAttribute(attributes["hireable"])
        if "html_url" in attributes:  # pragma no branch
            self._html_url = self._makeStringAttribute(attributes["html_url"])
        if "id" in attributes:  # pragma no branch
            self._id = self._makeIntAttribute(attributes["id"])
        if "invitation_teams_url" in attributes:  # pragma no branch
            self._invitation_teams_url = self._makeStringAttribute(
                attributes["invitation_teams_url"]
            )
        if "inviter" in attributes:  # pragma no branch
            self._inviter = self._makeClassAttribute(
                github.NamedUser.NamedUser, attributes["inviter"]
            )
        if "location" in attributes:  # pragma no branch
            self._location = self._makeStringAttribute(attributes["location"])
        if "login" in attributes:  # pragma no branch
            self._login = self._makeStringAttribute(attributes["login"])
        if "name" in attributes:  # pragma no branch
            self._name = self._makeStringAttribute(attributes["name"])
        if "node_id" in attributes:  # pragma no branch
            self._node_id = self._makeStringAttribute(attributes["node_id"])
        if "organizations_url" in attributes:  # pragma no branch
            self._organizations_url = self._makeStringAttribute(
                attributes["organizations_url"]
            )
        if "owned_private_repos" in attributes:  # pragma no branch
            self._owned_private_repos = self._makeIntAttribute(
                attributes["owned_private_repos"]
            )
        if "permissions" in attributes:  # pragma no branch
            self._permissions = self._makeClassAttribute(
                github.Permissions.Permissions, attributes["permissions"]
            )
        if "plan" in attributes:  # pragma no branch
            self._plan = self._makeClassAttribute(github.Plan.Plan, attributes["plan"])
        if "private_gists" in attributes:  # pragma no branch
            self._private_gists = self._makeIntAttribute(attributes["private_gists"])
        if "public_gists" in attributes:  # pragma no branch
            self._public_gists = self._makeIntAttribute(attributes["public_gists"])
        if "public_repos" in attributes:  # pragma no branch
            self._public_repos = self._makeIntAttribute(attributes["public_repos"])
        if "received_events_url" in attributes:  # pragma no branch
            self._received_events_url = self._makeStringAttribute(
                attributes["received_events_url"]
            )
        if "repos_url" in attributes:  # pragma no branch
            self._repos_url = self._makeStringAttribute(attributes["repos_url"])
        if "role" in attributes:  # pragma no branch
            self._role = self._makeStringAttribute(attributes["role"])
        if "site_admin" in attributes:  # pragma no branch
            self._site_admin = self._makeBoolAttribute(attributes["site_admin"])
        if "starred_url" in attributes:  # pragma no branch
            self._starred_url = self._makeStringAttribute(attributes["starred_url"])
        if "subscriptions_url" in attributes:  # pragma no branch
            self._subscriptions_url = self._makeStringAttribute(
                attributes["subscriptions_url"]
            )
        if "suspended_at" in attributes:  # pragma no branch
            self._suspended_at = self._makeDatetimeAttribute(attributes["suspended_at"])
        if "team_count" in attributes:
            self._team_count = self._makeIntAttribute(attributes["team_count"])
        if "total_private_repos" in attributes:  # pragma no branch
            self._total_private_repos = self._makeIntAttribute(
                attributes["total_private_repos"]
            )
        if "twitter_username" in attributes:  # pragma no branch
            self._twitter_username = self._makeStringAttribute(
                attributes["twitter_username"]
            )
        if "type" in attributes:  # pragma no branch
            self._type = self._makeStringAttribute(attributes["type"])
        if "updated_at" in attributes:  # pragma no branch
            self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
        if "url" in attributes:  # pragma no branch
            self._url = self._makeStringAttribute(attributes["url"])
| 36.998756 | 142 | 0.60433 |
ab81800c1e21abbb85573d262a8209d993f20f06 | 2,616 | py | Python | corehq/apps/reminders/management/commands/populate_app_id_for_survey_keyword.py | tstalka/commcare-hq | 902412b0f97ba0daac173fe284f3adc4c01bcd76 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/reminders/management/commands/populate_app_id_for_survey_keyword.py | tstalka/commcare-hq | 902412b0f97ba0daac173fe284f3adc4c01bcd76 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/reminders/management/commands/populate_app_id_for_survey_keyword.py | tstalka/commcare-hq | 902412b0f97ba0daac173fe284f3adc4c01bcd76 | [
"BSD-3-Clause"
] | null | null | null | import logging
from django.core.management.base import BaseCommand
from corehq.apps.app_manager.util import get_app_id_from_form_unique_id
from corehq.apps.reminders.models import SurveyKeyword
from corehq.dbaccessors.couchapps.all_docs import (
get_deleted_doc_ids_by_class,
get_doc_ids_by_class,
)
from corehq.util.couch import DocUpdate, iter_update
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Populate any SurveyKeyword models that contain a form_unique_id with the associated app_id."
def add_arguments(self, parser):
parser.add_argument(
'--dry-run',
action='store_true',
default=False,
help='Do not actually modify the database, just verbosely log what will happen',
)
def handle(self, dry_run=False, **options):
def _add_field(doc):
updated = False
log_prefix = "{} Domain {}, form unique_id {}".format("[DRY RUN]" if dry_run else "",
doc['domain'],
doc['form_unique_id'])
if doc.get('form_unique_id', None) and not doc.get('app_id', None):
doc['app_id'] = get_app_id_from_form_unique_id(doc['domain'], doc['form_unique_id'])
if doc['app_id']:
updated = True
logger.info("{}: Updated {} to use app id {}".format(log_prefix, doc['_id'], doc['app_id']))
else:
logger.info("{}: Could not find app".format(log_prefix))
for action in doc.get('actions', []):
if action.get('form_unique_id', None) and not action.get('app_id', None):
action['app_id'] = get_app_id_from_form_unique_id(doc['domain'], action['form_unique_id'])
if action['app_id']:
updated = True
logger.info("{}: Updated action in {} to use app id {}".format(log_prefix,
doc['_id'],
action['app_id']))
else:
logger.info("{}: Could not find app".format(log_prefix))
if updated and not dry_run:
return DocUpdate(doc)
doc_ids = get_doc_ids_by_class(SurveyKeyword) + get_deleted_doc_ids_by_class(SurveyKeyword)
iter_update(SurveyKeyword.get_db(), _add_field, doc_ids)
| 45.103448 | 112 | 0.542813 |
ba2803782ddf3264e06f3fb1e3760e37362f5acd | 1,203 | py | Python | tests/core/objects/exceptions.py | idjaw/netman | 58ba898de6e450a24b4f1721ce274ad3e12f9d33 | [
"Apache-2.0"
] | 1 | 2016-01-28T17:56:51.000Z | 2016-01-28T17:56:51.000Z | tests/core/objects/exceptions.py | idjaw/netman | 58ba898de6e450a24b4f1721ce274ad3e12f9d33 | [
"Apache-2.0"
] | 2 | 2021-12-13T20:55:50.000Z | 2022-03-29T22:07:13.000Z | tests/core/objects/exceptions.py | idjaw/netman | 58ba898de6e450a24b4f1721ce274ad3e12f9d33 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import unittest
class ExceptionsComplianceTest(unittest.TestCase):
def test_for_remote_use_all_exceptions_should_be_instantiable_without_an_argument(self):
exceptions_module = importlib.import_module("netman.core.objects.exceptions")
for attribute in dir(exceptions_module):
exception_class = getattr(exceptions_module, attribute)
if isinstance(exception_class, type):
try:
exception_class()
except:
raise AssertionError("Class {0} should be instantiable with no params".format(attribute))
| 36.454545 | 109 | 0.723192 |
50f0f1f527ae05ae74c8b037e9601cfc2e358626 | 2,268 | py | Python | tests/en_spell_test.py | zouning68/pycorrector | 4daaf13e566f2cecc724fb5a77db5d89f1f25203 | [
"Apache-2.0"
] | 1 | 2019-08-07T05:14:01.000Z | 2019-08-07T05:14:01.000Z | tests/en_spell_test.py | zouning68/pycorrector | 4daaf13e566f2cecc724fb5a77db5d89f1f25203 | [
"Apache-2.0"
] | null | null | null | tests/en_spell_test.py | zouning68/pycorrector | 4daaf13e566f2cecc724fb5a77db5d89f1f25203 | [
"Apache-2.0"
] | 1 | 2020-09-23T09:11:49.000Z | 2020-09-23T09:11:49.000Z | # -*- coding: utf-8 -*-
# Author: XuMing <xuming624@qq.com>
# Brief:
import sys
sys.path.append("../")
from pycorrector.en_spell import *
def correction_t():
assert correction('spelling') == 'spelling' # no error
assert correction('speling') == 'spelling' # insert
assert correction('correctud') == 'corrected' # replace 1
assert correction('gorrectud') == 'corrected' # replace 2
assert correction('bycycle') == 'bicycle' # replace
assert correction('inconvient') == 'inconvenient' # insert 2
assert correction('arrainged') == 'arranged' # delete
assert correction('peotrry') == 'poetry' # transpose + delete
assert correction('word') == 'word' # know
assert correction('quintessential') == 'quintessential' # unknow
assert words('the test is it.') == ['the', 'test', 'is', 'it'] # segment
assert len(WORDS) > 100
assert WORDS['the'] > 100
assert P('word') > 0
assert P('quintessential') == 0
assert 0.07 < P('the') < 0.08
return 'unit_test pass'
def spell_t(tests, verbose=False):
"""
run correction(wrong) on all (right,wrong) pairs, and report result
:param tests:
:param verbose:
:return:
"""
import time
start = time.clock()
good, unknown = 0, 0
n = len(tests)
for right, wrong in tests:
w = correction(wrong)
good += (w == right)
if w != right:
unknown += (right not in WORDS)
if verbose:
print('correction({}) => {} ({}); expected {} ({})'.format(wrong, w, WORDS[w], right, WORDS[right]))
dt = time.clock() - start
print('{:.0%} of {} correct ({:.0%} unknown) at {:.0f} words per second'.format(good / n, n, unknown / n, n / dt))
def get_set(lines):
"""
parse 'right, wrong1, wrong2' lines into [('right', 'wrong1'), ('right', 'wrong2')] pairs
:param lines:
:return:
"""
return [(right, wrong) for (right, wrongs) in (line.split(':') for line in lines) for wrong in wrongs.split()]
if __name__ == '__main__':
print(correction_t())
spell_t(get_set(open('../pycorrector/data/en/spell-testset1.txt')),verbose=True) # Dev set
spell_t(get_set(open('../pycorrector/data/en/spell-testset2.txt')),verbose=True) # final test set
| 36 | 118 | 0.604056 |
52a823b6a0ea6eb3f80a94828258d94152530c1a | 13,253 | py | Python | lib/modules/_collections.py | JJTech0130/pypyjs-pwa | 1b5820212971cfab683715b21cd97e335b681546 | [
"MIT"
] | 195 | 2016-01-14T16:03:02.000Z | 2021-12-29T09:15:02.000Z | lib/modules/_collections.py | JJTech0130/pypyjs-pwa | 1b5820212971cfab683715b21cd97e335b681546 | [
"MIT"
] | 75 | 2016-01-14T16:03:02.000Z | 2020-04-29T22:51:53.000Z | lib/modules/_collections.py | JJTech0130/pypyjs-pwa | 1b5820212971cfab683715b21cd97e335b681546 | [
"MIT"
] | 11 | 2015-09-07T14:26:08.000Z | 2020-04-10T07:20:41.000Z | """High performance data structures
"""
#
# Copied and completed from the sandbox of CPython
# (nondist/sandbox/collections/pydeque.py rev 1.1, Raymond Hettinger)
#
# Note that PyPy also contains a built-in module '_collections' which will hide
# this one if compiled in.
try:
from threading import _get_ident as _thread_ident
except ImportError:
def _thread_ident():
return -1
n = 30
LFTLNK = n
RGTLNK = n+1
BLOCKSIZ = n+2
# The deque's size limit is d.maxlen. The limit can be zero or positive, or
# None. After an item is added to a deque, we check to see if the size has
# grown past the limit. If it has, we get the size back down to the limit by
# popping an item off of the opposite end. The methods that can trigger this
# are append(), appendleft(), extend(), and extendleft().
class deque(object):
def __new__(cls, iterable=(), *args, **kw):
self = super(deque, cls).__new__(cls, *args, **kw)
self.clear()
return self
def __init__(self, iterable=(), maxlen=None):
self.clear()
if maxlen is not None:
if maxlen < 0:
raise ValueError("maxlen must be non-negative")
self._maxlen = maxlen
add = self.append
for elem in iterable:
add(elem)
@property
def maxlen(self):
return self._maxlen
def clear(self):
self.right = self.left = [None] * BLOCKSIZ
self.rightndx = n//2 # points to last written element
self.leftndx = n//2+1
self.length = 0
self.state = 0
def append(self, x):
self.state += 1
self.rightndx += 1
if self.rightndx == n:
newblock = [None] * BLOCKSIZ
self.right[RGTLNK] = newblock
newblock[LFTLNK] = self.right
self.right = newblock
self.rightndx = 0
self.length += 1
self.right[self.rightndx] = x
if self.maxlen is not None and self.length > self.maxlen:
self.popleft()
def appendleft(self, x):
self.state += 1
self.leftndx -= 1
if self.leftndx == -1:
newblock = [None] * BLOCKSIZ
self.left[LFTLNK] = newblock
newblock[RGTLNK] = self.left
self.left = newblock
self.leftndx = n-1
self.length += 1
self.left[self.leftndx] = x
if self.maxlen is not None and self.length > self.maxlen:
self.pop()
def extend(self, iterable):
if iterable is self:
iterable = list(iterable)
for elem in iterable:
self.append(elem)
def extendleft(self, iterable):
if iterable is self:
iterable = list(iterable)
for elem in iterable:
self.appendleft(elem)
def pop(self):
if self.left is self.right and self.leftndx > self.rightndx:
raise IndexError("pop from an empty deque")
x = self.right[self.rightndx]
self.right[self.rightndx] = None
self.length -= 1
self.rightndx -= 1
self.state += 1
if self.rightndx == -1:
prevblock = self.right[LFTLNK]
if prevblock is None:
# the deque has become empty; recenter instead of freeing block
self.rightndx = n//2
self.leftndx = n//2+1
else:
prevblock[RGTLNK] = None
self.right[LFTLNK] = None
self.right = prevblock
self.rightndx = n-1
return x
def popleft(self):
if self.left is self.right and self.leftndx > self.rightndx:
raise IndexError("pop from an empty deque")
x = self.left[self.leftndx]
self.left[self.leftndx] = None
self.length -= 1
self.leftndx += 1
self.state += 1
if self.leftndx == n:
prevblock = self.left[RGTLNK]
if prevblock is None:
# the deque has become empty; recenter instead of freeing block
self.rightndx = n//2
self.leftndx = n//2+1
else:
prevblock[LFTLNK] = None
self.left[RGTLNK] = None
self.left = prevblock
self.leftndx = 0
return x
def count(self, value):
c = 0
for item in self:
if item == value:
c += 1
return c
def remove(self, value):
# Need to defend mutating or failing comparisons
i = 0
try:
for i in range(len(self)):
if self[0] == value:
self.popleft()
return
self.append(self.popleft())
i += 1
raise ValueError("deque.remove(x): x not in deque")
finally:
self.rotate(i)
def rotate(self, n=1):
length = len(self)
if length <= 1:
return
halflen = length >> 1
if n > halflen or n < -halflen:
n %= length
if n > halflen:
n -= length
elif n < -halflen:
n += length
while n > 0:
self.appendleft(self.pop())
n -= 1
while n < 0:
self.append(self.popleft())
n += 1
def reverse(self):
"reverse *IN PLACE*"
leftblock = self.left
rightblock = self.right
leftindex = self.leftndx
rightindex = self.rightndx
for i in range(self.length // 2):
# Validate that pointers haven't met in the middle
assert leftblock != rightblock or leftindex < rightindex
# Swap
(rightblock[rightindex], leftblock[leftindex]) = (
leftblock[leftindex], rightblock[rightindex])
# Advance left block/index pair
leftindex += 1
if leftindex == n:
leftblock = leftblock[RGTLNK]
assert leftblock is not None
leftindex = 0
# Step backwards with the right block/index pair
rightindex -= 1
if rightindex == -1:
rightblock = rightblock[LFTLNK]
assert rightblock is not None
rightindex = n - 1
def __repr__(self):
threadlocalattr = '__repr' + str(_thread_ident())
if threadlocalattr in self.__dict__:
return 'deque([...])'
else:
self.__dict__[threadlocalattr] = True
try:
if self.maxlen is not None:
return 'deque(%r, maxlen=%s)' % (list(self), self.maxlen)
else:
return 'deque(%r)' % (list(self),)
finally:
del self.__dict__[threadlocalattr]
def __iter__(self):
return deque_iterator(self, self._iter_impl)
def _iter_impl(self, original_state, giveup):
if self.state != original_state:
giveup()
block = self.left
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
for elem in block[l:r]:
yield elem
if self.state != original_state:
giveup()
block = block[RGTLNK]
def __reversed__(self):
return deque_iterator(self, self._reversed_impl)
def _reversed_impl(self, original_state, giveup):
if self.state != original_state:
giveup()
block = self.right
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
for elem in reversed(block[l:r]):
yield elem
if self.state != original_state:
giveup()
block = block[LFTLNK]
def __len__(self):
#sum = 0
#block = self.left
#while block:
# sum += n
# block = block[RGTLNK]
#return sum + self.rightndx - self.leftndx + 1 - n
return self.length
def __getref(self, index):
if index >= 0:
block = self.left
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
span = r-l
if index < span:
return block, l+index
index -= span
block = block[RGTLNK]
else:
block = self.right
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
negative_span = l-r
if index >= negative_span:
return block, r+index
index -= negative_span
block = block[LFTLNK]
raise IndexError("deque index out of range")
def __getitem__(self, index):
block, index = self.__getref(index)
return block[index]
def __setitem__(self, index, value):
block, index = self.__getref(index)
block[index] = value
def __delitem__(self, index):
length = len(self)
if index >= 0:
if index >= length:
raise IndexError("deque index out of range")
self.rotate(-index)
self.popleft()
self.rotate(index)
else:
index = ~index
if index >= length:
raise IndexError("deque index out of range")
self.rotate(index)
self.pop()
self.rotate(-index)
def __reduce_ex__(self, proto):
return type(self), (list(self), self.maxlen)
def __hash__(self):
raise TypeError("deque objects are unhashable")
def __copy__(self):
return self.__class__(self, self.maxlen)
# XXX make comparison more efficient
def __eq__(self, other):
if isinstance(other, deque):
return list(self) == list(other)
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, deque):
return list(self) != list(other)
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, deque):
return list(self) < list(other)
else:
return NotImplemented
def __le__(self, other):
if isinstance(other, deque):
return list(self) <= list(other)
else:
return NotImplemented
def __gt__(self, other):
if isinstance(other, deque):
return list(self) > list(other)
else:
return NotImplemented
def __ge__(self, other):
if isinstance(other, deque):
return list(self) >= list(other)
else:
return NotImplemented
def __iadd__(self, other):
self.extend(other)
return self
class deque_iterator(object):
def __init__(self, deq, itergen):
self.counter = len(deq)
def giveup():
self.counter = 0
raise RuntimeError("deque mutated during iteration")
self._gen = itergen(deq.state, giveup)
def next(self):
res = next(self._gen)
self.counter -= 1
return res
def __iter__(self):
return self
class defaultdict(dict):
def __init__(self, *args, **kwds):
if len(args) > 0:
default_factory = args[0]
args = args[1:]
if not callable(default_factory) and default_factory is not None:
raise TypeError("first argument must be callable")
else:
default_factory = None
self.default_factory = default_factory
super(defaultdict, self).__init__(*args, **kwds)
def __missing__(self, key):
# from defaultdict docs
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __repr__(self, recurse=set()):
if id(self) in recurse:
return "defaultdict(...)"
try:
recurse.add(id(self))
return "defaultdict(%s, %s)" % (repr(self.default_factory), super(defaultdict, self).__repr__())
finally:
recurse.remove(id(self))
def copy(self):
return type(self)(self.default_factory, self)
def __copy__(self):
return self.copy()
def __reduce__(self):
"""
__reduce__ must return a 5-tuple as follows:
- factory function
- tuple of args for the factory function
- additional state (here None)
- sequence iterator (here None)
- dictionary iterator (yielding successive (key, value) pairs
This API is used by pickle.py and copy.py.
"""
return (type(self), (self.default_factory,), None, None, self.iteritems())
| 30.396789 | 108 | 0.526598 |
85a3300b9884358c8b2521d3d409003617f8d8f6 | 162 | py | Python | contrib/tests/assets/python/tests/test_sample.py | rockstack/rock | 1d010d942c5b1c8fd198223ac1f4a3dd5d690edb | [
"MIT"
] | 1 | 2015-03-13T06:01:06.000Z | 2015-03-13T06:01:06.000Z | contrib/tests/assets/python/tests/test_sample.py | rockstack/rock | 1d010d942c5b1c8fd198223ac1f4a3dd5d690edb | [
"MIT"
] | null | null | null | contrib/tests/assets/python/tests/test_sample.py | rockstack/rock | 1d010d942c5b1c8fd198223ac1f4a3dd5d690edb | [
"MIT"
] | null | null | null | import unittest
import sample
class TestCase(unittest.TestCase):
def test_simple(self):
self.assertEqual(sample.convert('# Test'), '<h1>Test</h1>')
| 20.25 | 67 | 0.697531 |
3e50bc50221b9a4a6d3b0fd77623d2ff08fce1b1 | 3,412 | py | Python | preprocess/PTMsites_src/PTMsites_process.py | Rongtingting/PTM-X- | 48865210a78599542f63d62709ac42acfb6eb8b4 | [
"Apache-2.0"
] | 2 | 2017-12-07T03:31:30.000Z | 2021-07-15T09:38:16.000Z | preprocess/PTMsites_src/PTMsites_process.py | Rongtingting/PTM-X- | 48865210a78599542f63d62709ac42acfb6eb8b4 | [
"Apache-2.0"
] | null | null | null | preprocess/PTMsites_src/PTMsites_process.py | Rongtingting/PTM-X- | 48865210a78599542f63d62709ac42acfb6eb8b4 | [
"Apache-2.0"
] | 2 | 2018-01-15T07:46:30.000Z | 2020-07-08T12:57:03.000Z | #!/usr/bin/python2.7
# run python PTMsites_process.py
import os
import numpy as np
from optparse import OptionParser
def load_file(PTM_file, species_name, keys):
data = np.loadtxt(PTM_file, delimiter='\t', skiprows=3, dtype="str")
key_idx = np.array([],"int")
for i in range(len(keys)):
_idx = np.where(data[0,:] == keys[i])[0]
if _idx.shape[0] == 0:
print("There is no keywords of %s " %keys[i] + "in the file %s!" %PTM_file)
if _idx.shape[0] > 1:
print("There is multiple keywords of %s " %keys[i] + "in the file %s!" %PTM_file)
key_idx = np.append(key_idx, _idx[0])
spc_idx = np.where(data[:,key_idx[0]]==species_name)[0]
#print(np.unique(data[:,key_idx[0]], return_counts=True))
RV = data[spc_idx, :][:, key_idx]
return RV
if __name__ == '__main__':
#0. parse command line options
parser = OptionParser()
parser.add_option("--data_dir",dest="data_dir",
help="The diroctory of the PTM sites data")
parser.add_option("--file_list",dest="file_list",
help="The list file that contains the files waiting for processing")
parser.add_option("--species",dest="species",
help="The species wanted to obtained from the full data, e.g., human or mouse")
parser.add_option("--out_file",dest="out_file",
help="The file for saving processed data",default='untitled_PTMsite_file.txt')
(options, args) = parser.parse_args()
data_dir = options.data_dir
file_list = options.file_list
species = options.species
out_file = options.out_file
# define the keys that we will use
keys = ["ORGANISM", "PROTEIN", "ACC_ID", "MOD_RSD", "MOD_RSD", "SITE_+/-7_AA",
"LT_LIT","MS_LIT","MS_CST"]
#keys = ["ORG", "PROTEIN", "ACC_ID", "MOD_TYPE", "MOD_RSD", "MODSITE_SEQ",
# "PUBMED_LTP", "PUBMED_MS2", "CST_MS2"]
# load the list file that contains the processing files
fid = open(file_list,"r")
all_files = fid.readlines()
fid.close()
# load all files that contains the PTM sites
PTM_file = all_files[0].split()[0]
PTMsites = np.array([], dtype="S50").reshape(-1, len(keys))
for i in range(0,len(all_files)):
PTM_file = all_files[i].split()[0]
PTM_type = os.path.basename(all_files[i]).split("_")[0]
PTMsites_tmp = load_file(os.path.join(data_dir, PTM_file), species, keys)
PTMsites_tmp[:,3] = PTM_type
PTMsites_tmp[:,4] = np.array([x.split("-")[0] for x in PTMsites_tmp[:,4]])
PTMsites = np.append(PTMsites, PTMsites_tmp, axis=0)
if(PTMsites_tmp.shape[0] > 0):
print("%d %s for %s included!" %(len(PTMsites_tmp), PTM_type, species))
else:
print("%d %s for %s included!" %(0, PTM_type, species))
# obtain the location of the PTMs, and sort the PTMs by the protein name,
# then by the PTM location
rsd_loc = np.zeros(PTMsites.shape[0],"int")
for i in range(PTMsites.shape[0]):
rsd_loc[i] = PTMsites[i,4][1:]
idx = np.lexsort((rsd_loc, PTMsites[:,1]))
PTMsites = PTMsites[idx,:]
# save the data into txt file
keys[3] = "MOD_TYPE"
fid = open(out_file,"w")
key_line = "\t".join(keys) + "\n"
fid.writelines(key_line)
for i in range(PTMsites.shape[0]):
data_line = "\t".join(list(PTMsites[i,:])) + "\n"
fid.writelines(data_line)
fid.close()
| 39.218391 | 93 | 0.621043 |
e5728d096ff479d854c970b37756cd83b98e40b9 | 571 | py | Python | lello/users/migrations/0002_auto_20200603_0840.py | FR98/lello-API | 2b2deddd04b00d893fdd1194674d354e5002b40e | [
"MIT"
] | null | null | null | lello/users/migrations/0002_auto_20200603_0840.py | FR98/lello-API | 2b2deddd04b00d893fdd1194674d354e5002b40e | [
"MIT"
] | null | null | null | lello/users/migrations/0002_auto_20200603_0840.py | FR98/lello-API | 2b2deddd04b00d893fdd1194674d354e5002b40e | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-06-03 08:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('users', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='userdetail',
name='user',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 25.954545 | 113 | 0.672504 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.