hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a15522556ac2707bb2e56fc9f0957552104a37f
| 504
|
py
|
Python
|
checkboxes.py
|
PiyushKumar186/programming
|
4dc17488a2d197ccdb6acd6f80732da81147bb1b
|
[
"MIT"
] | null | null | null |
checkboxes.py
|
PiyushKumar186/programming
|
4dc17488a2d197ccdb6acd6f80732da81147bb1b
|
[
"MIT"
] | null | null | null |
checkboxes.py
|
PiyushKumar186/programming
|
4dc17488a2d197ccdb6acd6f80732da81147bb1b
|
[
"MIT"
] | null | null | null |
# Simple Tk form with two checkboxes and a "Show" button that prints their state.
# Works on both Python 3 (tkinter) and Python 2 (Tkinter).
try:
    from tkinter import *  # Python 3
except ImportError:
    from Tkinter import *  # Python 2 fallback
master = Tk()
def var_states():
    """Print the current 0/1 state of both checkbox variables."""
    print("male: %d,\nfemale: %d" % (var1.get(), var2.get()))
Label(master, text="Your sex:").grid(row=0, sticky=W)
var1 = IntVar()
Checkbutton(master, text="male", variable=var1).grid(row=1, sticky=W)
var2 = IntVar()
# Fixed typo: the grid option is 'sticky', not 'stick'.
Checkbutton(master, text="female", variable=var2).grid(row=2, sticky=W)
Button(master, text='Quit', command=master.quit).grid(row=3, sticky=W, pady=4)
Button(master, text='Show', command=var_states).grid(row=4, sticky=W, pady=4)
mainloop()
| 36
| 77
| 0.702381
|
4a1552fdcb94ea12aea00ecce6c40ae936e168b9
| 848
|
py
|
Python
|
zar/urls.py
|
jdevries3133/ea_internal_tools
|
7c9f411605021ac8aec4ef734da065cd1f611208
|
[
"Apache-2.0"
] | null | null | null |
zar/urls.py
|
jdevries3133/ea_internal_tools
|
7c9f411605021ac8aec4ef734da065cd1f611208
|
[
"Apache-2.0"
] | 4
|
2021-04-08T20:25:58.000Z
|
2021-09-22T19:38:45.000Z
|
zar/urls.py
|
jdevries3133/ea_internal_tools
|
7c9f411605021ac8aec4ef734da065cd1f611208
|
[
"Apache-2.0"
] | null | null | null |
# URL configuration for the zar app: maps each endpoint to its view function.
from django.urls import path
from .views import (
    file_upload,
    name_match,
    monitor_progress,
    download_previous_reports,
    download_direct,
    skip_name_match,
    no_active_meeting_decision_fork,
)
# Route table; the 'name' values are used for reverse() / {% url %} lookups.
urlpatterns = [
    path('', file_upload, name='file_upload'),
    path('name-match/', name_match, name='name_match'),
    path('monitor-progress/', monitor_progress, name='monitor_progress'),
    path(
        'download-previous/',
        download_previous_reports,
        name='download_previous_reports'
    ),
    path(
        'no-active-meeting/',
        no_active_meeting_decision_fork,
        name='no_active_meeting_decision_fork'
    ),
    path('snm/', skip_name_match, name='skip_name_match'),
    path(
        'report-download-direct/<int:pk>/',
        download_direct,
        name='download_direct'
    ),
]
| 24.941176
| 73
| 0.658019
|
4a1553adcca88aaa6ed2588cabea3f4b8344d4dc
| 1,962
|
py
|
Python
|
taurus_pyqtgraph/examples/y2axis.py
|
jordiandreu/taurus_pyqtgraph
|
f2500da108809e01b624e41a6b897f6bdccfafbe
|
[
"CC-BY-3.0"
] | null | null | null |
taurus_pyqtgraph/examples/y2axis.py
|
jordiandreu/taurus_pyqtgraph
|
f2500da108809e01b624e41a6b897f6bdccfafbe
|
[
"CC-BY-3.0"
] | null | null | null |
taurus_pyqtgraph/examples/y2axis.py
|
jordiandreu/taurus_pyqtgraph
|
f2500da108809e01b624e41a6b897f6bdccfafbe
|
[
"CC-BY-3.0"
] | null | null | null |
#!/usr/bin/env python
#############################################################################
##
# This file is part of Taurus
##
# http://taurus-scada.org
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Taurus is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Taurus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Taurus. If not, see <http://www.gnu.org/licenses/>.
##
#############################################################################
"""Example on using a tpg.Y2ViewBox to provide a secondary Y axis"""
from PyQt5 import Qt
import pyqtgraph as pg
import numpy
from taurus.qt.qtgui.tpg import Y2ViewBox, CurvesPropertiesTool
if __name__ == "__main__":
    import sys
    # Standard Qt application boilerplate plus a pyqtgraph plot widget.
    app = Qt.QApplication([])
    w = pg.PlotWidget()
    # add Y2 viewbox (provides a ViewBox associated to bottom & right axes)
    y2 = Y2ViewBox()
    y2.attachToPlotItem(w.getPlotItem())
    # add a data item to Y1 (just as you would normally)
    c1 = pg.PlotDataItem(name="c1", pen="c")
    c1.setData(y=numpy.linspace(0, 20, 250))
    w.addItem(c1)
    # add a data item to Y2 (similar, but adding it to the secondary ViewBox!)
    c2 = pg.PlotDataItem(name="c2", pen="y")
    c2.setData(y=numpy.random.rand(250))
    y2.addItem(c2)  # <- note that it is y2, not w !
    # (optional) add CurvesPropertiesTool to switch curves between Y1 and Y2
    t = CurvesPropertiesTool()
    t.attachToPlotItem(w.getPlotItem(), y2=y2)
    # Show the window and block in the Qt event loop until it is closed.
    w.show()
    sys.exit(app.exec_())
| 32.163934
| 78
| 0.648318
|
4a15547215a44089034205abb984857aee70336e
| 9,509
|
py
|
Python
|
tests/util/test_saas_util.py
|
mohan-pogala/fidesops
|
5c686362d4fb3b85253dd7e2898be1131a5071ab
|
[
"Apache-2.0"
] | null | null | null |
tests/util/test_saas_util.py
|
mohan-pogala/fidesops
|
5c686362d4fb3b85253dd7e2898be1131a5071ab
|
[
"Apache-2.0"
] | null | null | null |
tests/util/test_saas_util.py
|
mohan-pogala/fidesops
|
5c686362d4fb3b85253dd7e2898be1131a5071ab
|
[
"Apache-2.0"
] | null | null | null |
from fidesops.common_exceptions import FidesopsException
import pytest
from fidesops.graph.config import (
Collection,
Dataset,
FieldAddress,
FieldPath,
ObjectField,
ScalarField,
)
from fidesops.util.saas_util import unflatten_dict, merge_datasets
class TestMergeDatasets:
    """
    Multiple scenarios for merging SaaS config references with SaaS datasets.
    SaaS datasets will not contain references and serve only as a definition
    of available data from the given SaaS connector. Any references to other datasets
    will be provided by the SaaS config.
    """

    def test_add_identity(self):
        """Augment a SaaS dataset collection with an identity reference"""
        # NOTE(review): this is the only test in the class without the
        # @pytest.mark.saas_connector marker -- confirm whether intentional.
        saas_dataset = Dataset(
            name="saas_dataset",
            collections=[
                Collection(
                    name="member",
                    fields=[
                        ScalarField(name="list_id"),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        saas_config = Dataset(
            name="saas_config",
            collections=[
                Collection(
                    name="member",
                    fields=[
                        ScalarField(name="query", identity="email"),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        merged_dataset = merge_datasets(saas_dataset, saas_config)
        collection = merged_dataset.collections[0]
        # Both the dataset's field and the config's identity field survive.
        assert len(collection.fields) == 2
        list_id_field = collection.top_level_field_dict[FieldPath("list_id")]
        assert len(list_id_field.references) == 0
        query_field = collection.top_level_field_dict[FieldPath("query")]
        assert len(query_field.references) == 0
        assert query_field.identity == "email"

    @pytest.mark.saas_connector
    def test_add_reference(self):
        """Augment a SaaS dataset collection with a dataset reference"""
        saas_dataset = Dataset(
            name="saas_dataset",
            collections=[
                Collection(
                    name="conversations",
                    fields=[
                        ScalarField(name="campaign_id"),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        saas_config = Dataset(
            name="saas_config",
            collections=[
                Collection(
                    name="conversations",
                    fields=[
                        ScalarField(
                            name="conversation_id",
                            references=[
                                (
                                    FieldAddress(
                                        "saas_connector", "member", "unique_email_id"
                                    ),
                                    "from",
                                )
                            ],
                        ),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        merged_dataset = merge_datasets(saas_dataset, saas_config)
        collection = merged_dataset.collections[0]
        assert len(collection.fields) == 2
        # Dataset-only field carries no references; config field keeps its one.
        campaign_id_field = collection.top_level_field_dict[FieldPath("campaign_id")]
        assert len(campaign_id_field.references) == 0
        conversation_id_field = collection.top_level_field_dict[
            FieldPath("conversation_id")
        ]
        assert len(conversation_id_field.references) == 1
        assert conversation_id_field.references[0] == (
            FieldAddress("saas_connector", "member", "unique_email_id"),
            "from",
        )

    @pytest.mark.saas_connector
    def test_add_with_object_fields(self):
        """Verify complex SaaS dataset fields are preserved after merging"""
        saas_dataset = Dataset(
            name="saas_dataset",
            collections=[
                Collection(
                    name="member",
                    fields=[
                        ObjectField(
                            name="name",
                            fields={
                                "first": ScalarField(name="first"),
                                "last": ScalarField(name="last"),
                            },
                        )
                    ],
                )
            ],
            connection_key="connection_key",
        )
        saas_config = Dataset(
            name="saas_config",
            collections=[
                Collection(
                    name="member",
                    fields=[
                        ScalarField(name="query", identity="email"),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        merged_dataset = merge_datasets(saas_dataset, saas_config)
        collection = merged_dataset.collections[0]
        assert len(collection.fields) == 2
        query_field = collection.top_level_field_dict[FieldPath("query")]
        assert len(query_field.references) == 0
        assert query_field.identity == "email"
        # The nested ObjectField must survive the merge intact.
        name_field = collection.top_level_field_dict[FieldPath("name")]
        assert isinstance(name_field, ObjectField)
        assert len(name_field.fields) == 2

    @pytest.mark.saas_connector
    def test_merge_same_scalar_field(self):
        """Merge two scalar fields between datasets with the same collection/field name"""
        saas_dataset = Dataset(
            name="saas_dataset",
            collections=[
                Collection(
                    name="conversations",
                    fields=[
                        ScalarField(name="query"),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        saas_config = Dataset(
            name="saas_config",
            collections=[
                Collection(
                    name="conversations",
                    fields=[
                        ScalarField(
                            name="query",
                            references=[
                                (
                                    FieldAddress(
                                        "saas_connector", "member", "unique_email_id"
                                    ),
                                    "from",
                                )
                            ],
                        ),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        merged_dataset = merge_datasets(saas_dataset, saas_config)
        collection = merged_dataset.collections[0]
        # Same-named fields collapse into one field that keeps the reference.
        assert len(collection.fields) == 1
        assert len(collection.fields[0].references) == 1

    @pytest.mark.saas_connector
    def test_merge_same_object_field(self):
        """Merge a scalar and object field between datasets with the same collection/field name"""
        saas_dataset = Dataset(
            name="saas_dataset",
            collections=[
                Collection(
                    name="member",
                    fields=[
                        ObjectField(
                            name="name",
                            fields={
                                "first": ScalarField(name="first"),
                                "last": ScalarField(name="last"),
                            },
                        )
                    ],
                )
            ],
            connection_key="connection_key",
        )
        saas_config = Dataset(
            name="saas_config",
            collections=[
                Collection(
                    name="member",
                    fields=[
                        ScalarField(name="name", identity="email"),
                    ],
                )
            ],
            connection_key="connection_key",
        )
        merged_dataset = merge_datasets(saas_dataset, saas_config)
        collection = merged_dataset.collections[0]
        assert len(collection.fields) == 1
        # The merged field keeps the object structure AND gains the identity.
        name_field = collection.top_level_field_dict[FieldPath("name")]
        assert isinstance(name_field, ObjectField)
        assert len(name_field.fields) == 2
        assert name_field.identity == "email"
def test_unflatten_dict():
    """Exercise unflatten_dict across empty, flat, nested, and error cases."""
    # empty dictionary
    assert unflatten_dict({}) == {}
    # unflattened dictionary
    assert unflatten_dict({"A": "1"}) == {"A": "1"}
    # same level
    assert unflatten_dict({"A.B": "1", "A.C": "2"}) == {"A": {"B": "1", "C": "2"}}
    # mixed levels
    assert unflatten_dict({"A": "1", "B.C": "2", "B.D": "3",}) == {
        "A": "1",
        "B": {"C": "2", "D": "3"},
    }
    # long path
    assert unflatten_dict({"A.B.C.D.E.F.G": "1"}) == {
        "A": {"B": {"C": {"D": {"E": {"F": {"G": "1"}}}}}}
    }
    # incoming values should overwrite existing values
    # NOTE(review): a dict literal cannot contain duplicate keys, so
    # {"A.B": 1, "A.B": 2} collapses to {"A.B": 2} before the call --
    # this assertion does not actually exercise overwrite behavior.
    assert unflatten_dict({"A.B": 1, "A.B": 2}) == {"A": {"B": 2}}
    # conflicting types
    with pytest.raises(FidesopsException):
        unflatten_dict({"A.B": 1, "A": 2, "A.C": 3})
    # data passed in is not completely flattened
    with pytest.raises(FidesopsException):
        unflatten_dict({'A.B.C': 1, 'A': {'B.D': 2}})
    # unflatten_dict shouldn't be called with a None separator
    with pytest.raises(IndexError):
        unflatten_dict({"": "1"}, separator=None)
| 33.248252
| 97
| 0.479546
|
4a15565f38143279d37cd39e7581faafb7de905e
| 2,860
|
py
|
Python
|
db/migrations/versions/76cfe6045752_eftv_order_and_selectable.py
|
DeschutesBrewery/brewerypi
|
5459dfc6b1ed415920c13a8a7c9a2d3d3c82099f
|
[
"MIT"
] | 27
|
2017-11-27T05:01:05.000Z
|
2020-11-14T19:52:26.000Z
|
db/migrations/versions/76cfe6045752_eftv_order_and_selectable.py
|
DeschutesBrewery/brewerypi
|
5459dfc6b1ed415920c13a8a7c9a2d3d3c82099f
|
[
"MIT"
] | 259
|
2017-11-23T00:43:26.000Z
|
2020-11-03T01:07:30.000Z
|
db/migrations/versions/76cfe6045752_eftv_order_and_selectable.py
|
DeschutesBrewery/brewerypi
|
5459dfc6b1ed415920c13a8a7c9a2d3d3c82099f
|
[
"MIT"
] | 8
|
2018-10-29T04:39:29.000Z
|
2020-10-01T22:18:12.000Z
|
"""EFTV Order and Selectable.
Revision ID: 76cfe6045752
Revises: b13236977763
Create Date: 2020-08-30 22:12:40.468380
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.dialects import mysql
from sqlalchemy.ext.declarative import declarative_base
# Minimal ORM mappings used only by this migration's data backfill; they
# intentionally declare just the columns the upgrade() logic touches.
Base = declarative_base()
class EventFrameTemplate(Base):
    __tablename__ = "EventFrameTemplate"
    EventFrameTemplateId = sa.Column(sa.Integer, primary_key = True)
    # One-to-many: the views whose 'Order' is backfilled per template.
    EventFrameTemplateViews = orm.relationship("EventFrameTemplateView", backref = "EventFrameTemplate", lazy = "dynamic")
class EventFrameTemplateView(Base):
    # Snapshot mapping of the table being migrated (pre-'Order'/'Selectable').
    __tablename__ = "EventFrameTemplateView"
    EventFrameTemplateViewId = sa.Column(sa.Integer, primary_key = True)
    EventFrameTemplateId = sa.Column(sa.Integer, sa.ForeignKey("EventFrameTemplate.EventFrameTemplateId"), nullable = False)
    Name = sa.Column(sa.String(45), nullable = False)
    # Nullable here so rows can exist before the backfill assigns values.
    Order = sa.Column(sa.Integer, nullable = True)
# revision identifiers, used by Alembic.
revision = '76cfe6045752'
down_revision = 'b13236977763'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Step 1: add 'Selectable' as nullable so existing rows remain valid,
    # backfill every existing row to True, then tighten to NOT NULL.
    op.add_column('EventFrameTemplateView', sa.Column('Selectable', sa.Boolean(), nullable=True))
    # Set any existing Event Frame Template Views to Selectable.
    metadata = sa.MetaData()
    metadata.reflect(bind = op.get_bind())
    eventFrameTemplateViewTable = metadata.tables["EventFrameTemplateView"]
    updateStatement = eventFrameTemplateViewTable.update().values(Selectable = True)
    op.get_bind().execute(updateStatement)
    # Alter Selectable column to prevent null.
    op.alter_column('EventFrameTemplateView', 'Selectable', existing_type=mysql.TINYINT(display_width=1), nullable=False)
    # Step 2: add 'Order' (nullable for the backfill), number each template's
    # views alphabetically by Name starting at 1, then make it NOT NULL and
    # unique per (view id, order).
    op.add_column('EventFrameTemplateView', sa.Column('Order', sa.Integer(), nullable=True))
    metadata = sa.MetaData()  # NOTE(review): re-created but not used below -- confirm
    bind = op.get_bind()
    session = orm.Session(bind=bind)
    for eventFrameTemplate in session.query(EventFrameTemplate):
        for i, eventFrameTemplateView in enumerate(eventFrameTemplate.EventFrameTemplateViews.order_by(EventFrameTemplateView.Name), 1):
            eventFrameTemplateView.Order = i
    session.commit()
    op.alter_column('EventFrameTemplateView', 'Order', existing_type=mysql.INTEGER(display_width=11), nullable=False)
    op.create_unique_constraint('AK__EventFrameTemplateViewId__Order', 'EventFrameTemplateView', ['EventFrameTemplateViewId', 'Order'])
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop the unique constraint before its 'Order'
    # column, and drop 'Selectable'. Backfilled data is discarded.
    op.drop_column('EventFrameTemplateView', 'Selectable')
    op.drop_constraint('AK__EventFrameTemplateViewId__Order', 'EventFrameTemplateView', type_='unique')
    op.drop_column('EventFrameTemplateView', 'Order')
    # ### end Alembic commands ###
| 40.28169
| 136
| 0.761189
|
4a1556fcefc6f81fe6cf42b11d57abd7f2472c05
| 4,607
|
py
|
Python
|
nova/tests/unit/notifications/test_base.py
|
WeifanFu-bsn/nova
|
c7b54a80ac25f6a01d0a150c546532f5ae2592ce
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/notifications/test_base.py
|
WeifanFu-bsn/nova
|
c7b54a80ac25f6a01d0a150c546532f5ae2592ce
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/notifications/test_base.py
|
WeifanFu-bsn/nova
|
c7b54a80ac25f6a01d0a150c546532f5ae2592ce
|
[
"Apache-2.0"
] | 1
|
2020-07-22T21:17:41.000Z
|
2020-07-22T21:17:41.000Z
|
# Copyright (c) 2017 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from nova import context as nova_context
from nova.notifications import base
from nova import test
from nova.tests import uuidsentinel as uuids
from nova import utils
class TestNullSafeUtils(test.NoDBTestCase):
    """Tests for the null-safe formatting helpers in notifications.base."""

    def test_null_safe_isotime(self):
        # None maps to the empty string.
        self.assertEqual('', base.null_safe_isotime(None))
        # A real datetime is rendered via utils.strtime.
        timestamp = datetime.datetime(year=2017,
                                      month=1,
                                      day=1,
                                      hour=1,
                                      minute=1,
                                      second=1)
        self.assertEqual(utils.strtime(timestamp),
                         base.null_safe_isotime(timestamp))

    def test_null_safe_str(self):
        # None maps to the empty string.
        self.assertEqual('', base.null_safe_str(None))
        # Any non-None value is passed through unchanged.
        self.assertEqual('test', base.null_safe_str('test'))
class TestSendInstanceUpdateNotification(test.NoDBTestCase):
    """Tests for the versioned/unversioned instance.update notification paths."""

    @mock.patch('nova.notifications.objects.base.NotificationBase.emit',
                new_callable=mock.NonCallableMock)  # asserts not called
    # TODO(mriedem): Rather than mock is_enabled, it would be better to
    # configure oslo_messaging_notifications.driver=['noop']
    @mock.patch('nova.rpc.NOTIFIER.is_enabled', return_value=False)
    def test_send_versioned_instance_update_notification_disabled(self,
                                                                  mock_enabled,
                                                                  mock_info):
        """Tests the case that versioned notifications are disabled which makes
        _send_versioned_instance_update_notification a noop.
        """
        # NonCallableMock on emit makes this fail loudly if anything is sent.
        base._send_versioned_instance_update(mock.sentinel.ctxt,
                                             mock.sentinel.instance,
                                             mock.sentinel.payload,
                                             mock.sentinel.host,
                                             mock.sentinel.service)

    @mock.patch.object(base, 'bandwidth_usage')
    @mock.patch.object(base, '_compute_states_payload')
    @mock.patch('nova.rpc.get_notifier')
    @mock.patch.object(base, 'info_from_instance')
    def test_send_legacy_instance_update_notification(self, mock_info,
                                                      mock_get_notifier,
                                                      mock_states,
                                                      mock_bw):
        """Tests the case that versioned notifications are disabled and
        assert that this does not prevent sending the unversioned
        instance.update notification.
        """
        self.flags(notification_format='unversioned', group='notifications')
        base.send_instance_update_notification(mock.sentinel.ctxt,
                                               mock.sentinel.instance)
        # The legacy (unversioned) notifier path must still be exercised.
        mock_get_notifier.return_value.info.assert_called_once_with(
            mock.sentinel.ctxt, 'compute.instance.update', mock.ANY)
class TestBandwidthUsage(test.NoDBTestCase):
    """Tests for base.bandwidth_usage context handling."""

    @mock.patch('nova.context.RequestContext.elevated')
    @mock.patch('nova.network.API')
    @mock.patch('nova.objects.BandwidthUsageList.get_by_uuids')
    def test_context_elevated(self, mock_get_bw_usage, mock_nw_api,
                              mock_elevated):
        """bandwidth_usage must use an elevated context (read_deleted='yes')."""
        context = nova_context.RequestContext('fake', 'fake')
        # We need this to not be a NovaObject so the old school
        # get_instance_nw_info will run.
        instance = {'uuid': uuids.instance}
        audit_start = 'fake'
        base.bandwidth_usage(context, instance, audit_start)
        # Both the network query and the bandwidth lookup should receive the
        # elevated context, not the original one.
        network_api = mock_nw_api.return_value
        network_api.get_instance_nw_info.assert_called_once_with(
            mock_elevated.return_value, instance)
        mock_get_bw_usage.assert_called_once_with(
            mock_elevated.return_value, [uuids.instance], audit_start)
        mock_elevated.assert_called_once_with(read_deleted='yes')
| 43.87619
| 79
| 0.622097
|
4a1557ad408f60d581992055e41843d91518891e
| 4,433
|
py
|
Python
|
hydra/models.py
|
dbsiavichay/django-hydra
|
9b1bbfe762c4c64ea6b59a02226084ad88257f64
|
[
"MIT"
] | null | null | null |
hydra/models.py
|
dbsiavichay/django-hydra
|
9b1bbfe762c4c64ea6b59a02226084ad88257f64
|
[
"MIT"
] | null | null | null |
hydra/models.py
|
dbsiavichay/django-hydra
|
9b1bbfe762c4c64ea6b59a02226084ad88257f64
|
[
"MIT"
] | null | null | null |
""" Models for buid menus """
# Django
from django.db import models
from django.urls import reverse
from django.urls.exceptions import NoReverseMatch
from django.utils.text import slugify
from django.apps import apps
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# Hydra
from . import site
from .utils import import_class
class Action(models.Model):
    """Menu action targeting either a Django model or a class-based view.

    (app_label, element) identifies the target; 'to' says how to resolve it.
    """
    class ToChoices(models.IntegerChoices):
        # Kind of target this action points at.
        MODEL = 1, "Modelo"
        CLASSVIEW = 2, "Vista"
    to = models.PositiveSmallIntegerField(
        choices=ToChoices.choices,
        verbose_name="acción hacia un"
    )
    # Django app that owns the target model/view.
    app_label = models.CharField(
        max_length=128,
        verbose_name="aplicación"
    )
    name = models.CharField(max_length=128, verbose_name='nombre de la acción')
    # Model name or view class name, depending on 'to'.
    element = models.CharField(
        max_length=128,
        verbose_name='elemento accionado'
    )
    permissions = models.ManyToManyField(
        Permission,
        blank=True,
        verbose_name="permisos específicos"
    )
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "acción"
        verbose_name_plural = "acciones"
        unique_together = ("app_label", "element")
        ordering = ("name",)
    def get_model_class(self):
        """Return the targeted model class, or None if not a MODEL action
        or the model cannot be found."""
        if self.to != self.ToChoices.MODEL:
            return None
        try:
            model_class = apps.get_model(self.app_label, self.element)
            return model_class
        except LookupError:
            return None
    def get_view_class(self):
        """Return the targeted view class, or None if not a CLASSVIEW action
        or lookup fails."""
        if self.to != self.ToChoices.CLASSVIEW:
            return None
        try:
            app_config = apps.get_app_config(self.app_label)
            view_class = import_class(f"{app_config.name}.views", self.element)
            return view_class
        # NOTE(review): import_class failures would more plausibly raise
        # ImportError/AttributeError than LookupError -- confirm coverage.
        except (LookupError):
            return None
    def get_permissions(self):
        # NOTE(review): hardcodes the "auth." app prefix, which assumes every
        # specific permission belongs to the auth app -- verify.
        return [f"auth.{perm.codename}" for perm in self.permissions.all()]
    def has_permissions(self, user):
        """True if the (active, authenticated) user may run this action.

        Superusers always pass. MODEL actions require any one of the four
        basic model permissions; all specific permissions must also hold.
        """
        if not user.is_authenticated or not user.is_active:
            return False
        if user.is_superuser:
            return True
        if self.to == self.ToChoices.MODEL:
            basic_perms = any(
                user.has_perm(f"{self.app_label}.{perm}_{self.element}")
                for perm in ("view", "add", "change", "delete")
            )
        else:
            basic_perms = True
        specific_perms = all(
            user.has_perm(perm) for perm in self.get_permissions()
        )
        return basic_perms and specific_perms
class Menu(models.Model):
    """ Models for menu """
    # Self-referential tree: submenus point at their parent entry.
    parent = models.ForeignKey(
        'self',
        blank=True, null=True,
        related_name='submenus',
        on_delete=models.CASCADE,
        verbose_name='menú padre'
    )
    name = models.CharField(max_length=128, verbose_name='nombre')
    # Slug path derived from ancestors' names; not user-editable.
    route = models.CharField(
        max_length=512,
        unique=True, editable=False,
        verbose_name='ruta de acceso'
    )
    action = models.ForeignKey(
        Action,
        on_delete=models.CASCADE,
        verbose_name='acción'
    )
    icon_class = models.CharField(
        max_length=128,
        blank=True, null=True,
        verbose_name='clase css del ícono'
    )
    is_group = models.BooleanField(
        default=False, editable=False,
        verbose_name="agrupa"
    )
    sequence = models.PositiveSmallIntegerField(verbose_name='secuencia')
    is_active = models.BooleanField(default=True, verbose_name='activo?')
    class Meta:
        ordering = ('route', 'sequence')
    def __str__(self):
        return f"{self.name} | {self.get_route()}"
    def get_route(self):
        """Build the slug path recursively from the root menu down to self."""
        route = f"{self.parent.get_route()}/{slugify(self.name)}" if self.parent else slugify(self.name)
        return route
    def get_url(self):
        """Resolve this entry's URL via the site registry or a named route.

        Falls back to returning the unresolved url_name when reverse() fails.
        """
        url_name = None
        if self.action.to == Action.ToChoices.MODEL:
            model_class = self.action.get_model_class()
            if model_class and model_class in site._registry:
                model_site = site._registry[model_class]
                url_name = model_site.get_url_name("list")
        else:
            url_name = f"site:{slugify(self.name)}"
        try:
            url = reverse(url_name)
            return url
        except NoReverseMatch:
            # NOTE(review): uses print() rather than logging -- consider the
            # project's logger; left unchanged here.
            print("Not found url for %s" % url_name)
            return url_name
| 29.164474
| 104
| 0.616513
|
4a15583a27141718ddef5903c16d9c1726c800f4
| 983
|
py
|
Python
|
configs/transforms_config.py
|
wkabbani/egain
|
3357e8e612fe2d2a158ea15184442f54b942dc0d
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | null | null | null |
configs/transforms_config.py
|
wkabbani/egain
|
3357e8e612fe2d2a158ea15184442f54b942dc0d
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | null | null | null |
configs/transforms_config.py
|
wkabbani/egain
|
3357e8e612fe2d2a158ea15184442f54b942dc0d
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | null | null | null |
from abc import abstractmethod
import torchvision.transforms as transforms
class TransformsConfig(object):
    """Base holder for run options plus a transform-factory interface.

    NOTE: ``@abstractmethod`` has no runtime enforcement here because the
    class does not use ``abc.ABCMeta``; it serves as documentation only.
    """

    def __init__(self, opts):
        # Keep the raw options object for subclasses to consult.
        self.opts = opts

    @abstractmethod
    def get_transforms(self):
        """Subclasses return their dict of named transform pipelines."""
class EncodeTransforms(TransformsConfig):
    """Transform set for encoder training: 256x256 images scaled to [-1, 1]."""

    def __init__(self, opts):
        super(EncodeTransforms, self).__init__(opts)

    def get_transforms(self):
        """Return the train/source/test/inference pipelines as a dict."""
        half = [0.5, 0.5, 0.5]

        def eval_pipeline():
            # Deterministic pipeline shared (by construction, not by
            # instance) between the test and inference entries.
            return transforms.Compose([
                transforms.Resize((256, 256)),
                transforms.ToTensor(),
                transforms.Normalize(half, half),
            ])

        train_pipeline = transforms.Compose([
            transforms.Resize((256, 256)),
            transforms.RandomHorizontalFlip(0.5),
            transforms.ToTensor(),
            transforms.Normalize(half, half),
        ])
        return {
            'transform_gt_train': train_pipeline,
            'transform_source': None,
            'transform_test': eval_pipeline(),
            'transform_inference': eval_pipeline(),
        }
| 25.868421
| 61
| 0.69176
|
4a15585b977e2015f5ba393faf926483d93d2c03
| 1,334
|
py
|
Python
|
.github/workflows/aws/upload_nightly_index.py
|
pyg-team/pyg-lib
|
c1656eda314cc7d161e85da96300ffeea29d0bcb
|
[
"MIT"
] | 13
|
2022-03-26T21:44:41.000Z
|
2022-03-31T03:36:23.000Z
|
.github/workflows/aws/upload_nightly_index.py
|
pyg-team/pyg-lib
|
c1656eda314cc7d161e85da96300ffeea29d0bcb
|
[
"MIT"
] | null | null | null |
.github/workflows/aws/upload_nightly_index.py
|
pyg-team/pyg-lib
|
c1656eda314cc7d161e85da96300ffeea29d0bcb
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
import boto3

# Rebuilds the static HTML index pages for nightly wheels stored under
# whl/nightly/ in the data.pyg.org S3 bucket: one top-level index linking to
# a per-torch-version page, each listing that version's wheel files.
ROOT_URL = 'https://data.pyg.org/whl/nightly'
html = '<!DOCTYPE html>\n<html>\n<body>\n{}\n</body>\n</html>'
href = ' <a href="{}">{}</a><br/>'
# Upload options: serve as HTML, cache briefly, world-readable.
args = {
    'ContentType': 'text/html',
    'CacheControl': 'max-age=300',
    'ACL': 'public-read',
}
bucket = boto3.resource('s3').Bucket(name='data.pyg.org')
# Group wheel file names by the torch-version directory they live in.
wheels_dict = defaultdict(list)
for obj in bucket.objects.filter(Prefix='whl/nightly'):
    if obj.key[-3:] != 'whl':
        continue
    torch_version, wheel_name = obj.key.split('/')[-2:]
    wheels_dict[torch_version].append(wheel_name)
# Top-level index: one link per torch version ('+' must be URL-escaped).
# Fixed bug: the href previously interpolated the stale loop variable
# `torch_version` (left over from the loop above) instead of the
# comprehension variable, so every link pointed at the last version's page.
index_html = html.format('\n'.join([
    href.format(f'{version}.html'.replace('+', '%2B'), version)
    for version in wheels_dict
]))
with open('index.html', 'w') as f:
    f.write(index_html)
bucket.Object('whl/nightly/index.html').upload_file('index.html', args)
# Per-version pages: absolute links to each wheel file.
for torch_version, wheel_names in wheels_dict.items():
    torch_version_html = html.format('\n'.join([
        href.format(
            f'{ROOT_URL}/{torch_version}/{wheel_name}'.replace('+', '%2B'),
            wheel_name) for wheel_name in wheel_names
    ]))
    with open(f'{torch_version}.html', 'w') as f:
        f.write(torch_version_html)
    bucket.Object(f'whl/nightly/{torch_version}.html').upload_file(
        f'{torch_version}.html', args)
| 31.023256
| 75
| 0.646177
|
4a15595ff86f4a9fab5464cf1def1afb0d91d9eb
| 6,594
|
py
|
Python
|
test/test_misc_explainers.py
|
jjog22/interpret-community
|
cb613a2edc455f505bba41382b842bc240f59ee2
|
[
"MIT"
] | 2
|
2020-10-14T01:02:37.000Z
|
2022-02-17T01:47:49.000Z
|
test/test_misc_explainers.py
|
malikamalik/interpret-community
|
17f5a3fd28e6377f51e9532c5648e91e2037b9e5
|
[
"MIT"
] | 12
|
2021-03-10T01:29:02.000Z
|
2022-02-26T21:11:42.000Z
|
test/test_misc_explainers.py
|
malikamalik/interpret-community
|
17f5a3fd28e6377f51e9532c5648e91e2037b9e5
|
[
"MIT"
] | 1
|
2020-11-19T20:00:23.000Z
|
2020-11-19T20:00:23.000Z
|
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# Tests for kernel, tree and deep explainers.
import pytest
import logging
from lightgbm import LGBMClassifier, LGBMRegressor
from interpret_community.shap.kernel_explainer import KernelExplainer
from interpret_community.shap.tree_explainer import TreeExplainer
from interpret_community.shap.deep_explainer import DeepExplainer
from interpret_community.shap.linear_explainer import LinearExplainer
from common_tabular_tests import VerifyTabularTests
from common_utils import create_keras_multiclass_classifier, create_keras_regressor, \
create_sklearn_linear_regressor, create_sklearn_logistic_regressor
from constants import owner_email_tools_and_ux
test_logger = logging.getLogger(__name__)
test_logger.setLevel(logging.INFO)
@pytest.mark.owner(email=owner_email_tools_and_ux)
@pytest.mark.usefixtures("clean_dir")
class TestKernelExplainer(object):
    """Raw-transformation tests for the SHAP KernelExplainer wrapper."""

    def setup_class(self):
        def create_explainer(model, x_train, **kwargs):
            return KernelExplainer(model, x_train, **kwargs)
        self._verify_tabular = VerifyTabularTests(test_logger, create_explainer)

    def test_kernel_explainer_raw_transformations_list_classification(self):
        self._verify_tabular.verify_explain_model_transformations_list_classification()

    def test_kernel_explainer_raw_transformations_column_transformer_classification(self):
        self._verify_tabular.verify_explain_model_transformations_column_transformer_classification()

    def test_kernel_explainer_raw_transformations_list_regression(self):
        self._verify_tabular.verify_explain_model_transformations_list_regression()

    def test_kernel_explainer_raw_transformations_column_transformer_regression(self):
        # Fixed copy-paste bug: this test previously called the
        # list-regression verifier, leaving the column-transformer regression
        # path untested (TestDeepExplainer shows the intended call).
        self._verify_tabular.verify_explain_model_transformations_column_transformer_regression()
@pytest.mark.owner(email=owner_email_tools_and_ux)
@pytest.mark.usefixtures("clean_dir")
class TestDeepExplainer(object):
    """Raw-transformation tests for the SHAP DeepExplainer wrapper (Keras models)."""

    def setup_class(self):
        def create_explainer(model, x_train, **kwargs):
            return DeepExplainer(model, x_train, **kwargs)
        self._verify_tabular = VerifyTabularTests(test_logger, create_explainer)

    def _get_create_model(self, classification):
        """Return a factory building a Keras classifier or regressor."""
        if classification:
            train_fn = create_keras_multiclass_classifier
        else:
            train_fn = create_keras_regressor
        def create_model(x, y):
            return train_fn(x, y)
        return create_model

    def test_deep_explainer_raw_transformations_list_classification(self):
        self._verify_tabular.verify_explain_model_transformations_list_classification(self._get_create_model(
            classification=True))

    def test_deep_explainer_raw_transformations_column_transformer_classification(self):
        self._verify_tabular.verify_explain_model_transformations_column_transformer_classification(
            self._get_create_model(classification=True))

    def test_deep_explainer_raw_transformations_list_regression(self):
        self._verify_tabular.verify_explain_model_transformations_list_regression(self._get_create_model(
            classification=False))

    def test_deep_explainer_raw_transformations_column_transformer_regression(self):
        self._verify_tabular.verify_explain_model_transformations_column_transformer_regression(
            self._get_create_model(classification=False))
@pytest.mark.owner(email=owner_email_tools_and_ux)
@pytest.mark.usefixtures("clean_dir")
class TestTreeExplainer(object):
    """Raw-transformation tests for the SHAP TreeExplainer wrapper (LightGBM)."""

    def setup_class(self):
        def create_explainer(model, x_train, **kwargs):
            return TreeExplainer(model, **kwargs)
        self._verify_tabular = VerifyTabularTests(test_logger, create_explainer)

    def _get_create_model(self, classification):
        """Return a factory fitting a fresh LightGBM classifier or regressor.

        The model is constructed inside the factory so each call fits a new
        estimator rather than refitting a shared instance.
        """
        if classification:
            model_cls = LGBMClassifier
        else:
            model_cls = LGBMRegressor
        def create_model(x, y):
            return model_cls().fit(x, y)
        return create_model

    def test_tree_explainer_raw_transformations_list_classification(self):
        self._verify_tabular.verify_explain_model_transformations_list_classification(self._get_create_model(
            classification=True))

    def test_tree_explainer_raw_transformations_column_transformer_classification(self):
        self._verify_tabular.verify_explain_model_transformations_column_transformer_classification(
            self._get_create_model(classification=True))

    def test_tree_explainer_raw_transformations_list_regression(self):
        self._verify_tabular.verify_explain_model_transformations_list_regression(self._get_create_model(
            classification=False))

    def test_tree_explainer_raw_transformations_column_transformer_regression(self):
        # Fixed copy-paste bug: this test previously called the
        # list-regression verifier, leaving the column-transformer regression
        # path untested (TestDeepExplainer shows the intended call).
        self._verify_tabular.verify_explain_model_transformations_column_transformer_regression(
            self._get_create_model(classification=False))
@pytest.mark.owner(email=owner_email_tools_and_ux)
@pytest.mark.usefixtures("clean_dir")
class TestLinearExplainer(object):
    """Raw-transformation tests for LinearExplainer over sklearn linear models."""

    def setup_class(self):
        def create_explainer(model, x_train, **kwargs):
            return LinearExplainer(model, x_train, **kwargs)
        self._verify_tabular = VerifyTabularTests(test_logger, create_explainer)

    def _get_create_model(self, classification):
        """Return a factory that trains a linear/logistic regressor on (x, y)."""
        if classification:
            train_fn = create_sklearn_logistic_regressor
        else:
            train_fn = create_sklearn_linear_regressor

        def create_model(x, y):
            return train_fn(x, y)

        return create_model

    def test_linear_explainer_raw_transformations_list_classification(self):
        self._verify_tabular.verify_explain_model_transformations_list_classification(
            self._get_create_model(classification=True))

    def test_linear_explainer_raw_transformations_column_transformer_classification(self):
        self._verify_tabular.verify_explain_model_transformations_column_transformer_classification(
            self._get_create_model(classification=True))

    def test_linear_explainer_raw_transformations_list_regression(self):
        self._verify_tabular.verify_explain_model_transformations_list_regression(
            self._get_create_model(classification=False))

    def test_linear_explainer_raw_transformations_column_transformer_regression(self):
        # BUG FIX: this test previously called the *list* regression verifier,
        # so the column-transformer regression path was never exercised for
        # LinearExplainer.
        self._verify_tabular.verify_explain_model_transformations_column_transformer_regression(
            self._get_create_model(classification=False))
| 42.541935
| 109
| 0.775099
|
4a1559ee12dac1bc4dec2ae0637f5d77e3df37f0
| 6,463
|
py
|
Python
|
src/classification/aggregating_results.py
|
PeterJackNaylor/CellularHeatmaps
|
52829685683b6f3315b62246a77cc2206326e2b3
|
[
"Apache-2.0"
] | null | null | null |
src/classification/aggregating_results.py
|
PeterJackNaylor/CellularHeatmaps
|
52829685683b6f3315b62246a77cc2206326e2b3
|
[
"Apache-2.0"
] | 2
|
2022-01-13T03:57:02.000Z
|
2022-03-12T01:01:45.000Z
|
src/classification/aggregating_results.py
|
PeterJackNaylor/CellularHeatmaps
|
52829685683b6f3315b62246a77cc2206326e2b3
|
[
"Apache-2.0"
] | 1
|
2020-10-12T07:56:51.000Z
|
2020-10-12T07:56:51.000Z
|
import pandas as pd
import pickle as pkl
from glob import glob
import numpy as np
from sklearn.metrics import roc_auc_score, accuracy_score, recall_score, precision_score, f1_score
import pandas as pd
# Module-level defaults; main() re-reads all three from the CLI options, so
# these only matter for interactive use of this module.
inner_fold = 5
label_file = "/mnt/data3/pnaylor/CellularHeatmaps/outputs/label_nature.csv"
y_interest = "Residual"
def get_options():
    """Parse and return the command-line options for result aggregation.

    Options: --labels (label csv path), --y_interest (target column name),
    --inner_fold (number of inner CV folds), --filename (output csv name).
    """
    import argparse
    arg_parser = argparse.ArgumentParser(
        description='Creating training on heatmaps')
    arg_parser.add_argument('--labels', required=False,
                            default="/Users/naylorpeter/tmp/predict_from_umap_cell/patients/multi_class.csv",
                            metavar="str", type=str,
                            help='path to label csv file')
    arg_parser.add_argument('--y_interest', required=False,
                            default="RCB_class",
                            metavar="str", type=str,
                            help='tag for the variable of interest in labels')
    arg_parser.add_argument('--inner_fold', required=False,
                            default=5,
                            metavar="int", type=int,
                            help='number of inner folds to perform')
    arg_parser.add_argument('--filename', required=False,
                            default="results.csv",
                            metavar="str", type=str,
                            help='file name for val and test')
    return arg_parser.parse_args()
def main():
    """Aggregate per-fold prediction pickles in the CWD into one CSV summary.

    For every ``*.pkl`` file: joins predicted probabilities with the ground
    truth label, computes per-fold and pooled AUC/accuracy/recall/precision/F1
    on the validation and test splits, then averages per (model, lr) group and
    writes the resulting table to ``options.filename``.
    """
    options = get_options()
    inner_fold = options.inner_fold
    label_file = options.labels
    y_interest = options.y_interest
    label = pd.read_csv(label_file, index_col="Biopsy")[y_interest]
    list_dic = []
    for f in glob("*.pkl"):
        # context manager so the pickle file handle is always released
        # (was: pkl.load(open(f, 'rb')) which leaked the handle)
        with open(f, 'rb') as handle:
            dic = pkl.load(handle)
        validation_predictions = [dic["{}_validation_prob".format(i)].join(label) for i in range(inner_fold)]
        test_predictions = [dic["{}_test_prob".format(i)].join(label) for i in range(inner_fold)]
        auc_scores = []
        auc_scores_t = []
        for i in range(inner_fold):
            # column 1 holds the positive-class probability
            y_scores = validation_predictions[i][1]
            y_true = validation_predictions[i][y_interest]
            auc_scores.append(roc_auc_score(y_true, y_scores))
            y_scores_t = test_predictions[i][1]
            y_true_t = test_predictions[i][y_interest]
            auc_scores_t.append(roc_auc_score(y_true_t, y_scores_t))
        # fold with the best validation AUC, and its matching test AUC
        best_ind = np.argmax(auc_scores)
        auc_score_best_val = auc_scores[best_ind]
        auc_score_best_val_t = auc_scores_t[best_ind]
        # pooled metrics over all validation folds
        validation_predictions = pd.concat(validation_predictions, axis=0)
        y_scores = validation_predictions[1]
        y_true = validation_predictions[y_interest]
        avg_auc = roc_auc_score(y_true, y_scores)
        avg_acc = accuracy_score(y_true, y_scores.round(0))
        avg_rec = recall_score(y_true, y_scores.round(0))
        avg_pre = precision_score(y_true, y_scores.round(0))
        avg_f1 = f1_score(y_true, y_scores.round(0))
        val_dic = {'acc': avg_acc, 'rec': avg_rec, 'pre': avg_pre, 'avg_f1': avg_f1}
        # pooled metrics over all test folds
        test_predictions_c = pd.concat(test_predictions, axis=0)
        y_scores_t = test_predictions_c[1]
        y_true_t = test_predictions_c[y_interest]
        avg_auc_t = roc_auc_score(y_true_t, y_scores_t)
        avg_acc_t = accuracy_score(y_true_t, y_scores_t.round(0))
        avg_rec_t = recall_score(y_true_t, y_scores_t.round(0))
        avg_pre_t = precision_score(y_true_t, y_scores_t.round(0))
        avg_f1_t = f1_score(y_true_t, y_scores_t.round(0))
        test_dic = {'acc': avg_acc_t, 'rec': avg_rec_t, 'pre': avg_pre_t, 'avg_f1': avg_f1_t}
        res = {'max_val': auc_score_best_val,
               'max_val_t': auc_score_best_val_t,
               'max_val_t_p': test_predictions[best_ind],
               'avg_val': avg_auc,
               'avg_val_t': avg_auc_t,
               'avg_val_t_p': test_predictions_c,
               'val_dic': val_dic,
               'test_dic': test_dic}
        list_dic.append((f, dic, res))
    final_results = pd.DataFrame(index=range(len(list_dic)), columns=["fold", "model", "lr", "max_val"])
    ind = 0
    for name, dic, res in list_dic:
        # assumes file names of the form <x>_<x>_<fold>_<x>_<model>_<x>_<lr>.pkl
        # -- TODO confirm against the producer of these pickles
        _, _, fold, _, model, _, lr = name.split('_')
        lr = lr.split('.p')[0]
        # .loc replaces the .ix indexer, deprecated in pandas 0.20 and
        # removed in pandas 1.0
        final_results.loc[ind, "fold"] = fold
        final_results.loc[ind, "model"] = model
        final_results.loc[ind, "lr"] = lr
        final_results.loc[ind, "max_val"] = res["max_val"]
        final_results.loc[ind, "mean_val"] = res["avg_val"]
        ind += 1
    final_results['max_val'] = final_results['max_val'].astype('float')
    gfr = final_results.groupby(["model", "lr"])
    # select the numeric columns explicitly instead of relying on silent
    # nuisance-column dropping (removed in recent pandas)
    final_final = gfr[["max_val", "mean_val"]].mean()
    for g in gfr:
        avg_p = []
        max_p = []
        for n in range(g[1].shape[0]):
            ind_n = (g[1]).index[n]
            _, _, res = list_dic[ind_n]
            avg_p.append(res['avg_val_t_p'])
            max_p.append(res['max_val_t_p'])
        prob_t_avg = pd.concat(avg_p, axis=0)
        prob_t_max = pd.concat(max_p, axis=0)
        # test metrics from the best-validation-fold predictions
        y_scores = prob_t_max[1]
        y_true = prob_t_max[y_interest]
        auc_max_test = roc_auc_score(y_true, y_scores)
        acc_max_test = accuracy_score(y_true, y_scores.round(0))
        rec_max_test = recall_score(y_true, y_scores.round(0))
        pre_max_test = precision_score(y_true, y_scores.round(0))
        f1__max_test = f1_score(y_true, y_scores.round(0))
        final_final.loc[g[0], 'test_max'] = auc_max_test
        final_final.loc[g[0], 'test_acc_max'] = acc_max_test
        final_final.loc[g[0], 'test_rec_max'] = rec_max_test
        final_final.loc[g[0], 'test_pre_max'] = pre_max_test
        final_final.loc[g[0], 'test_f1_max'] = f1__max_test
        # test metrics pooled over all folds
        y_scores = prob_t_avg[1]
        y_true = prob_t_avg[y_interest]
        auc_avg_test = roc_auc_score(y_true, y_scores)
        acc_avg_test = accuracy_score(y_true, y_scores.round(0))
        rec_avg_test = recall_score(y_true, y_scores.round(0))
        pre_avg_test = precision_score(y_true, y_scores.round(0))
        f1__avg_test = f1_score(y_true, y_scores.round(0))
        final_final.loc[g[0], 'test_avg'] = auc_avg_test
        final_final.loc[g[0], 'test_acc_avg'] = acc_avg_test
        final_final.loc[g[0], 'test_rec_avg'] = rec_avg_test
        final_final.loc[g[0], 'test_pre_avg'] = pre_avg_test
        final_final.loc[g[0], 'test_f1_avg'] = f1__avg_test
    final_final.to_csv(options.filename)
# Entry point: aggregate per-fold pickle results into a CSV summary.
if __name__ == "__main__":
    main()
| 40.647799
| 109
| 0.621229
|
4a155aa7dddf00535ff85a81dff56c47757cc68c
| 1,106
|
py
|
Python
|
src/labware_domain_models/__init__.py
|
CuriBio/labware-domain-models
|
ad263027a9661cb98a7d92f5194cbbf2260dccd2
|
[
"MIT"
] | null | null | null |
src/labware_domain_models/__init__.py
|
CuriBio/labware-domain-models
|
ad263027a9661cb98a7d92f5194cbbf2260dccd2
|
[
"MIT"
] | 22
|
2020-08-17T23:40:17.000Z
|
2022-03-30T21:03:17.000Z
|
src/labware_domain_models/__init__.py
|
CuriBio/labware-domain-models
|
ad263027a9661cb98a7d92f5194cbbf2260dccd2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Labware and Barcoded SBS Labware models."""
from .barcoded_sbs_labware import BarcodedSbsLabware
from .exceptions import CartesianVectorRequirePlateHeightError
from .exceptions import PositionInvalidForLabwareDefinitionError
from .exceptions import WellCoordinatesRequireA1CenterError
from .exceptions import WellCoordinatesRequireColumnOffsetError
from .exceptions import WellCoordinatesRequireRowOffsetError
from .labware_definitions import CartesianVector
from .labware_definitions import CoordinateSystem
from .labware_definitions import get_row_and_column_from_well_name
from .labware_definitions import LabwareDefinition
from .labware_definitions import WellCoordinate
# Public API of the package; mirrors the explicit imports above.
__all__ = [
    "LabwareDefinition",
    "BarcodedSbsLabware",
    "PositionInvalidForLabwareDefinitionError",
    "WellCoordinate",
    "WellCoordinatesRequireA1CenterError",
    "WellCoordinatesRequireColumnOffsetError",
    "WellCoordinatesRequireRowOffsetError",
    "CartesianVector",
    "CoordinateSystem",
    "CartesianVectorRequirePlateHeightError",
    "get_row_and_column_from_well_name",
]
| 39.5
| 66
| 0.83906
|
4a155b47f1fca0b2023ca0e0b75fd7bc54cc610f
| 4,778
|
py
|
Python
|
msgraph-cli-extensions/v1_0/usersfunctions_v1_0/azext_usersfunctions_v1_0/vendored_sdks/usersfunctions/aio/operations/_users_calendar_groups_calendars_calendar_view_instances_operations.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/v1_0/usersfunctions_v1_0/azext_usersfunctions_v1_0/vendored_sdks/usersfunctions/aio/operations/_users_calendar_groups_calendars_calendar_view_instances_operations.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/v1_0/usersfunctions_v1_0/azext_usersfunctions_v1_0/vendored_sdks/usersfunctions/aio/operations/_users_calendar_groups_calendars_calendar_view_instances_operations.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class UsersCalendarGroupsCalendarsCalendarViewInstancesOperations:
    """UsersCalendarGroupsCalendarsCalendarViewInstancesOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~users_functions.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def delta(
        self,
        user_id: str,
        calendar_group_id: str,
        calendar_id: str,
        event_id: str,
        **kwargs
    ) -> List["models.MicrosoftGraphEvent"]:
        """Invoke function delta.
        Invoke function delta.
        :param user_id: key: id of user.
        :type user_id: str
        :param calendar_group_id: key: id of calendarGroup.
        :type calendar_group_id: str
        :param calendar_id: key: id of calendar.
        :type calendar_id: str
        :param event_id: key: id of event.
        :type event_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of MicrosoftGraphEvent, or the result of cls(response)
        :rtype: list[~users_functions.models.MicrosoftGraphEvent]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List["models.MicrosoftGraphEvent"]]
        # Map well-known auth/resource status codes to typed exceptions;
        # callers may extend the mapping via kwargs['error_map'].
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"
        # Construct URL by substituting the path keys into the template below.
        url = self.delta.metadata['url']  # type: ignore
        path_format_arguments = {
            'user-id': self._serialize.url("user_id", user_id, 'str'),
            'calendarGroup-id': self._serialize.url("calendar_group_id", calendar_group_id, 'str'),
            'calendar-id': self._serialize.url("calendar_id", calendar_id, 'str'),
            'event-id': self._serialize.url("event_id", event_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Non-200: raise the mapped exception, or surface the OData error body.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('[MicrosoftGraphEvent]', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    delta.metadata = {'url': '/users/{user-id}/calendarGroups/{calendarGroup-id}/calendars/{calendar-id}/calendarView/{event-id}/instances/microsoft.graph.delta()'}  # type: ignore
| 44.240741
| 180
| 0.673085
|
4a155bb837725b90d1dfea3c3e4420670f1b89e0
| 6,282
|
py
|
Python
|
examples/timelineCsvConverter.py
|
ubaldb/TradeRepublicApi
|
8df80a75483c85361c4c0a729ad7a8235ee54b12
|
[
"MIT"
] | 1
|
2021-04-03T13:42:59.000Z
|
2021-04-03T13:42:59.000Z
|
examples/timelineCsvConverter.py
|
ubaldb/TradeRepublicApi
|
8df80a75483c85361c4c0a729ad7a8235ee54b12
|
[
"MIT"
] | null | null | null |
examples/timelineCsvConverter.py
|
ubaldb/TradeRepublicApi
|
8df80a75483c85361c4c0a729ad7a8235ee54b12
|
[
"MIT"
] | null | null | null |
import json
import re
from datetime import datetime
# Read my timeline
with open("myTimeline.json", "r", encoding="utf-8") as f:
    timeline = json.loads(f.read())
# Read stock JSON data
with open("../LS/isins.json", "r", encoding="utf-8") as f:
    lsIsins = json.loads(f.read())
# All stocks crawled from TR
with open("allStocks.json", "r", encoding="utf-8") as f:
    allStocks = json.loads(f.read())
# Build a company-name -> ISIN lookup from the crawled stock list.
companyNames = {}
for stock in allStocks:
    companyNames[stock["company"]["name"]] = stock["isin"]
# Fixed ISINs: manually curated name -> ISIN overrides.
with open("companyNameIsins.json", "r", encoding="utf-8") as f:
    fixedIsins = json.loads(f.read())
# Extract decimal number in a string
def getDecimalFromString(inputString):
    """Return the first decimal number found in *inputString*.

    Numbers use German formatting ("1.234,56"); the result is normalized to
    a dot-decimal string ("1234.56"). Returns None when no number is found.
    """
    # Raw string avoids the invalid-escape DeprecationWarning of the old
    # non-raw pattern; the regex itself is unchanged.
    numbers = re.findall(r"[-+]?\d.*\,\d+|\d+", inputString)
    if not numbers:
        # Replaces the old bare `except` (which masked real errors) and the
        # unreachable trailing `return None` after the except-return.
        return None
    return numbers[0].replace(".", "").replace(",", ".")
# Unify a company name to compare
# Trade Republic uses different company names. This makes it very hard to map the timeline events to companies.
# @TradeRepublic: Please add ISIN in timeline event JSON
def unifyCompanyName(inputString):
    """Normalize a company name for fuzzy comparison.

    Keeps only alphanumeric characters and lowercases the result, so the
    differently-spelled names used by Trade Republic can be matched.
    """
    alnum_only = [ch for ch in inputString if ch.isalnum()]
    return "".join(alnum_only).lower()
# Return ISIN from company name. Uses companyNames, then the manual
# fixedIsins overrides, then a fuzzy match against the LS isins list.
# Returns "" (empty string, NOT None) when no ISIN is found -- callers
# test `isin == ""` to report missing companies.
def getIsinFromStockName(stockName):
    """Resolve a Trade Republic company name to an ISIN, or "" if unknown."""
    # Narrowed from bare `except`: only a missing key means "not found here".
    try:
        return companyNames[stockName]
    except KeyError:
        pass
    try:
        return fixedIsins[stockName]
    except KeyError:
        pass
    # Fall back to fuzzy substring matching on normalized names.
    stockNameUnify = unifyCompanyName(stockName)
    for stock in lsIsins:
        try:
            isin = stock[1]
            nameUnify = unifyCompanyName(stock[2])
        except (IndexError, KeyError, TypeError):
            # malformed entry in the LS list -- skip it
            continue
        if stockNameUnify in nameUnify:
            return isin
    return ""
# Portfolio Performance transaction types
# Kauf, Einlage, Verkauf, Zinsen, Gebühren, Dividende, Umbuchung (Eingang), Umbuchung (Ausgang)
missingIsins = {}
# Write transactions.csv file
# date, transaction, shares, amount, total, fee, isin, name
with open("myTransactions.csv", "w") as f:
    f.write("Datum;Typ;Stück;amount;Wert;Gebühren;ISIN;name\n")
    for event in timeline:
        event = event["data"]
        # timeline timestamps are in milliseconds
        dateTime = datetime.fromtimestamp(int(event["timestamp"] / 1000))
        date = dateTime.strftime("%Y-%m-%d")
        title = event["title"]
        try:
            body = event["body"]
        except:
            # some events (e.g. plain deposits) carry no body text
            body = ""
        if "storniert" in body:
            # skip cancelled transactions
            continue
        # Cash in
        if title == "Einzahlung":
            f.write(
                "{0};{1};{2};{3};{4};{5};{6};{7}\n".format(
                    date, "Einlage", "", "", event["cashChangeAmount"], "", "", ""
                )
            )
        elif title == "Auszahlung":
            f.write(
                "{0};{1};{2};{3};{4};{5};{6};{7}\n".format(
                    date, "Entnahme", "", "", abs(event["cashChangeAmount"]), "", "", ""
                )
            )
        # Dividend - Shares
        elif title == "Reinvestierung":
            # TODO: Implement reinvestment
            print("Detected reivestment, skipping... (not implemented yet)")
        # Dividend - Cash
        elif "Gutschrift Dividende" in body:
            isin = getIsinFromStockName(title)
            amountPerShare = getDecimalFromString(body)
            f.write(
                "{0};{1};{2};{3};{4};{5};{6};{7}\n".format(
                    date,
                    "Dividende",
                    "",
                    amountPerShare,
                    event["cashChangeAmount"],
                    "",
                    isin,
                    title,
                )
            )
            if isin == "" and title not in missingIsins.keys():
                missingIsins[title] = ""
                print("WARNING: Company not found ({0}), missing ISIN".format(title))
        # Savings plan execution or normal buy
        elif (
            body.startswith("Sparplan ausgeführt")
            or body.startswith("Kauf")
            or body.startswith("Limit Kauf zu")
        ):
            fee = 0
            # NOTE(review): assumes a flat 1 EUR order fee for manual buys
            # (savings-plan executions are fee-free) -- confirm
            if body.startswith("Kauf") or body.startswith("Limit Kauf zu"):
                fee = 1.0
            isin = getIsinFromStockName(title)
            amountPerShare = abs(float(getDecimalFromString(body)))
            cashChangeAmount = abs(event["cashChangeAmount"])
            # share count is derived, not reported: (total - fee) / price
            shares = "{0:.4f}".format((cashChangeAmount - fee) / amountPerShare)
            f.write(
                "{0};{1};{2};{3};{4};{5};{6};{7}\n".format(
                    date,
                    "Kauf",
                    shares,
                    amountPerShare,
                    cashChangeAmount,
                    fee,
                    isin,
                    title,
                )
            )
            if isin == "" and title not in missingIsins.keys():
                missingIsins[title] = ""
                print("WARNING: Company not found ({0}), missing ISIN".format(title))
        # Sell
        elif body.startswith("Verkauf") or body.startswith("Limit Verkauf zu"):
            isin = getIsinFromStockName(title)
            amountPerShare = abs(float(getDecimalFromString(body)))
            cashChangeAmount = abs(event["cashChangeAmount"])
            shares = "{0:.4f}".format(cashChangeAmount / amountPerShare)
            f.write(
                "{0};{1};{2};{3};{4};{5};{6};{7}\n".format(
                    date,
                    "Verkauf",
                    shares,
                    amountPerShare,
                    cashChangeAmount,
                    "1.0",
                    isin,
                    title,
                )
            )
            if isin == "" and title not in missingIsins.keys():
                missingIsins[title] = ""
                print("WARNING: Company not found ({0}), missing ISIN".format(title))
# Report companies whose ISIN could not be resolved so the user can extend
# the manual override file.
if len(missingIsins.keys()) > 0:
    print("--- MISSING ISINs ---")
    print(json.dumps(missingIsins, indent="\t", sort_keys=True))
    print("Add ISINs to companyNameIsins.json and start again\n")
print("Finished!")
| 34.707182
| 111
| 0.515123
|
4a155c9f042f5515f9b3c0af6ad39938daa83a19
| 1,959
|
py
|
Python
|
tests/test_dashboard.py
|
491852809/ceph-dash
|
503923035d347a8e8bf7d7020c1c24c87b47919d
|
[
"BSD-2-Clause"
] | 427
|
2015-01-05T17:58:46.000Z
|
2022-03-23T03:03:09.000Z
|
tests/test_dashboard.py
|
491852809/ceph-dash
|
503923035d347a8e8bf7d7020c1c24c87b47919d
|
[
"BSD-2-Clause"
] | 53
|
2015-01-19T16:06:31.000Z
|
2021-12-02T19:28:33.000Z
|
tests/test_dashboard.py
|
491852809/ceph-dash
|
503923035d347a8e8bf7d7020c1c24c87b47919d
|
[
"BSD-2-Clause"
] | 154
|
2015-01-02T14:11:53.000Z
|
2022-03-04T02:23:19.000Z
|
import unittest
from app.dashboard.views import find_host_for_osd
from app.dashboard.views import get_unhealthy_osd_details
# Fixture mimicking `ceph osd tree` output: one host ('testhost01') owning
# OSDs 1-3, plus OSD 4 which is attached to no host.
CEPH_OSD_TREE = {
    'nodes': [
        {
            'type': 'host',
            'name': 'testhost01',
            'children': [
                1,
                2,
                3
            ]
        },
        {
            # status 'down' -> counted unhealthy (see test_unhealthy_osd_detail)
            'type': 'osd',
            'name': 'osd.1',
            'id': 1,
            'exists': 1,
            'status': 'down',
            'reweight': 1.0
        },
        {
            # up but reweight 0.0 -> reported as 'out' by the views helper
            'type': 'osd',
            'name': 'osd.2',
            'id': 2,
            'exists': 1,
            'status': 'up',
            'reweight': 0.0
        },
        {
            # healthy: up and fully weighted
            'type': 'osd',
            'name': 'osd.3',
            'id': 3,
            'exists': 1,
            'status': 'up',
            'reweight': 1.0
        },
        {
            # not listed in any host's children; 'exists': 0
            'type': 'osd',
            'name': 'osd.4',
            'id': 4,
            'exists': 0,
            'status': 'up',
            'reweight': 1.0
        }
    ]
}
class TestDashboard(unittest.TestCase):
    """Unit tests for the OSD helper functions in app.dashboard.views."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_find_host(self):
        # An OSD id that is no host's child resolves to 'unknown'.
        self.assertEqual(find_host_for_osd(0, CEPH_OSD_TREE), 'unknown')
        # OSD 1 is a child of testhost01 in the fixture tree.
        self.assertEqual(find_host_for_osd(1, CEPH_OSD_TREE), 'testhost01')

    def test_unhealthy_osd(self):
        # The fixture contains exactly two unhealthy OSDs (down + out).
        unhealthy = get_unhealthy_osd_details(CEPH_OSD_TREE)
        self.assertTrue(isinstance(unhealthy, list))
        self.assertEqual(len(unhealthy), 2)

    def test_unhealthy_osd_detail(self):
        for osd in get_unhealthy_osd_details(CEPH_OSD_TREE):
            if osd['name'] == 'osd.1':
                self.assertEqual(osd['status'], 'down')
            if osd['name'] == 'osd.2':
                self.assertEqual(osd['status'], 'out')
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
| 23.890244
| 57
| 0.455334
|
4a155dc79d24a41d9a82e292d670c43e08d1dfa2
| 6,194
|
py
|
Python
|
autoscale_cloudcafe/autoscale/models/request/autoscale_requests.py
|
alex/otter
|
e46316634ae4c211f7436aa4d41321ac1edba0af
|
[
"Apache-2.0"
] | 1
|
2015-11-08T12:58:44.000Z
|
2015-11-08T12:58:44.000Z
|
autoscale_cloudcafe/autoscale/models/request/autoscale_requests.py
|
alex/otter
|
e46316634ae4c211f7436aa4d41321ac1edba0af
|
[
"Apache-2.0"
] | null | null | null |
autoscale_cloudcafe/autoscale/models/request/autoscale_requests.py
|
alex/otter
|
e46316634ae4c211f7436aa4d41321ac1edba0af
|
[
"Apache-2.0"
] | null | null | null |
"""
Marshalling for autoscale requests
"""
from cafe.engine.models.base import AutoMarshallingModel
from cloudcafe.compute.servers_api.models.requests import CreateServer
import json
class Webhook_Request(AutoMarshallingModel):
    """
    Marshalling for webhook create requests.
    """

    def __init__(self, name, metadata):
        super(Webhook_Request, self).__init__()
        self.name = name
        self.metadata = metadata

    def _obj_to_json(self):
        # The create-webhook API expects a JSON *list* of webhook objects.
        return json.dumps([self._auto_to_dict()])
class Update_Webhook_Request(AutoMarshallingModel):
    """
    Marshalling for webhook update requests.
    """

    def __init__(self, name, metadata):
        super(Update_Webhook_Request, self).__init__()
        self.name = name
        self.metadata = metadata

    def _obj_to_json(self):
        # Updates take a single webhook object (not a list, unlike create).
        return json.dumps(self._auto_to_dict())
class Policy_Request(AutoMarshallingModel):
    """
    Marshalling for scaling-policy create requests.
    """

    def __init__(self, name, cooldown, change=None, change_percent=None,
                 desired_capacity=None, policy_type=None, args=None):
        super(Policy_Request, self).__init__()
        self.name = name
        self.cooldown = cooldown
        # camelCase attribute names map directly onto the JSON payload keys
        self.change = change
        self.changePercent = change_percent
        self.desiredCapacity = desired_capacity
        self.type = policy_type
        self.args = args

    def _obj_to_json(self):
        # The create-policy API expects a JSON *list* of policy objects.
        return json.dumps([self._auto_to_dict()])
class Update_Policy_Request(AutoMarshallingModel):
    """
    Marshalling for scaling-policy update requests.
    """

    def __init__(self, name, cooldown, change=None, change_percent=None,
                 desired_capacity=None, policy_type=None, args=None):
        super(Update_Policy_Request, self).__init__()
        self.name = name
        self.cooldown = cooldown
        # camelCase attribute names map directly onto the JSON payload keys
        self.change = change
        self.changePercent = change_percent
        self.desiredCapacity = desired_capacity
        self.type = policy_type
        self.args = args

    def _obj_to_json(self):
        # Updates take a single policy object (not a list, unlike create).
        return json.dumps(self._auto_to_dict())
class Group_Request(AutoMarshallingModel):
    """
    Marshalling for group configuration requests.
    """

    def __init__(self, name, cooldown, min_entities, max_entities=None,
                 metadata=None):
        super(Group_Request, self).__init__()
        self.name = name
        self.cooldown = cooldown
        # camelCase attribute names map directly onto the JSON payload keys
        self.minEntities = min_entities
        self.maxEntities = max_entities
        self.metadata = metadata

    def _obj_to_json(self):
        return json.dumps(self._auto_to_dict())
class Config_Request(AutoMarshallingModel):
    """
    Marshalling for launch configuration requests.
    """

    def __init__(self, name, image_ref, flavor_ref, personality=None,
                 metadata=None, disk_config=None, networks=None,
                 load_balancers=None):
        super(Config_Request, self).__init__()
        self.name = name
        self.image_ref = image_ref
        self.flavor_ref = flavor_ref
        self.personality = personality
        self.metadata = metadata
        self.disk_config = disk_config
        self.networks = networks
        self.load_balancers = load_balancers

    def _obj_to_json(self):
        # Delegate server serialization to the compute CreateServer model,
        # then embed the parsed result as the 'args' payload.
        server_request = CreateServer(name=self.name, imageRef=self.image_ref,
                                      flavorRef=self.flavor_ref,
                                      personality=self.personality,
                                      metadata=self.metadata,
                                      diskConfig=self.disk_config,
                                      networks=self.networks)
        args = json.loads(server_request._obj_to_json())
        if self.load_balancers:
            args['loadBalancers'] = self.load_balancers
        return json.dumps({'type': 'launch_server', 'args': args})
class ScalingGroup_Request(AutoMarshallingModel):
    """
    Marshalling for full scaling-group create requests: group configuration
    (gc_*), launch configuration (lc_*) and optional scaling policies.
    """

    def __init__(self, gc_name, gc_cooldown, gc_min_entities, lc_name,
                 lc_image_ref, lc_flavor_ref,
                 gc_max_entities=None, gc_metadata=None,
                 lc_personality=None, lc_metadata=None,
                 lc_disk_config=None, lc_networks=None,
                 lc_load_balancers=None, sp_list=None):
        super(ScalingGroup_Request, self).__init__()
        self.gc_name = gc_name
        self.gc_cooldown = gc_cooldown
        self.gc_min_entities = gc_min_entities
        self.gc_max_entities = gc_max_entities
        self.gc_metadata = gc_metadata
        self.lc_name = lc_name
        self.lc_image_ref = lc_image_ref
        self.lc_flavor_ref = lc_flavor_ref
        self.lc_personality = lc_personality
        self.lc_metadata = lc_metadata
        self.lc_disk_config = lc_disk_config
        self.lc_networks = lc_networks
        self.lc_load_balancers = lc_load_balancers
        self.sp_list = sp_list

    def _obj_to_json(self):
        # Reuse the dedicated request models for each sub-document.
        launch_config = Config_Request(name=self.lc_name, image_ref=self.lc_image_ref,
                                       flavor_ref=self.lc_flavor_ref,
                                       personality=self.lc_personality,
                                       metadata=self.lc_metadata,
                                       disk_config=self.lc_disk_config,
                                       networks=self.lc_networks,
                                       load_balancers=self.lc_load_balancers)
        group_config = Group_Request(name=self.gc_name, cooldown=self.gc_cooldown,
                                     min_entities=self.gc_min_entities,
                                     max_entities=self.gc_max_entities,
                                     metadata=self.gc_metadata)
        body = {'groupConfiguration': json.loads(group_config._obj_to_json()),
                'launchConfiguration': json.loads(launch_config._obj_to_json())}
        if self.sp_list:
            body['scalingPolicies'] = self.sp_list
        return json.dumps(body)
| 34.032967
| 79
| 0.620278
|
4a155e0e81b731e7054eb7cb3df4c7c6f5008de4
| 34,151
|
py
|
Python
|
python/ccxt/async/kucoin.py
|
guoliangli123/ccxt
|
b2c797330bb150e5bbe1db6f95d49a4f777c8da0
|
[
"MIT"
] | 2
|
2018-03-25T09:41:57.000Z
|
2019-03-14T04:07:56.000Z
|
python/ccxt/async/kucoin.py
|
guoliangli123/ccxt
|
b2c797330bb150e5bbe1db6f95d49a4f777c8da0
|
[
"MIT"
] | null | null | null |
python/ccxt/async/kucoin.py
|
guoliangli123/ccxt
|
b2c797330bb150e5bbe1db6f95d49a4f777c8da0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async.base.exchange import Exchange
import base64
import hashlib
import math
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import InvalidNonce
class kucoin (Exchange):
    def describe(self):
        """Return the static exchange descriptor (id, endpoints, capabilities,
        timeframes, fee schedule) merged over the base Exchange defaults."""
        return self.deep_extend(super(kucoin, self).describe(), {
            'id': 'kucoin',
            'name': 'Kucoin',
            'countries': 'HK',  # Hong Kong
            'version': 'v1',
            'rateLimit': 2000,
            'userAgent': self.userAgents['chrome'],
            'has': {
                'CORS': False,
                'cancelOrders': True,
                'createMarketOrder': False,
                'fetchDepositAddress': True,
                'fetchTickers': True,
                'fetchOHLCV': True,  # see the method implementation below
                'fetchOrder': True,
                'fetchOrders': False,
                'fetchClosedOrders': True,
                'fetchOpenOrders': True,
                'fetchMyTrades': True,
                'fetchCurrencies': True,
                'withdraw': True,
            },
            # values are minutes for numeric entries, Kucoin letter codes otherwise
            'timeframes': {
                '1m': 1,
                '5m': 5,
                '15m': 15,
                '30m': 30,
                '1h': 60,
                '8h': 480,
                '1d': 'D',
                '1w': 'W',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/33795655-b3c46e48-dcf6-11e7-8abe-dc4588ba7901.jpg',
                'api': {
                    'public': 'https://api.kucoin.com',
                    'private': 'https://api.kucoin.com',
                    'kitchen': 'https://kitchen.kucoin.com',
                    'kitchen-2': 'https://kitchen-2.kucoin.com',
                },
                'www': 'https://kucoin.com',
                'doc': 'https://kucoinapidocs.docs.apiary.io',
                'fees': 'https://news.kucoin.com/en/fee',
            },
            # endpoint path templates grouped by access level and HTTP verb
            'api': {
                'kitchen': {
                    'get': [
                        'open/chart/history',
                    ],
                },
                'public': {
                    'get': [
                        'open/chart/config',
                        'open/chart/history',
                        'open/chart/symbol',
                        'open/currencies',
                        'open/deal-orders',
                        'open/kline',
                        'open/lang-list',
                        'open/orders',
                        'open/orders-buy',
                        'open/orders-sell',
                        'open/tick',
                        'market/open/coin-info',
                        'market/open/coins',
                        'market/open/coins-trending',
                        'market/open/symbols',
                    ],
                },
                'private': {
                    'get': [
                        'account/balance',
                        'account/{coin}/wallet/address',
                        'account/{coin}/wallet/records',
                        'account/{coin}/balance',
                        'account/promotion/info',
                        'account/promotion/sum',
                        'deal-orders',
                        'order/active',
                        'order/active-map',
                        'order/dealt',
                        'order/detail',
                        'referrer/descendant/count',
                        'user/info',
                    ],
                    'post': [
                        'account/{coin}/withdraw/apply',
                        'account/{coin}/withdraw/cancel',
                        'account/promotion/draw',
                        'cancel-order',
                        'order',
                        'order/cancel-all',
                        'user/change-lang',
                    ],
                },
            },
            'fees': {
                'trading': {
                    'maker': 0.001,
                    'taker': 0.001,
                },
                'funding': {
                    'tierBased': False,
                    'percentage': False,
                    # flat per-currency withdrawal fees, denominated in the currency itself
                    'withdraw': {
                        'KCS': 2.0,
                        'BTC': 0.0005,
                        'USDT': 10.0,
                        'ETH': 0.01,
                        'LTC': 0.001,
                        'NEO': 0.0,
                        'GAS': 0.0,
                        'KNC': 0.5,
                        'BTM': 5.0,
                        'QTUM': 0.1,
                        'EOS': 0.5,
                        'CVC': 3.0,
                        'OMG': 0.1,
                        'PAY': 0.5,
                        'SNT': 20.0,
                        'BHC': 1.0,
                        'HSR': 0.01,
                        'WTC': 0.1,
                        'VEN': 2.0,
                        'MTH': 10.0,
                        'RPX': 1.0,
                        'REQ': 20.0,
                        'EVX': 0.5,
                        'MOD': 0.5,
                        'NEBL': 0.1,
                        'DGB': 0.5,
                        'CAG': 2.0,
                        'CFD': 0.5,
                        'RDN': 0.5,
                        'UKG': 5.0,
                        'BCPT': 5.0,
                        'PPT': 0.1,
                        'BCH': 0.0005,
                        'STX': 2.0,
                        'NULS': 1.0,
                        'GVT': 0.1,
                        'HST': 2.0,
                        'PURA': 0.5,
                        'SUB': 2.0,
                        'QSP': 5.0,
                        'POWR': 1.0,
                        'FLIXX': 10.0,
                        'LEND': 20.0,
                        'AMB': 3.0,
                        'RHOC': 2.0,
                        'R': 2.0,
                        'DENT': 50.0,
                        'DRGN': 1.0,
                        'ACT': 0.1,
                    },
                    'deposit': {},
                },
            },
            # exchange-specific options
            'options': {
                'timeDifference': 0,  # the difference between system clock and Kucoin clock
                'adjustForTimeDifference': False,  # controls the adjustment logic upon instantiation
            },
        })
    def nonce(self):
        # Server-adjusted nonce: local clock corrected by the measured drift
        # from Kucoin's clock (see load_time_difference).
        return self.milliseconds() - self.options['timeDifference']
    async def load_time_difference(self):
        """Measure and store the offset (ms) between the local clock and the
        Kucoin server timestamp reported by the public tick endpoint."""
        response = await self.publicGetOpenTick()
        after = self.milliseconds()
        self.options['timeDifference'] = int(after - response['timestamp'])
        return self.options['timeDifference']
    async def fetch_markets(self):
        """Fetch the tradable symbols from Kucoin and normalize them into the
        unified ccxt market structure."""
        response = await self.publicGetMarketOpenSymbols()
        if self.options['adjustForTimeDifference']:
            await self.load_time_difference()
        markets = response['data']
        result = []
        for i in range(0, len(markets)):
            market = markets[i]
            id = market['symbol']
            base = market['coinType']
            quote = market['coinTypePair']
            # map exchange-specific codes to unified currency codes
            base = self.common_currency_code(base)
            quote = self.common_currency_code(quote)
            symbol = base + '/' + quote
            # fixed 8-decimal precision for both amount and price
            precision = {
                'amount': 8,
                'price': 8,
            }
            active = market['trading']
            result.append({
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'active': active,
                'taker': self.safe_float(market, 'feeRate'),
                'maker': self.safe_float(market, 'feeRate'),
                'info': market,
                'lot': math.pow(10, -precision['amount']),
                'precision': precision,
                'limits': {
                    'amount': {
                        'min': math.pow(10, -precision['amount']),
                        'max': None,
                    },
                    'price': {
                        'min': None,
                        'max': None,
                    },
                },
            })
        return result
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
response = await self.privateGetAccountCoinWalletAddress(self.extend({
'coin': currency['id'],
}, params))
data = response['data']
address = self.safe_string(data, 'address')
self.check_address(address)
tag = self.safe_string(data, 'userOid')
return {
'currency': code,
'address': address,
'tag': tag,
'status': 'ok',
'info': response,
}
async def fetch_currencies(self, params={}):
response = await self.publicGetMarketOpenCoins(params)
currencies = response['data']
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
id = currency['coin']
# todo: will need to rethink the fees
# to add support for multiple withdrawal/deposit methods and
# differentiated fees for each particular method
code = self.common_currency_code(id)
precision = currency['tradePrecision']
deposit = currency['enableDeposit']
withdraw = currency['enableWithdraw']
active = (deposit and withdraw)
result[code] = {
'id': id,
'code': code,
'info': currency,
'name': currency['name'],
'active': active,
'status': 'ok',
'fee': currency['withdrawMinFee'], # todo: redesign
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'withdraw': {
'min': currency['withdrawMinAmount'],
'max': math.pow(10, precision),
},
},
}
return result
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetAccountBalance(self.extend({
'limit': 20, # default 12, max 20
'page': 1,
}, params))
balances = response['data']
result = {'info': balances}
indexed = self.index_by(balances, 'coinType')
keys = list(indexed.keys())
for i in range(0, len(keys)):
id = keys[i]
currency = self.common_currency_code(id)
account = self.account()
balance = indexed[id]
used = float(balance['freezeBalance'])
free = float(balance['balance'])
total = self.sum(free, used)
account['free'] = free
account['used'] = used
account['total'] = total
result[currency] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
response = await self.publicGetOpenOrders(self.extend({
'symbol': market['id'],
}, params))
orderbook = response['data']
return self.parse_order_book(orderbook, None, 'BUY', 'SELL')
    def parse_order(self, order, market=None):
        """Normalize a raw Kucoin order into the unified order structure.

        Kucoin endpoints return orders in several different shapes, so most
        fields are probed under multiple keys and missing values are
        reconstructed from the ones present (filled from trades, cost from
        price * filled, amount from filled + remaining, etc.).
        """
        # side may arrive as 'direction' or 'type', in upper- or lower-case
        side = self.safe_value(order, 'direction')
        if side is None:
            side = order['type']
        if side is not None:
            side = side.lower()
        orderId = self.safe_string(order, 'orderOid')
        if orderId is None:
            orderId = self.safe_string(order, 'oid')
        # do not confuse trades with orders
        trades = None
        if 'dealOrders' in order:
            trades = self.safe_value(order['dealOrders'], 'datas')
        if trades is not None:
            trades = self.parse_trades(trades, market)
            # the raw trades lack side/order-id, so stamp them from the order
            for i in range(0, len(trades)):
                trades[i]['side'] = side
                trades[i]['order'] = orderId
        symbol = None
        if market:
            symbol = market['symbol']
        else:
            symbol = order['coinType'] + '/' + order['coinTypePair']
        timestamp = self.safe_value(order, 'createdAt')
        remaining = self.safe_float(order, 'pendingAmount')
        status = self.safe_value(order, 'status')
        filled = self.safe_float(order, 'dealAmount')
        amount = self.safe_float(order, 'amount')
        cost = self.safe_float(order, 'dealValue')
        if cost is None:
            cost = self.safe_float(order, 'dealValueTotal')
        # infer status from the remaining amount when not reported explicitly
        if status is None:
            if remaining is not None:
                if remaining > 0:
                    status = 'open'
                else:
                    status = 'closed'
        if filled is None:
            if status is not None:
                if status == 'closed':
                    filled = self.safe_float(order, 'amount')
        elif filled == 0.0:
            # reported zero fill but trades are attached: rebuild filled/cost
            if trades is not None:
                cost = 0
                for i in range(0, len(trades)):
                    filled += trades[i]['amount']
                    cost += trades[i]['cost']
        # kucoin price and amount fields have varying names
        # thus the convoluted spaghetti code below
        price = None
        if filled is not None:
            # if the order was filled at least for some part
            if filled > 0.0:
                price = self.safe_float(order, 'price')
                if price is None:
                    price = self.safe_float(order, 'dealPrice')
                if price is None:
                    price = self.safe_float(order, 'dealPriceAverage')
            else:
                # it's an open order, not filled yet, use the initial price
                price = self.safe_float(order, 'orderPrice')
                if price is None:
                    price = self.safe_float(order, 'price')
            if price is not None:
                if cost is None:
                    cost = price * filled
            if amount is None:
                if remaining is not None:
                    amount = self.sum(filled, remaining)
            elif remaining is None:
                remaining = amount - filled
        if status == 'open':
            if (cost is None) or (cost == 0.0):
                if price is not None:
                    if amount is not None:
                        cost = amount * price
        # fee currency: quote for sells, base for buys
        feeCurrency = None
        if market:
            feeCurrency = market['quote'] if (side == 'sell') else market['base']
        else:
            feeCurrencyField = 'coinTypePair' if (side == 'sell') else 'coinType'
            feeCurrency = self.safe_string(order, feeCurrencyField)
            if feeCurrency is not None:
                if feeCurrency in self.currencies_by_id:
                    feeCurrency = self.currencies_by_id[feeCurrency]['code']
        feeCost = self.safe_float(order, 'fee')
        fee = {
            'cost': self.safe_float(order, 'feeTotal', feeCost),
            'rate': self.safe_float(order, 'feeRate'),
            'currency': feeCurrency,
        }
        result = {
            'info': order,
            'id': orderId,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'type': 'limit',  # Kucoin v1 supports limit orders only
            'side': side,
            'price': price,
            'amount': amount,
            'cost': cost,
            'filled': filled,
            'remaining': remaining,
            'status': status,
            'fee': fee,
            'trades': trades,
        }
        return result
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ExchangeError(self.id + ' fetchOrder requires a symbol argument')
orderType = self.safe_value(params, 'type')
if orderType is None:
raise ExchangeError(self.id + ' fetchOrder requires a type parameter("BUY" or "SELL")')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'type': orderType,
'orderOid': id,
}
response = await self.privateGetOrderDetail(self.extend(request, params))
if not response['data']:
raise OrderNotFound(self.id + ' ' + self.json(response))
order = self.parse_order(response['data'], market)
orderId = order['id']
if orderId in self.orders:
order['status'] = self.orders[orderId]['status']
self.orders[orderId] = order
return order
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
if not symbol:
raise ExchangeError(self.id + ' fetchOpenOrders requires a symbol')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.privateGetOrderActiveMap(self.extend(request, params))
orders = self.array_concat(response['data']['SELL'], response['data']['BUY'])
for i in range(0, len(orders)):
order = self.parse_order(self.extend(orders[i], {
'status': 'open',
}), market)
orderId = order['id']
if orderId in self.orders:
if self.orders[orderId]['status'] != 'open':
order['status'] = self.orders[orderId]['status']
self.orders[order['id']] = order
openOrders = self.filter_by(self.orders, 'status', 'open')
return self.filter_by_symbol_since_limit(openOrders, symbol, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=20, params={}):
request = {}
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['since'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetOrderDealt(self.extend(request, params))
orders = response['data']['datas']
for i in range(0, len(orders)):
order = self.parse_order(self.extend(orders[i], {
'status': 'closed',
}), market)
orderId = order['id']
if orderId in self.orders:
if self.orders[orderId]['status'] == 'canceled':
order['status'] = self.orders[orderId]['status']
self.orders[order['id']] = order
closedOrders = self.filter_by(self.orders, 'status', 'closed')
return self.filter_by_symbol_since_limit(closedOrders, symbol, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
if type != 'limit':
raise ExchangeError(self.id + ' allows limit orders only')
await self.load_markets()
market = self.market(symbol)
quote = market['quote']
base = market['base']
request = {
'symbol': market['id'],
'type': side.upper(),
'price': self.truncate(price, self.currencies[quote]['precision']),
'amount': self.truncate(amount, self.currencies[base]['precision']),
}
price = float(price)
amount = float(amount)
cost = price * amount
response = await self.privatePostOrder(self.extend(request, params))
orderId = self.safe_string(response['data'], 'orderOid')
order = {
'info': response,
'id': orderId,
'timestamp': None,
'datetime': None,
'type': type,
'side': side,
'amount': amount,
'filled': None,
'remaining': None,
'price': price,
'cost': cost,
'status': 'open',
'fee': None,
'trades': None,
}
self.orders[orderId] = order
return order
async def cancel_orders(self, symbol=None, params={}):
# https://kucoinapidocs.docs.apiary.io/#reference/0/trading/cancel-all-orders
# docs say symbol is required, but it seems to be optional
# you can cancel all orders, or filter by symbol or type or both
request = {}
if symbol:
await self.load_markets()
market = self.market(symbol)
request['symbol'] = market['id']
if 'type' in params:
request['type'] = params['type'].upper()
params = self.omit(params, 'type')
response = await self.privatePostOrderCancelAll(self.extend(request, params))
openOrders = self.filter_by(self.orders, 'status', 'open')
for i in range(0, len(openOrders)):
order = openOrders[i]
orderId = order['id']
self.orders[orderId]['status'] = 'canceled'
return response
async def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ExchangeError(self.id + ' cancelOrder requires a symbol')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'orderOid': id,
}
if 'type' in params:
request['type'] = params['type'].upper()
params = self.omit(params, 'type')
else:
raise ExchangeError(self.id + ' cancelOrder requires parameter type=["BUY"|"SELL"]')
response = await self.privatePostCancelOrder(self.extend(request, params))
if id in self.orders:
self.orders[id]['status'] = 'canceled'
else:
# store it in cache for further references
timestamp = self.milliseconds()
side = request['type'].lower()
self.orders[id] = {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'type': None,
'side': side,
'symbol': symbol,
'status': 'canceled',
}
return response
def parse_ticker(self, ticker, market=None):
timestamp = ticker['datetime']
symbol = None
if market:
symbol = market['symbol']
else:
symbol = ticker['coinType'] + '/' + ticker['coinTypePair']
# TNC coin doesn't have changerate for some reason
change = self.safe_float(ticker, 'changeRate')
if change is not None:
change *= 100
last = self.safe_float(ticker, 'lastDealPrice')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'vol'),
'quoteVolume': self.safe_float(ticker, 'volValue'),
'info': ticker,
}
async def fetch_tickers(self, symbols=None, params={}):
response = await self.publicGetMarketOpenSymbols(params)
tickers = response['data']
result = {}
for t in range(0, len(tickers)):
ticker = self.parse_ticker(tickers[t])
symbol = ticker['symbol']
result[symbol] = ticker
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
response = await self.publicGetOpenTick(self.extend({
'symbol': market['id'],
}, params))
ticker = response['data']
return self.parse_ticker(ticker, market)
    def parse_trade(self, trade, market=None):
        """Normalize a trade into the unified trade structure.

        Handles two wire formats: the public feed returns a flat list
        [timestamp, 'BUY'|'SELL', price, amount], while private endpoints
        return a dict with named fields and fee information.
        """
        id = None
        order = None
        info = trade
        timestamp = None
        type = None
        side = None
        price = None
        cost = None
        amount = None
        fee = None
        if isinstance(trade, list):
            # public trade: positional fields only, no id/fee available
            timestamp = trade[0]
            type = 'limit'
            if trade[1] == 'BUY':
                side = 'buy'
            elif trade[1] == 'SELL':
                side = 'sell'
            price = trade[2]
            amount = trade[3]
        else:
            # private trade: dict with named fields
            timestamp = self.safe_value(trade, 'createdAt')
            order = self.safe_string(trade, 'orderOid')
            if order is None:
                order = self.safe_string(trade, 'oid')
            side = self.safe_string(trade, 'dealDirection')
            if side is not None:
                side = side.lower()
            price = self.safe_float(trade, 'dealPrice')
            amount = self.safe_float(trade, 'amount')
            cost = self.safe_float(trade, 'dealValue')
            # map the raw fee currency id to a unified code when known
            feeCurrency = None
            if 'coinType' in trade:
                feeCurrency = self.safe_string(trade, 'coinType')
                if feeCurrency is not None:
                    if feeCurrency in self.currencies_by_id:
                        feeCurrency = self.currencies_by_id[feeCurrency]['code']
            fee = {
                'cost': self.safe_float(trade, 'fee'),
                'currency': feeCurrency,
            }
        symbol = None
        if market is not None:
            symbol = market['symbol']
        return {
            'id': id,
            'order': order,
            'info': info,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'type': type,
            'side': side,
            'price': price,
            'cost': cost,
            'amount': amount,
            'fee': fee,
        }
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
response = await self.publicGetOpenDealOrders(self.extend({
'symbol': market['id'],
}, params))
return self.parse_trades(response['data'], market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if not symbol:
raise ExchangeError(self.id + ' fetchMyTrades requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit:
request['limit'] = limit
response = await self.privateGetDealOrders(self.extend(request, params))
return self.parse_trades(response['data']['datas'], market, since, limit)
def parse_trading_view_ohlcvs(self, ohlcvs, market=None, timeframe='1m', since=None, limit=None):
result = []
for i in range(0, len(ohlcvs['t'])):
result.append([
ohlcvs['t'][i] * 1000,
ohlcvs['o'][i],
ohlcvs['h'][i],
ohlcvs['l'][i],
ohlcvs['c'][i],
ohlcvs['v'][i],
])
return self.parse_ohlcvs(result, market, timeframe, since, limit)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Fetch OHLCV candles via the TradingView-style chart history endpoint.

        The endpoint takes a from/to window in seconds, so the requested
        candle count ('limit') is translated into a time span based on the
        timeframe's duration in minutes.
        """
        await self.load_markets()
        market = self.market(symbol)
        end = self.seconds()
        resolution = self.timeframes[timeframe]
        # convert 'resolution' to minutes in order to calculate 'from' later
        minutes = resolution
        if minutes == 'D':
            if limit is None:
                limit = 30  # 30 days, 1 month
            minutes = 1440
        elif minutes == 'W':
            if limit is None:
                limit = 52  # 52 weeks, 1 year
            minutes = 10080
        elif limit is None:
            # last 1440 periods, whatever the duration of the period is
            # for 1m it equals 1 day(24 hours)
            # for 5m it equals 5 days
            # ...
            limit = 1440
        start = end - limit * minutes * 60
        # if 'since' has been supplied by user
        if since is not None:
            start = int(since / 1000)  # convert milliseconds to seconds
            end = min(end, self.sum(start, limit * minutes * 60))
        request = {
            'symbol': market['id'],
            'resolution': resolution,
            'from': start,
            'to': end,
        }
        response = await self.publicGetOpenChartHistory(self.extend(request, params))
        return self.parse_trading_view_ohlcvs(response, market, timeframe, since, limit)
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
self.check_address(address)
response = await self.privatePostAccountCoinWithdrawApply(self.extend({
'coin': currency['id'],
'amount': amount,
'address': address,
}, params))
return {
'info': response,
'id': None,
}
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the URL, body and auth headers for a request.

        Private calls sign base64("<endpoint>/<nonce>/<queryString>") with
        HMAC-SHA256 using the API secret, sent via the KC-API-* headers.
        """
        endpoint = '/' + self.version + '/' + self.implode_params(path, params)
        url = self.urls['api'][api] + endpoint
        query = self.omit(params, self.extract_params(path))
        if api == 'private':
            self.check_required_credentials()
            # their nonce is always a calibrated synched milliseconds-timestamp
            nonce = self.nonce()
            queryString = ''
            nonce = str(nonce)
            if query:
                # params are sorted alphabetically before encoding and signing
                queryString = self.rawencode(self.keysort(query))
                url += '?' + queryString
                if method != 'GET':
                    body = queryString
            auth = endpoint + '/' + nonce + '/' + queryString
            payload = base64.b64encode(self.encode(auth))
            # payload should be "encoded" as returned from stringToBase64
            signature = self.hmac(payload, self.encode(self.secret), hashlib.sha256)
            headers = {
                'KC-API-KEY': self.apiKey,
                'KC-API-NONCE': nonce,
                'KC-API-SIGNATURE': signature,
            }
        else:
            if query:
                url += '?' + self.urlencode(query)
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def throw_exception_on_error(self, response):
#
# API endpoints return the following formats
# {success: False, code: "ERROR", msg: "Min price:100.0"}
# {success: True, code: "OK", msg: "Operation succeeded."}
#
# Web OHLCV endpoint returns self:
# {s: "ok", o: [], h: [], l: [], c: [], v: []}
#
# This particular method handles API responses only
#
if not('success' in list(response.keys())):
return
if response['success'] is True:
return # not an error
if not('code' in list(response.keys())) or not('msg' in list(response.keys())):
raise ExchangeError(self.id + ': malformed response: ' + self.json(response))
code = self.safe_string(response, 'code')
message = self.safe_string(response, 'msg')
feedback = self.id + ' ' + self.json(response)
if code == 'UNAUTH':
if message == 'Invalid nonce':
raise InvalidNonce(feedback)
raise AuthenticationError(feedback)
elif code == 'ERROR':
if message.find('The precision of amount') >= 0:
raise InvalidOrder(feedback) # amount violates precision.amount
if message.find('Min amount each order') >= 0:
raise InvalidOrder(feedback) # amount < limits.amount.min
if message.find('Min price:') >= 0:
raise InvalidOrder(feedback) # price < limits.price.min
if message.find('The precision of price') >= 0:
raise InvalidOrder(feedback) # price violates precision.price
elif code == 'NO_BALANCE':
if message.find('Insufficient balance') >= 0:
raise InsufficientFunds(feedback)
raise ExchangeError(self.id + ': unknown response: ' + self.json(response))
def handle_errors(self, code, reason, url, method, headers, body, response=None):
if response is not None:
# JS callchain parses body beforehand
self.throw_exception_on_error(response)
elif body and(body[0] == '{'):
# Python/PHP callchains don't have json available at self step
self.throw_exception_on_error(json.loads(body))
| 39.029714
| 126
| 0.48432
|
4a155ec278a5ccbe2c1b45913942a5c7816d4aaf
| 780
|
py
|
Python
|
apps/courses/serializers.py
|
umairqadir97/learning-management-system
|
9924326e77146830f3fb05a9d86f876f86c4d9b8
|
[
"MIT"
] | 7
|
2020-06-03T15:31:44.000Z
|
2021-11-21T21:19:59.000Z
|
apps/courses/serializers.py
|
umairqadir97/learning-management-system
|
9924326e77146830f3fb05a9d86f876f86c4d9b8
|
[
"MIT"
] | null | null | null |
apps/courses/serializers.py
|
umairqadir97/learning-management-system
|
9924326e77146830f3fb05a9d86f876f86c4d9b8
|
[
"MIT"
] | 10
|
2020-11-06T00:40:01.000Z
|
2022-01-12T03:20:14.000Z
|
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Courses
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Lightweight projection of Django's built-in User, nested inside courses."""
    class Meta:
        # only public identity fields are exposed; no password/permissions
        model = User
        fields = ('username', 'first_name', 'last_name', 'email')
class CourseSerializer(serializers.ModelSerializer):
    """Serializer for Courses; the owner is stamped from the authenticated user."""
    owner = UserSerializer(required=False)

    class Meta:
        model = Courses
        fields = '__all__'

    def update(self, instance, validated_data):
        """Apply the validated fields to *instance* and persist it.

        Fix: the previous implementation printed the payload and returned
        None, so every PUT/PATCH responded as if nothing was saved.
        """
        # nested 'owner' is treated as read-only: ownership is fixed at creation
        validated_data.pop('owner', None)
        for field, value in validated_data.items():
            setattr(instance, field, value)
        instance.save()
        return instance

    def create(self, validated_data):
        """Create a course owned by the requesting user."""
        request = self.context.get("request")
        course = Courses.objects.create(**validated_data)
        course.owner = request.user
        course.save()
        return course
| 25.16129
| 65
| 0.673077
|
4a155ed5cb37592835d6fe20d2bbabd01cf2d725
| 1,221
|
py
|
Python
|
pipenv/vendor/shellingham/posix/ps.py
|
erikkemperman/pipenv
|
8707fe52571422ff5aa2905a2063fdf5ce14840b
|
[
"MIT"
] | 3
|
2020-06-04T05:22:33.000Z
|
2020-09-23T19:44:02.000Z
|
pipenv/vendor/shellingham/posix/ps.py
|
erikkemperman/pipenv
|
8707fe52571422ff5aa2905a2063fdf5ce14840b
|
[
"MIT"
] | 9
|
2019-12-05T00:49:12.000Z
|
2021-09-08T01:31:25.000Z
|
pipenv/vendor/shellingham/posix/ps.py
|
erikkemperman/pipenv
|
8707fe52571422ff5aa2905a2063fdf5ce14840b
|
[
"MIT"
] | 1
|
2019-06-10T13:45:08.000Z
|
2019-06-10T13:45:08.000Z
|
import errno
import shlex
import subprocess
import sys
from ._core import Process
class PsNotAvailable(EnvironmentError):
    """Raised when the `ps` executable cannot be found on PATH."""
    pass
def get_process_mapping():
    """Try to look up the process tree via the output of `ps`.

    Returns a dict mapping pid (str) -> Process(args, pid, ppid).
    Raises PsNotAvailable when `ps` is not installed.
    """
    command = ['ps', '-ww', '-o', 'pid=', '-o', 'ppid=', '-o', 'args=']
    try:
        ps_output = subprocess.check_output(command)
    except OSError as e:  # Python 2-compatible FileNotFoundError.
        if e.errno != errno.ENOENT:
            raise
        raise PsNotAvailable('ps not found')
    except subprocess.CalledProcessError as e:
        # `ps` can return 1 if the process list is completely empty.
        # (sarugaku/shellingham#15)
        if e.output.strip():
            raise
        return {}
    if not isinstance(ps_output, str):
        encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
        ps_output = ps_output.decode(encoding)
    mapping = {}
    for line in ps_output.split('\n'):
        try:
            # malformed lines (or unparsable args) are simply skipped
            pid, ppid, args = line.strip().split(None, 2)
            mapping[pid] = Process(
                args=tuple(shlex.split(args)), pid=pid, ppid=ppid,
            )
        except ValueError:
            continue
    return mapping
| 28.395349
| 74
| 0.587224
|
4a155f63953ebb5962e824e6c65a9ba6edeb6fc1
| 4,098
|
py
|
Python
|
ui/src/lib/pybitcointools/bitcoin/mnemonic.py
|
superzitao/Wallet
|
a7018511afcf47e04e563640e52b86fd4862f838
|
[
"MIT"
] | null | null | null |
ui/src/lib/pybitcointools/bitcoin/mnemonic.py
|
superzitao/Wallet
|
a7018511afcf47e04e563640e52b86fd4862f838
|
[
"MIT"
] | null | null | null |
ui/src/lib/pybitcointools/bitcoin/mnemonic.py
|
superzitao/Wallet
|
a7018511afcf47e04e563640e52b86fd4862f838
|
[
"MIT"
] | null | null | null |
import hashlib
import os.path
import binascii
import random
from bisect import bisect_left
wordlist_english=list(open(os.path.join(os.path.dirname(os.path.realpath(__file__)),'english.txt'),'r'))
def eint_to_bytes(entint, entbits):
    """Serialize entropy integer *entint* into its big-endian ``entbits``-bit byte string.

    Fixes two defects: the hex string used to be zero-filled to a hard-coded
    32 nibbles (128 bits), corrupting every other entropy size, and a debug
    ``print`` leaked the raw entropy to stdout.
    """
    hexstr = hex(entint)[2:].rstrip('L').zfill(entbits // 4)
    return binascii.unhexlify(hexstr)
def mnemonic_int_to_words(mint,mint_num_words,wordlist=wordlist_english):
    """Decode integer *mint* into its mnemonic words, most-significant word first."""
    indices = [(mint >> (11 * shift)) & 0x7FF for shift in range(mint_num_words)]
    return [wordlist[index].strip() for index in reversed(indices)]
def entropy_cs(entbytes):
    """Return (checksum_int, checksum_bit_length) for the given entropy bytes.

    The checksum is the top (len*8)//32 bits of SHA-256(entbytes).
    """
    checksum_size = (8 * len(entbytes)) // 32
    digest_hex = hashlib.sha256(entbytes).hexdigest()
    csint = int(digest_hex, 16) >> (256 - checksum_size)
    return csint, checksum_size
def entropy_to_words(entbytes,wordlist=wordlist_english):
    """Encode entropy bytes as a mnemonic word list(entropy must be a 4-byte multiple)."""
    nbytes = len(entbytes)
    if nbytes < 4 or nbytes % 4 != 0:
        raise ValueError("The size of the entropy must be a multiple of 4 bytes (multiple of 32 bits)")
    entropy_size = 8 * nbytes
    csint, checksum_size = entropy_cs(entbytes)
    entint = int(binascii.hexlify(entbytes), 16)
    # append the checksum bits below the entropy bits
    mint = (entint << checksum_size) | csint
    word_count = (entropy_size + checksum_size) // 11
    return mnemonic_int_to_words(mint, word_count, wordlist)
def words_bisect(word,wordlist=wordlist_english):
    """Return (lo, hi) bisection bounds for *word* in the sorted wordlist.

    NOTE(review): hi is derived by bisecting the reversed slice
    wordlist[:lo:-1]; callers (words_split) treat (hi - lo) == 1 as a unique
    prefix match — confirm the bound semantics before modifying.
    """
    lo=bisect_left(wordlist,word)
    hi=len(wordlist)-bisect_left(wordlist[:lo:-1],word)
    return lo,hi
def words_split(wordstr,wordlist=wordlist_english):
    """Split a concatenated mnemonic string into individual words.

    Repeatedly pops the shortest unambiguous prefix (1..8 chars) off the
    front of the string, narrowing the candidate wordlist slice after each
    ambiguous attempt.
    """
    def popword(wordstr,wordlist):
        # grow the candidate prefix one character at a time
        for fwl in range(1,9):
            w=wordstr[:fwl].strip()
            lo,hi=words_bisect(w,wordlist)
            if(hi-lo == 1):
                # unique match: consume the prefix and return the remainder
                return w,wordstr[fwl:].lstrip()
            # still ambiguous: restrict further search to the matching range
            wordlist=wordlist[lo:hi]
        raise Exception("Wordstr %s not found in list" %(w))
    words=[]
    tail=wordstr
    while(len(tail)):
        head,tail=popword(tail,wordlist)
        words.append(head)
    return words
def words_to_mnemonic_int(words,wordlist=wordlist_english):
    """Pack a mnemonic(string or word list) back into its big-integer form."""
    if isinstance(words, str):
        words = words_split(words, wordlist)
    total = 0
    # last word occupies the lowest 11 bits
    for position, word in enumerate(reversed(words)):
        total += wordlist.index(word) << (11 * position)
    return total
def words_verify(words,wordlist=wordlist_english):
    """Validate a mnemonic's embedded checksum; returns True when it matches.

    Fixes two latent bugs: the serializer was called under a stale name
    ``_eint_to_bytes`` (NameError at runtime), and ``entropy_cs`` returns a
    ``(csint, size)`` tuple, so comparing it to the bare integer checksum
    always yielded False.
    """
    if isinstance(words, str):
        words = words_split(words, wordlist)
    mint = words_to_mnemonic_int(words, wordlist)
    mint_bits = len(words) * 11
    cs_bits = mint_bits // 32
    entropy_bits = mint_bits - cs_bits
    eint = mint >> cs_bits            # entropy part
    csint = mint & ((1 << cs_bits) - 1)  # embedded checksum part
    ebytes = eint_to_bytes(eint, entropy_bits)
    return csint == entropy_cs(ebytes)[0]
def mnemonic_to_seed(mnemonic_phrase, passphrase=b''):
    """Derive the 64-byte seed: PBKDF2-HMAC-SHA512, 2048 rounds, salt b'mnemonic'+passphrase.

    Both arguments must be bytes. Tries hashlib first, then PyCrypto, then
    the pbkdf2 package. Fixes: the bare ``except:`` clauses were narrowed to
    ``ImportError`` so unrelated errors are no longer swallowed, and the
    helper is named after the hash it actually uses (SHA-512, not SHA-256).
    """
    try:
        from hashlib import pbkdf2_hmac

        def pbkdf2_hmac_sha512(password, salt, iters=2048):
            return pbkdf2_hmac(hash_name='sha512', password=password, salt=salt, iterations=iters)
    except ImportError:
        try:
            from Crypto.Protocol.KDF import PBKDF2
            from Crypto.Hash import SHA512, HMAC

            def pbkdf2_hmac_sha512(password, salt, iters=2048):
                return PBKDF2(password=password, salt=salt, dkLen=64, count=iters,
                              prf=lambda p, s: HMAC.new(p, s, SHA512).digest())
        except ImportError:
            try:
                from pbkdf2 import PBKDF2
                import hmac

                def pbkdf2_hmac_sha512(password, salt, iters=2048):
                    return PBKDF2(password, salt, iterations=iters, macmodule=hmac,
                                  digestmodule=hashlib.sha512).read(64)
            except ImportError:
                raise RuntimeError("No implementation of pbkdf2 was found!")
    return pbkdf2_hmac_sha512(password=mnemonic_phrase, salt=b'mnemonic' + passphrase)
def words_mine(prefix,entbits,satisfunction,wordlist=wordlist_english,randombits=random.getrandbits):
    """Brute-force search for a mnemonic starting with *prefix* whose words satisfy *satisfunction*.

    Fix: the progress counter was never incremented, so the progress line
    printed on every single iteration and always reported zero progress.
    (The printed value is a fraction of the search space, not a percent —
    kept as-is to preserve output format.)
    """
    prefix_bits = len(prefix) * 11
    mine_bits = entbits - prefix_bits
    pint = words_to_mnemonic_int(prefix, wordlist)
    pint <<= mine_bits  # fixed high bits from the prefix
    dint = randombits(mine_bits)
    count = 0
    while not satisfunction(entropy_to_words(eint_to_bytes(pint + dint, entbits))):
        dint = randombits(mine_bits)
        count += 1
        if (count & 0xFFFF) == 0:
            print("Searched %f percent of the space" % (float(count) / float(1 << mine_bits)))
    return entropy_to_words(eint_to_bytes(pint + dint, entbits))
if __name__=="__main__":
import json
testvectors=json.load(open('vectors.json','r'))
passed=True
for v in testvectors['english']:
ebytes=binascii.unhexlify(v[0])
w=' '.join(entropy_to_words(ebytes))
seed=mnemonic_to_seed(w,passphrase='TREZOR')
passed = passed and w==v[1]
passed = passed and binascii.hexlify(seed)==v[2]
| 32.267717
| 113
| 0.758419
|
4a155ff882ed1a8e72e89a111f577e2c5d222f93
| 324
|
py
|
Python
|
reviews/admin.py
|
Flict-dev/Boxes-api
|
a2aa76b2db6aaa5af1028138566dc3b2b251fcdb
|
[
"MIT"
] | 1
|
2021-12-13T22:03:58.000Z
|
2021-12-13T22:03:58.000Z
|
reviews/admin.py
|
Flict-dev/Boxes-api
|
a2aa76b2db6aaa5af1028138566dc3b2b251fcdb
|
[
"MIT"
] | null | null | null |
reviews/admin.py
|
Flict-dev/Boxes-api
|
a2aa76b2db6aaa5af1028138566dc3b2b251fcdb
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Reviews
@admin.register(Reviews)
class ReviewsAdmin(admin.ModelAdmin):
    """Admin configuration for Reviews: tabular listing with author/date filters."""
    # columns shown in the change-list view
    list_display = (
        'id',
        'author',
        'text',
        'created_at',
        'published_at',
        'status',
    )
    # sidebar filters
    list_filter = ('author', 'created_at', 'published_at')
| 20.25
| 58
| 0.598765
|
4a15604b6405a502b536f0088ab5d1496cb42e35
| 428,355
|
py
|
Python
|
tests/hwsim/test_ap_wps.py
|
westermo/hostapd
|
42bc5b5afa5c80faeef414bfd8a5e6316c12352d
|
[
"Unlicense"
] | 1
|
2022-03-11T09:42:27.000Z
|
2022-03-11T09:42:27.000Z
|
tests/hwsim/test_ap_wps.py
|
westermo/hostapd
|
42bc5b5afa5c80faeef414bfd8a5e6316c12352d
|
[
"Unlicense"
] | null | null | null |
tests/hwsim/test_ap_wps.py
|
westermo/hostapd
|
42bc5b5afa5c80faeef414bfd8a5e6316c12352d
|
[
"Unlicense"
] | null | null | null |
# WPS tests
# Copyright (c) 2013-2017, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
from tshark import run_tshark
import base64
import binascii
from Crypto.Cipher import AES
import hashlib
import hmac
import os
import time
import sys
import stat
import subprocess
import logging
logger = logging.getLogger()
import re
import socket
import struct
try:
from http.client import HTTPConnection
from urllib.request import urlopen
from urllib.parse import urlparse, urljoin
from urllib.error import HTTPError
from io import StringIO
from socketserver import StreamRequestHandler, TCPServer
except ImportError:
from httplib import HTTPConnection
from urllib import urlopen
from urlparse import urlparse, urljoin
from urllib2 import build_opener, ProxyHandler, HTTPError
from StringIO import StringIO
from SocketServer import StreamRequestHandler, TCPServer
import urllib
import xml.etree.ElementTree as ET
import hwsim_utils
import hostapd
from wpasupplicant import WpaSupplicant
from utils import *
from test_ap_eap import int_eap_server_params
def wps_start_ap(apdev, ssid="test-wps-conf", extra_cred=None):
params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
"wpa_passphrase": "12345678", "wpa": "2",
"wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
if extra_cred:
params['extra_cred'] = extra_cred
return hostapd.add_ap(apdev, params)
@remote_compatible
def test_ap_wps_init(dev, apdev):
    """Initial AP configuration with first WPS Enrollee"""
    skip_without_tkip(dev[0])
    ssid = "test-wps"
    # wps_state=1: WPS-enabled but still unconfigured AP
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1"})
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    if "PBC Status: Active" not in hapd.request("WPS_GET_STATUS"):
        raise Exception("PBC status not shown correctly")
    # pre-populate two unrelated network blocks to verify they survive provisioning
    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "home")
    dev[0].set_network_quoted(id, "psk", "12345678")
    dev[0].request("ENABLE_NETWORK %s no-connect" % id)
    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "home2")
    dev[0].set_network(id, "bssid", "00:11:22:33:44:55")
    dev[0].set_network(id, "key_mgmt", "NONE")
    dev[0].request("ENABLE_NETWORK %s no-connect" % id)
    dev[0].request("WPS_PBC")
    dev[0].wait_connected(timeout=30)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
    status = hapd.request("WPS_GET_STATUS")
    if "PBC Status: Disabled" not in status:
        raise Exception("PBC status not shown correctly")
    if "Last WPS result: Success" not in status:
        raise Exception("Last WPS result not shown correctly")
    if "Peer Address: " + dev[0].p2p_interface_addr() not in status:
        raise Exception("Peer address not shown correctly")
    conf = hapd.request("GET_CONFIG")
    if "wps_state=configured" not in conf:
        raise Exception("AP not in WPS configured state")
    # the AP may have self-configured as WPA2-only or as mixed WPA+WPA2
    if "wpa=2" in conf:
        if "rsn_pairwise_cipher=CCMP" not in conf:
            raise Exception("Unexpected rsn_pairwise_cipher")
        if "group_cipher=CCMP" not in conf:
            raise Exception("Unexpected group_cipher")
    else:
        if "wpa=3" not in conf:
            raise Exception("AP not in WPA+WPA2 configuration")
        if "rsn_pairwise_cipher=CCMP TKIP" not in conf:
            raise Exception("Unexpected rsn_pairwise_cipher")
        if "wpa_pairwise_cipher=CCMP TKIP" not in conf:
            raise Exception("Unexpected wpa_pairwise_cipher")
        if "group_cipher=TKIP" not in conf:
            raise Exception("Unexpected group_cipher")
    # the two pre-existing blocks plus the WPS-provisioned one
    if len(dev[0].list_networks()) != 3:
        raise Exception("Unexpected number of network blocks")
def test_ap_wps_init_2ap_pbc(dev, apdev):
    """Initial two-radio AP configuration with first WPS PBC Enrollee"""
    skip_without_tkip(dev[0])
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    hostapd.add_ap(apdev[1], params)
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
    # both radios must advertise active PBC before provisioning
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if "[WPS-PBC]" not in bss['flags']:
        raise Exception("WPS-PBC flag missing from AP1")
    bss = dev[0].get_bss(apdev[1]['bssid'])
    if "[WPS-PBC]" not in bss['flags']:
        raise Exception("WPS-PBC flag missing from AP2")
    dev[0].dump_monitor()
    # wps_cred_processing=2 makes wpa_supplicant deliver credentials as events
    dev[0].request("SET wps_cred_processing 2")
    dev[0].request("WPS_PBC")
    ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=30)
    dev[0].request("SET wps_cred_processing 0")
    if ev is None:
        raise Exception("WPS cred event not seen")
    # presumably 100e is the WPS Credential attribute id — verify against spec
    if "100e" not in ev:
        raise Exception("WPS attributes not included in the cred event")
    dev[0].wait_connected(timeout=30)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    dev[1].scan_for_bss(apdev[1]['bssid'], freq="2412")
    # after provisioning, the PBC indication must be cleared on both radios
    bss = dev[1].get_bss(apdev[0]['bssid'])
    if "[WPS-PBC]" in bss['flags']:
        raise Exception("WPS-PBC flag not cleared from AP1")
    bss = dev[1].get_bss(apdev[1]['bssid'])
    if "[WPS-PBC]" in bss['flags']:
        raise Exception("WPS-PBC flag not cleared from AP2")
def test_ap_wps_init_2ap_pin(dev, apdev):
    """Initial two-radio AP configuration with first WPS PIN Enrollee"""
    skip_without_tkip(dev[0])
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    hostapd.add_ap(apdev[1], params)
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
    # While the PIN is authorized, both radios must advertise an active
    # (authorized) registrar via the WPS-AUTH flag.
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if "[WPS-AUTH]" not in bss['flags']:
        raise Exception("WPS-AUTH flag missing from AP1")
    bss = dev[0].get_bss(apdev[1]['bssid'])
    if "[WPS-AUTH]" not in bss['flags']:
        raise Exception("WPS-AUTH flag missing from AP2")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN any " + pin)
    dev[0].wait_connected(timeout=30)
    # After provisioning completes the WPS-AUTH flag must be cleared on both
    # radios, as seen by a second station.
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    dev[1].scan_for_bss(apdev[1]['bssid'], freq="2412")
    bss = dev[1].get_bss(apdev[0]['bssid'])
    if "[WPS-AUTH]" in bss['flags']:
        raise Exception("WPS-AUTH flag not cleared from AP1")
    bss = dev[1].get_bss(apdev[1]['bssid'])
    if "[WPS-AUTH]" in bss['flags']:
        raise Exception("WPS-AUTH flag not cleared from AP2")
@remote_compatible
def test_ap_wps_init_through_wps_config(dev, apdev):
    """Initial AP configuration using wps_config command"""
    ssid = "test-wps-init-config"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1"})
    # WPS_CONFIG takes the SSID and passphrase as hex strings:
    # WPS_CONFIG <ssid-hex> <auth> <encr> <key-hex>
    if "FAIL" in hapd.request("WPS_CONFIG " + binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(b"12345678").decode()):
        raise Exception("WPS_CONFIG command failed")
    ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=5)
    if ev is None:
        raise Exception("Timeout on WPS-NEW-AP-SETTINGS events")
    # It takes some time for the AP to update Beacon and Probe Response frames,
    # so wait here before requesting the scan to be started to avoid adding
    # extra five second wait to the test due to fetching obsolete scan results.
    hapd.ping()
    time.sleep(0.2)
    # Confirm the new settings took effect by connecting with the passphrase.
    dev[0].connect(ssid, psk="12345678", scan_freq="2412", proto="WPA2",
                   pairwise="CCMP", group="CCMP")
    # A malformed WPS_CONFIG invocation must be rejected.
    if "FAIL" not in hapd.request("WPS_CONFIG foo"):
        raise Exception("Invalid WPS_CONFIG accepted")
@remote_compatible
def test_ap_wps_init_through_wps_config_2(dev, apdev):
    """AP configuration using wps_config and wps_cred_processing=2"""
    ssid = "test-wps-init-config"
    # wps_cred_processing=2 is expected to make hostapd include the WPS
    # attributes in the WPS-NEW-AP-SETTINGS event (checked below).
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1",
                           "wps_cred_processing": "2"})
    if "FAIL" in hapd.request("WPS_CONFIG " + binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(b"12345678").decode()):
        raise Exception("WPS_CONFIG command failed")
    ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=5)
    if ev is None:
        raise Exception("Timeout on WPS-NEW-AP-SETTINGS events")
    if "100e" not in ev:
        # 100e: Credential attribute expected in the event payload
        raise Exception("WPS-NEW-AP-SETTINGS did not include Credential")
@remote_compatible
def test_ap_wps_invalid_wps_config_passphrase(dev, apdev):
    """AP configuration using wps_config command with invalid passphrase"""
    ssid = "test-wps-init-config"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1"})
    # A 7-character passphrase is below the WPA2-PSK minimum of eight, so
    # the WPS_CONFIG command must be rejected by hostapd.
    cmd = "WPS_CONFIG %s WPA2PSK CCMP %s" % (
        binascii.hexlify(ssid.encode()).decode(),
        binascii.hexlify(b"1234567").decode())
    if "FAIL" not in hapd.request(cmd):
        raise Exception("Invalid WPS_CONFIG command accepted")
def test_ap_wps_conf(dev, apdev):
    """WPS PBC provisioning with configured AP"""
    ssid = "test-wps-conf"
    # wps_state=2: AP is already configured; WPS only distributes the
    # existing credentials instead of generating new ones.
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    dev[0].set("device_name", "Device A")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    # Verify the full provisioned association: state, BSSID, SSID, ciphers,
    # and key management all have to match the AP configuration above.
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED':
        raise Exception("Not fully connected")
    if status['bssid'] != apdev[0]['bssid']:
        raise Exception("Unexpected BSSID")
    if status['ssid'] != ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
    # The device name set above must be visible in hostapd's STA entry.
    sta = hapd.get_sta(dev[0].p2p_interface_addr())
    if 'wpsDeviceName' not in sta or sta['wpsDeviceName'] != "Device A":
        raise Exception("Device name not available in STA command")
def test_ap_wps_conf_5ghz(dev, apdev):
    """WPS PBC provisioning with configured AP on 5 GHz band"""
    try:
        hapd = None
        ssid = "test-wps-conf"
        # Channel 36 (5180 MHz) requires a regulatory domain; FI is used and
        # cleared again in the finally block below.
        params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                  "wpa_passphrase": "12345678", "wpa": "2",
                  "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                  "country_code": "FI", "hw_mode": "a", "channel": "36"}
        hapd = hostapd.add_ap(apdev[0], params)
        logger.info("WPS provisioning step")
        hapd.request("WPS_PBC")
        dev[0].set("device_name", "Device A")
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="5180")
        dev[0].request("WPS_PBC " + apdev[0]['bssid'])
        dev[0].wait_connected(timeout=30)
        # Device name must be reflected in hostapd's STA entry.
        sta = hapd.get_sta(dev[0].p2p_interface_addr())
        if 'wpsDeviceName' not in sta or sta['wpsDeviceName'] != "Device A":
            raise Exception("Device name not available in STA command")
    finally:
        # Always restore the regulatory domain, even on failure.
        dev[0].request("DISCONNECT")
        clear_regdom(hapd, dev)
def test_ap_wps_conf_chan14(dev, apdev):
    """WPS PBC provisioning with configured AP on channel 14"""
    try:
        hapd = None
        ssid = "test-wps-conf"
        # Channel 14 is only allowed in Japan (and only in 11b mode), hence
        # the JP country code and hw_mode=b.
        params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                  "wpa_passphrase": "12345678", "wpa": "2",
                  "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                  "country_code": "JP", "hw_mode": "b", "channel": "14"}
        hapd = hostapd.add_ap(apdev[0], params)
        logger.info("WPS provisioning step")
        hapd.request("WPS_PBC")
        dev[0].set("device_name", "Device A")
        # No BSSID/freq hint here: the station has to find the channel 14 AP
        # on its own during the PBC walk.
        dev[0].request("WPS_PBC")
        dev[0].wait_connected(timeout=30)
        sta = hapd.get_sta(dev[0].p2p_interface_addr())
        if 'wpsDeviceName' not in sta or sta['wpsDeviceName'] != "Device A":
            raise Exception("Device name not available in STA command")
    finally:
        # Always restore the regulatory domain, even on failure.
        dev[0].request("DISCONNECT")
        clear_regdom(hapd, dev)
@remote_compatible
def test_ap_wps_twice(dev, apdev):
    """WPS provisioning with twice to change passphrase"""
    ssid = "test-wps-twice"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    dev[0].request("DISCONNECT")
    logger.info("Restart AP with different passphrase and re-run WPS")
    hostapd.remove_bss(apdev[0])
    params['wpa_passphrase'] = 'another passphrase'
    hapd = hostapd.add_ap(apdev[0], params)
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    # Re-provisioning the same SSID must update the existing network block
    # in place instead of adding a duplicate entry.
    networks = dev[0].list_networks()
    if len(networks) > 1:
        raise Exception("Unexpected duplicated network block present")
@remote_compatible
def test_ap_wps_incorrect_pin(dev, apdev):
    """WPS PIN provisioning with incorrect PIN"""
    ssid = "test-wps-incorrect-pin"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning attempt 1")
    # AP authorizes PIN 12345670; the station deliberately uses a different
    # PIN so the exchange must fail with config_error=18 (PIN failure).
    hapd.request("WPS_PIN any 12345670")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN %s 55554444" % apdev[0]['bssid'])
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=30)
    if ev is None:
        raise Exception("WPS operation timed out")
    if "config_error=18" not in ev:
        raise Exception("Incorrect config_error reported")
    # msg indicates at which WPS message the mismatch was caught; with a
    # fully wrong PIN this is apparently expected at msg=8.
    if "msg=8" not in ev:
        raise Exception("PIN error detected on incorrect message")
    dev[0].wait_disconnected(timeout=10)
    dev[0].request("WPS_CANCEL")
    # if a scan was in progress, wait for it to complete before trying WPS again
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
    status = hapd.request("WPS_GET_STATUS")
    if "Last WPS result: Failed" not in status:
        raise Exception("WPS failure result not shown correctly")
    logger.info("WPS provisioning attempt 2")
    hapd.request("WPS_PIN any 12345670")
    dev[0].dump_monitor()
    # Second attempt: first half of the PIN matches, second half does not,
    # so the failure is apparently detected later (msg=10).
    dev[0].request("WPS_PIN %s 12344444" % apdev[0]['bssid'])
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=30)
    if ev is None:
        raise Exception("WPS operation timed out")
    if "config_error=18" not in ev:
        raise Exception("Incorrect config_error reported")
    if "msg=10" not in ev:
        raise Exception("PIN error detected on incorrect message")
    dev[0].wait_disconnected(timeout=10)
@remote_compatible
def test_ap_wps_conf_pin(dev, apdev):
    """WPS PIN provisioning with configured AP"""
    ssid = "test-wps-conf-pin"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
    # Verify the provisioned association matches the AP configuration.
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
    # Once the PIN has been consumed, the AP must no longer advertise an
    # authorized registrar.
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    bss = dev[1].get_bss(apdev[0]['bssid'])
    if "[WPS-AUTH]" in bss['flags']:
        raise Exception("WPS-AUTH flag not cleared")
    logger.info("Try to connect from another station using the same PIN")
    # The second station generates its own PIN; until that PIN is authorized
    # on the AP the exchange has to stop at M2D rather than connect.
    pin = dev[1].request("WPS_PIN " + apdev[0]['bssid'])
    ev = dev[1].wait_event(["WPS-M2D", "CTRL-EVENT-CONNECTED"], timeout=30)
    if ev is None:
        raise Exception("Operation timed out")
    if "WPS-M2D" not in ev:
        raise Exception("Unexpected WPS operation started")
    # Authorizing the PIN lets the pending exchange complete.
    hapd.request("WPS_PIN any " + pin)
    dev[1].wait_connected(timeout=30)
def test_ap_wps_conf_pin_mixed_mode(dev, apdev):
    """WPS PIN provisioning with configured AP (WPA+WPA2)"""
    skip_without_tkip(dev[0])
    ssid = "test-wps-conf-pin-mixed"
    # wpa=3 enables WPA+WPA2 mixed mode: CCMP pairwise under RSN, TKIP
    # pairwise under WPA, with TKIP as the group cipher.
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "3",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                           "wpa_pairwise": "TKIP"})
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
    status = dev[0].get_status()
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    # Default behavior: the station must pick WPA2-PSK/CCMP with TKIP group.
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP' or status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected encryption/key_mgmt configuration: pairwise=%s group=%s key_mgmt=%s" % (status['pairwise_cipher'], status['group_cipher'], status['key_mgmt']))
    logger.info("WPS provisioning step (auth_types=0x1b)")
    # Force a specific WPS Authentication Type bitmask in the Enrollee's
    # messages; the selected configuration must remain the same.
    if "OK" not in dev[0].request("SET wps_force_auth_types 0x1b"):
        raise Exception("Failed to set wps_force_auth_types 0x1b")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
    status = dev[0].get_status()
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP' or status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected encryption/key_mgmt configuration: pairwise=%s group=%s key_mgmt=%s" % (status['pairwise_cipher'], status['group_cipher'], status['key_mgmt']))
    logger.info("WPS provisioning step (auth_types=0 encr_types=0)")
    # Force empty auth/encr type bitmasks; the result must still be usable.
    if "OK" not in dev[0].request("SET wps_force_auth_types 0"):
        raise Exception("Failed to set wps_force_auth_types 0")
    if "OK" not in dev[0].request("SET wps_force_encr_types 0"):
        raise Exception("Failed to set wps_force_encr_types 0")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
    status = dev[0].get_status()
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP' or status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected encryption/key_mgmt configuration: pairwise=%s group=%s key_mgmt=%s" % (status['pairwise_cipher'], status['group_cipher'], status['key_mgmt']))
    # Clear the forced-type overrides so later tests are unaffected.
    dev[0].request("SET wps_force_auth_types ")
    dev[0].request("SET wps_force_encr_types ")
@remote_compatible
def test_ap_wps_conf_pin_v1(dev, apdev):
    """WPS PIN provisioning with configured WPS v1.0 AP"""
    ssid = "test-wps-conf-pin-v1"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    # Downgrade the AP to advertise WPS version 1.0 for this exchange. The
    # try/finally guarantees the default version (0x20) is restored even if
    # any step below raises, so a failure here cannot leak the downgraded
    # version into subsequent test cases.
    hapd.request("SET wps_version_number 0x10")
    try:
        hapd.request("WPS_PIN any " + pin)
        # A v1.0 AP with an active PIN should show up with the [WPS-PIN]
        # flag in scan results; poll a bounded number of scans for it.
        found = False
        for _ in range(10):
            dev[0].scan(freq="2412")
            if "[WPS-PIN]" in dev[0].request("SCAN_RESULTS"):
                found = True
                break
        if not found:
            raise Exception("WPS-PIN flag not seen in scan results")
        dev[0].dump_monitor()
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        dev[0].wait_connected(timeout=30)
    finally:
        hapd.request("SET wps_version_number 0x20")
@remote_compatible
def test_ap_wps_conf_pin_2sta(dev, apdev):
    """Two stations trying to use WPS PIN at the same time"""
    ssid = "test-wps-conf-pin2"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    # The same PIN is authorized separately for each station's UUID so both
    # concurrent exchanges can be completed.
    pin = "12345670"
    hapd.request("WPS_PIN " + dev[0].get_status_field("uuid") + " " + pin)
    hapd.request("WPS_PIN " + dev[1].get_status_field("uuid") + " " + pin)
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # Start both exchanges back-to-back before waiting on either one.
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
    dev[1].wait_connected(timeout=30)
@remote_compatible
def test_ap_wps_conf_pin_timeout(dev, apdev):
    """WPS PIN provisioning with configured AP timing out PIN"""
    ssid = "test-wps-conf-pin"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    addr = dev[0].p2p_interface_addr()
    pin = dev[0].wps_read_pin()
    # WPS_PIN with no arguments must be rejected.
    if "FAIL" not in hapd.request("WPS_PIN "):
        raise Exception("Unexpected success on invalid WPS_PIN")
    # Authorize the PIN with a one second timeout, then wait long enough for
    # it to expire before the station attempts to use it.
    hapd.request("WPS_PIN any " + pin + " 1")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    time.sleep(1.1)
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    # Expired PIN: AP asks for a PIN and the station only gets M2D.
    ev = hapd.wait_event(["WPS-PIN-NEEDED"], timeout=20)
    if ev is None:
        raise Exception("WPS-PIN-NEEDED event timed out")
    ev = dev[0].wait_event(["WPS-M2D"])
    if ev is None:
        raise Exception("M2D not reported")
    dev[0].request("WPS_CANCEL")
    # Re-authorize with a 20 second timeout bound to this station's address;
    # the exchange must now complete.
    hapd.request("WPS_PIN any " + pin + " 20 " + addr)
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
def test_ap_wps_reg_connect(dev, apdev):
    """WPS registrar using AP PIN to connect"""
    ssid = "test-wps-reg-ap-pin"
    appin = "12345670"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "ap_pin": appin})
    logger.info("WPS provisioning step")
    dev[0].dump_monitor()
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Act as an external registrar: learn the AP's current settings by
    # authenticating with the AP PIN, then connect with them.
    dev[0].wps_reg(apdev[0]['bssid'], appin)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
def test_ap_wps_reg_connect_zero_len_ap_pin(dev, apdev):
    """hostapd with zero length ap_pin parameter"""
    ssid = "test-wps-reg-ap-pin"
    # An empty AP PIN must not be usable for the registrar exchange.
    appin = ""
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "ap_pin": appin})
    logger.info("WPS provisioning step")
    dev[0].dump_monitor()
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], appin, no_wait=True)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("No WPS-FAIL reported")
    # config_error=15 indicates the setup-locked condition reported at M5.
    if "msg=5 config_error=15" not in ev:
        raise Exception("Unexpected WPS-FAIL: " + ev)
def test_ap_wps_reg_connect_mixed_mode(dev, apdev):
    """WPS registrar using AP PIN to connect (WPA+WPA2)"""
    skip_without_tkip(dev[0])
    ssid = "test-wps-reg-ap-pin"
    appin = "12345670"
    # wpa=3: mixed WPA+WPA2 AP with CCMP under RSN and TKIP under WPA.
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "3",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "wpa_pairwise": "TKIP", "ap_pin": appin})
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], appin)
    # The station is expected to select the stronger option: WPA2-PSK with
    # CCMP pairwise; the group cipher stays TKIP in mixed mode.
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
def test_ap_wps_reg_override_ap_settings(dev, apdev):
    """WPS registrar and ap_settings override"""
    ap_settings = "/tmp/ap_wps_reg_override_ap_settings"
    # Remove any leftover file from a previous (failed) run; only ignore the
    # file-not-found/permission class of errors instead of everything.
    try:
        os.remove(ap_settings)
    except OSError:
        pass
    # Override AP Settings with values that point to another AP
    data = build_wsc_attr(ATTR_NETWORK_INDEX, b'\x01')
    data += build_wsc_attr(ATTR_SSID, b"test")
    data += build_wsc_attr(ATTR_AUTH_TYPE, b'\x00\x01')
    data += build_wsc_attr(ATTR_ENCR_TYPE, b'\x00\x01')
    data += build_wsc_attr(ATTR_NETWORK_KEY, b'')
    data += build_wsc_attr(ATTR_MAC_ADDR, binascii.unhexlify(apdev[1]['bssid'].replace(':', '')))
    with open(ap_settings, "wb") as f:
        f.write(data)
    # Ensure the temporary settings file is deleted even if any step of the
    # provisioning below raises (previously it leaked on failure).
    try:
        ssid = "test-wps-reg-ap-pin"
        appin = "12345670"
        hostapd.add_ap(apdev[0],
                       {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                        "wpa_passphrase": "12345678", "wpa": "2",
                        "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                        "ap_pin": appin, "ap_settings": ap_settings})
        hapd2 = hostapd.add_ap(apdev[1], {"ssid": "test"})
        dev[0].flush_scan_cache()
        dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[0].scan_for_bss(apdev[1]['bssid'], freq=2412)
        dev[0].wps_reg(apdev[0]['bssid'], appin)
        # The overridden credentials point at apdev[1], so the station must
        # end up associated with that AP, not the one running the registrar.
        ev = hapd2.wait_event(['AP-STA-CONNECTED'], timeout=10)
    finally:
        os.remove(ap_settings)
    if ev is None:
        raise Exception("No connection with the other AP")
def check_wps_reg_failure(dev, ap, appin):
    """Run a WPS_REG attempt that must fail with AP setup locked.

    Expects the AP to reject the registrar exchange with config_error=15;
    raises on timeout, on unexpected success, or on any other error code.
    """
    dev.request("WPS_REG %s %s" % (ap['bssid'], appin))
    event = dev.wait_event(["WPS-SUCCESS", "WPS-FAIL"], timeout=15)
    if event is None:
        raise Exception("WPS operation timed out")
    if "WPS-SUCCESS" in event:
        raise Exception("WPS operation succeeded unexpectedly")
    if "config_error=15" not in event:
        raise Exception("WPS setup locked state was not reported correctly")
def test_ap_wps_random_ap_pin(dev, apdev):
    """WPS registrar using random AP PIN"""
    ssid = "test-wps-reg-random-ap-pin"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    # Generate a random AP PIN at runtime and verify it can be read back.
    appin = hapd.request("WPS_AP_PIN random")
    if "FAIL" in appin:
        raise Exception("Could not generate random AP PIN")
    if appin not in hapd.request("WPS_AP_PIN get"):
        raise Exception("Could not fetch current AP PIN")
    logger.info("WPS provisioning step")
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], appin)
    # Disabling the AP PIN must make further registrar attempts fail with
    # the setup-locked error (config_error=15, checked by the helper).
    hapd.request("WPS_AP_PIN disable")
    logger.info("WPS provisioning step with AP PIN disabled")
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    check_wps_reg_failure(dev[1], apdev[0], appin)
    logger.info("WPS provisioning step with AP PIN reset")
    appin = "12345670"
    hapd.request("WPS_AP_PIN set " + appin)
    dev[1].wps_reg(apdev[0]['bssid'], appin)
    dev[0].request("REMOVE_NETWORK all")
    dev[1].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected(timeout=10)
    dev[1].wait_disconnected(timeout=10)
    logger.info("WPS provisioning step after AP PIN timeout")
    hapd.request("WPS_AP_PIN disable")
    # Random PIN with a one second lifetime; after it expires the PIN is no
    # longer retrievable and registrar attempts must fail.
    appin = hapd.request("WPS_AP_PIN random 1")
    time.sleep(1.1)
    if "FAIL" not in hapd.request("WPS_AP_PIN get"):
        raise Exception("AP PIN unexpectedly still enabled")
    check_wps_reg_failure(dev[0], apdev[0], appin)
    logger.info("WPS provisioning step after AP PIN timeout(2)")
    hapd.request("WPS_AP_PIN disable")
    appin = "12345670"
    hapd.request("WPS_AP_PIN set " + appin + " 1")
    time.sleep(1.1)
    if "FAIL" not in hapd.request("WPS_AP_PIN get"):
        raise Exception("AP PIN unexpectedly still enabled")
    check_wps_reg_failure(dev[1], apdev[0], appin)
    # Fault-injection: PIN generation and UPnP PIN propagation failures must
    # not crash hostapd.
    with fail_test(hapd, 1, "os_get_random;wps_generate_pin"):
        hapd.request("WPS_AP_PIN random 1")
        hapd.request("WPS_AP_PIN disable")
    with alloc_fail(hapd, 1, "upnp_wps_set_ap_pin"):
        hapd.request("WPS_AP_PIN set 12345670")
        hapd.request("WPS_AP_PIN disable")
    # Invalid WPS_AP_PIN command variants (no value, unknown subcommand,
    # over-length PIN) must all be rejected.
    if "FAIL" not in hapd.request("WPS_AP_PIN set"):
        raise Exception("Invalid WPS_AP_PIN accepted")
    if "FAIL" not in hapd.request("WPS_AP_PIN foo"):
        raise Exception("Invalid WPS_AP_PIN accepted")
    if "FAIL" not in hapd.request("WPS_AP_PIN set " + 9*'1'):
        raise Exception("Invalid WPS_AP_PIN accepted")
def test_ap_wps_reg_config(dev, apdev):
    """WPS registrar configuring an AP using AP PIN"""
    ssid = "test-wps-init-ap-pin"
    appin = "12345670"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "ap_pin": appin})
    logger.info("WPS configuration step")
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    new_ssid = "wps-new-ssid"
    new_passphrase = "1234567890"
    # Push a completely new WPA2-PSK configuration to the AP via the
    # external registrar role, then verify the station is connected with it.
    dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
                   new_passphrase)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != new_ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
    logger.info("Re-configure back to open")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].flush_scan_cache()
    dev[0].dump_monitor()
    # Second reconfiguration: switch the AP to an open network and confirm.
    dev[0].wps_reg(apdev[0]['bssid'], appin, "wps-open", "OPEN", "NONE", "")
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != "wps-open":
        raise Exception("Unexpected SSID")
    if status['key_mgmt'] != 'NONE':
        raise Exception("Unexpected key_mgmt")
def test_ap_wps_reg_config_ext_processing(dev, apdev):
    """WPS registrar configuring an AP with external config processing"""
    ssid = "test-wps-init-ap-pin"
    appin = "12345670"
    # wps_cred_processing=1: hostapd does not apply received AP Settings
    # itself; it only reports them so an external entity can act on them.
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wps_cred_processing": "1", "ap_pin": appin}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    new_ssid = "wps-new-ssid"
    new_passphrase = "1234567890"
    dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
                   new_passphrase, no_wait=True)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS registrar operation timed out")
    ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("WPS configuration timed out")
    # 1026: AP Settings attribute expected in the event payload.
    if "1026" not in ev:
        raise Exception("AP Settings missing from event")
    # Apply the new settings manually (playing the external processor role),
    # after restoring normal credential processing.
    hapd.request("SET wps_cred_processing 0")
    if "FAIL" in hapd.request("WPS_CONFIG " + binascii.hexlify(new_ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(new_passphrase.encode()).decode()):
        raise Exception("WPS_CONFIG command failed")
    dev[0].wait_connected(timeout=15)
def test_ap_wps_reg_config_tkip(dev, apdev):
    """WPS registrar configuring AP to use TKIP and AP upgrading to TKIP+CCMP"""
    skip_with_fips(dev[0])
    skip_without_tkip(dev[0])
    ssid = "test-wps-init-ap"
    appin = "12345670"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1",
                           "ap_pin": appin})
    logger.info("WPS configuration step")
    dev[0].flush_scan_cache()
    # Use WPS v1.0 on the station side — presumably needed so that a
    # TKIP-only credential can be provisioned at all; confirm against WPS
    # 2.0 deprecation rules if this is ever changed.
    dev[0].request("SET wps_version_number 0x10")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    new_ssid = "wps-new-ssid-with-tkip"
    new_passphrase = "1234567890"
    dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPAPSK", "TKIP",
                   new_passphrase)
    logger.info("Re-connect to verify WPA2 mixed mode")
    dev[0].request("DISCONNECT")
    # Reuse the network block created by wps_reg() (network id 0) and flip
    # it to RSN/CCMP to prove the AP upgraded to TKIP+CCMP mixed mode.
    # (Local renamed from 'id' to avoid shadowing the builtin.)
    netid = 0
    dev[0].set_network(netid, "pairwise", "CCMP")
    dev[0].set_network(netid, "proto", "RSN")
    dev[0].connect_network(netid)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected: wpa_state={} bssid={}".format(status['wpa_state'], status['bssid']))
    if status['ssid'] != new_ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['group_cipher'] != 'TKIP':
        # CCMP group cipher is tolerated only when the AP configuration
        # itself reports CCMP as the group cipher.
        conf = hapd.request("GET_CONFIG")
        if "group_cipher=CCMP" not in conf or status['group_cipher'] != 'CCMP':
            raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
def test_ap_wps_setup_locked(dev, apdev):
    """WPS registrar locking up AP setup on AP PIN failures"""
    ssid = "test-wps-incorrect-ap-pin"
    appin = "12345670"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                           "ap_pin": appin})
    new_ssid = "wps-new-ssid-test"
    new_passphrase = "1234567890"
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Feed the AP a series of wrong AP PINs; after enough failures the AP is
    # expected to report the setup-locked state (config_error=15). Keep
    # iterating through all attempts (no break) to exercise post-lock
    # failures as well.
    ap_setup_locked = False
    for pin in ["55554444", "1234", "12345678", "00000000", "11111111"]:
        dev[0].dump_monitor()
        logger.info("Try incorrect AP PIN - attempt " + pin)
        dev[0].wps_reg(apdev[0]['bssid'], pin, new_ssid, "WPA2PSK",
                       "CCMP", new_passphrase, no_wait=True)
        # timeout=15 for consistency with test_ap_wps_setup_locked_timeout
        ev = dev[0].wait_event(["WPS-FAIL", "CTRL-EVENT-CONNECTED"], timeout=15)
        if ev is None:
            raise Exception("Timeout on receiving WPS operation failure event")
        if "CTRL-EVENT-CONNECTED" in ev:
            raise Exception("Unexpected connection")
        if "config_error=15" in ev:
            logger.info("AP Setup Locked")
            ap_setup_locked = True
        elif "config_error=18" not in ev:
            raise Exception("config_error=18 not reported")
        dev[0].wait_disconnected(timeout=10)
        time.sleep(0.1)
    if not ap_setup_locked:
        raise Exception("AP setup was not locked")
    dev[0].request("WPS_CANCEL")
    # The locked state must also be visible in the AP's WPS IE.
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412, force_scan=True,
                        only_new=True)
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if 'wps_ap_setup_locked' not in bss or bss['wps_ap_setup_locked'] != '1':
        logger.info("BSS: " + str(bss))
        raise Exception("AP Setup Locked not indicated in scan results")
    status = hapd.request("WPS_GET_STATUS")
    if "Last WPS result: Failed" not in status:
        raise Exception("WPS failure result not shown correctly")
    if "Peer Address: " + dev[0].p2p_interface_addr() not in status:
        raise Exception("Peer address not shown correctly")
    time.sleep(0.5)
    dev[0].dump_monitor()
    # Normal Enrollee PIN provisioning must still work while the AP PIN is
    # locked.
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=30)
    if ev is None:
        raise Exception("WPS success was not reported")
    dev[0].wait_connected(timeout=30)
    # Generating a fresh random AP PIN unlocks the setup again.
    appin = hapd.request("WPS_AP_PIN random")
    if "FAIL" in appin:
        raise Exception("Could not generate random AP PIN")
    ev = hapd.wait_event(["WPS-AP-SETUP-UNLOCKED"], timeout=10)
    if ev is None:
        raise Exception("Failed to unlock AP PIN")
def test_ap_wps_setup_locked_timeout(dev, apdev):
    """WPS re-enabling AP PIN after timeout"""
    ssid = "test-wps-incorrect-ap-pin"
    appin = "12345670"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                           "ap_pin": appin})
    new_ssid = "wps-new-ssid-test"
    new_passphrase = "1234567890"
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Feed wrong AP PINs until the AP reports setup locked
    # (config_error=15); stop as soon as the lock is seen.
    ap_setup_locked = False
    for pin in ["55554444", "1234", "12345678", "00000000", "11111111"]:
        dev[0].dump_monitor()
        logger.info("Try incorrect AP PIN - attempt " + pin)
        dev[0].wps_reg(apdev[0]['bssid'], pin, new_ssid, "WPA2PSK",
                       "CCMP", new_passphrase, no_wait=True)
        ev = dev[0].wait_event(["WPS-FAIL", "CTRL-EVENT-CONNECTED"], timeout=15)
        if ev is None:
            raise Exception("Timeout on receiving WPS operation failure event")
        if "CTRL-EVENT-CONNECTED" in ev:
            raise Exception("Unexpected connection")
        if "config_error=15" in ev:
            logger.info("AP Setup Locked")
            ap_setup_locked = True
            break
        elif "config_error=18" not in ev:
            raise Exception("config_error=18 not reported")
        dev[0].wait_disconnected(timeout=10)
        time.sleep(0.1)
    if not ap_setup_locked:
        raise Exception("AP setup was not locked")
    # The lock is expected to clear automatically after 60 seconds; allow
    # some slack in the wait.
    ev = hapd.wait_event(["WPS-AP-SETUP-UNLOCKED"], timeout=80)
    if ev is None:
        raise Exception("AP PIN did not get unlocked on 60 second timeout")
def test_ap_wps_setup_locked_2(dev, apdev):
    """WPS AP configured for special ap_setup_locked=2 mode"""
    ssid = "test-wps-ap-pin"
    appin = "12345670"
    # ap_setup_locked=2: the AP PIN can be used to learn the current
    # settings (first wps_reg below succeeds) but attempts to change the AP
    # configuration must be rejected with the setup-locked error.
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "ap_pin": appin, "ap_setup_locked": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    new_ssid = "wps-new-ssid-test"
    new_passphrase = "1234567890"
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Learning-only use of the AP PIN works.
    dev[0].wps_reg(apdev[0]['bssid'], appin)
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    hapd.dump_monitor()
    dev[0].dump_monitor()
    # Attempting to push new settings must fail on both ends with
    # config_error=15 (setup locked).
    dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK",
                   "CCMP", new_passphrase, no_wait=True)
    ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
    if ev is None:
        raise Exception("hostapd did not report WPS failure")
    if "msg=12 config_error=15" not in ev:
        raise Exception("Unexpected failure reason (AP): " + ev)
    ev = dev[0].wait_event(["WPS-FAIL", "CTRL-EVENT-CONNECTED"])
    if ev is None:
        raise Exception("Timeout on receiving WPS operation failure event")
    if "CTRL-EVENT-CONNECTED" in ev:
        raise Exception("Unexpected connection")
    if "config_error=15" not in ev:
        raise Exception("Unexpected failure reason (STA): " + ev)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
def setup_ap_wps_pbc_overlap_2ap(apdev):
    """Bring up two independent WPS APs and activate PBC on both.

    Returns the two hostapd instances so the caller can disable them.
    """
    common = {"eap_server": "1", "wps_state": "2", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "wps_independent": "1"}
    hapds = []
    for ap, ssid, passphrase in ((apdev[0], "wps1", "12345678"),
                                 (apdev[1], "wps2", "123456789")):
        cfg = dict(common)
        cfg["ssid"] = ssid
        cfg["wpa_passphrase"] = passphrase
        hapds.append(hostapd.add_ap(ap, cfg))
    # Activate the push button on both APs to create a PBC overlap.
    for h in hapds:
        h.request("WPS_PBC")
    return hapds[0], hapds[1]
@remote_compatible
def test_ap_wps_pbc_overlap_2ap(dev, apdev):
    """WPS PBC session overlap with two active APs"""
    hapds = setup_ap_wps_pbc_overlap_2ap(apdev)
    logger.info("WPS provisioning step")
    sta = dev[0]
    sta.scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    sta.scan_for_bss(apdev[1]['bssid'], freq="2412")
    sta.request("WPS_PBC")
    overlap_ev = sta.wait_event(["WPS-OVERLAP-DETECTED"], timeout=15)
    # Tear down the APs before checking the result so cleanup happens
    # even when the overlap was not detected.
    for hapd in hapds:
        hapd.request("DISABLE")
    sta.flush_scan_cache()
    if overlap_ev is None:
        raise Exception("PBC session overlap not detected")
@remote_compatible
def test_ap_wps_pbc_overlap_2ap_specific_bssid(dev, apdev):
    """WPS PBC session overlap with two active APs (specific BSSID selected)"""
    hapds = setup_ap_wps_pbc_overlap_2ap(apdev)
    logger.info("WPS provisioning step")
    sta = dev[0]
    sta.scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    sta.scan_for_bss(apdev[1]['bssid'], freq="2412")
    # Selecting an explicit BSSID must bypass the overlap check and allow
    # the connection to complete.
    sta.request("WPS_PBC " + apdev[0]['bssid'])
    result = sta.wait_event(["WPS-OVERLAP-DETECTED",
                             "CTRL-EVENT-CONNECTED"], timeout=15)
    # Clean up before evaluating the result.
    sta.request("DISCONNECT")
    for hapd in hapds:
        hapd.request("DISABLE")
    sta.flush_scan_cache()
    if result is None:
        raise Exception("PBC session overlap result not reported")
    if "CTRL-EVENT-CONNECTED" not in result:
        raise Exception("Connection did not complete")
@remote_compatible
def test_ap_wps_pbc_overlap_2sta(dev, apdev):
    """WPS PBC session overlap with two active STAs"""
    ssid = "test-wps-pbc-overlap"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    bssid = apdev[0]['bssid']
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    # Make sure both stations see the AP, then push the button on each so
    # the Registrar sees two overlapping PBC sessions.
    for sta in (dev[0], dev[1]):
        sta.scan_for_bss(bssid, freq="2412")
        sta.dump_monitor()
    for sta in (dev[0], dev[1]):
        sta.request("WPS_PBC " + bssid)
    # Each station must receive M2D with config_error=12
    # (Multiple PBC sessions detected).
    for idx, sta in enumerate((dev[0], dev[1])):
        ev = sta.wait_event(["WPS-M2D"], timeout=15)
        if ev is None:
            raise Exception("PBC session overlap not detected (dev%d)" % idx)
        if "config_error=12" not in ev:
            raise Exception("PBC session overlap not correctly reported (dev%d)" % idx)
        sta.request("WPS_CANCEL")
        sta.request("DISCONNECT")
    ev = hapd.wait_event(["WPS-OVERLAP-DETECTED"], timeout=1)
    if ev is None:
        raise Exception("PBC session overlap not detected (AP)")
    if "PBC Status: Overlap" not in hapd.request("WPS_GET_STATUS"):
        raise Exception("PBC status not shown correctly")
    hapd.request("WPS_CANCEL")
    # While the overlap condition is still active, re-arming PBC must fail.
    ret = hapd.request("WPS_PBC")
    if "FAIL" not in ret:
        raise Exception("PBC mode allowed to be started while PBC overlap still active")
    hapd.request("DISABLE")
    for sta in (dev[0], dev[1]):
        sta.flush_scan_cache()
def test_ap_wps_pbc_session_workaround(dev, apdev):
    """WPS PBC session overlap workaround"""
    ssid = "test-wps-pbc-overlap"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    bssid = hapd.own_addr()
    # First, provision dev[0] normally with PBC and then drop the network.
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].request("WPS_PBC " + bssid)
    dev[0].wait_connected(timeout=30)
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected(timeout=30)
    dev[0].dump_monitor()
    # Trigger AP/Registrar to ignore PBC activation immediately after
    # successfully completed provisioning
    dev[0].request("WPS_PBC " + bssid)
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
    if ev is None:
        raise Exception("No scan results reported")
    dev[0].request("WPS_CANCEL")
    dev[0].dump_monitor()
    # Verify that PBC session overlap does not prevent connection
    hapd.request("WPS_PBC")
    dev[1].scan_for_bss(bssid, freq="2412")
    dev[1].request("WPS_PBC " + bssid)
    dev[1].wait_connected()
    dev[1].request("REMOVE_NETWORK all")
    dev[1].wait_disconnected()
    hapd.request("DISABLE")
    dev[0].flush_scan_cache()
    dev[1].flush_scan_cache()
@remote_compatible
def test_ap_wps_cancel(dev, apdev):
    """WPS AP cancelling enabled config method"""
    ssid = "test-wps-ap-cancel"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    bssid = apdev[0]['bssid']

    def bss_flags():
        # Scan twice so the most recent Beacon/Probe Response IEs are
        # reflected in the BSS table before checking the WPS flags.
        dev[0].scan(freq="2412")
        dev[0].scan(freq="2412")
        return dev[0].get_bss(bssid)['flags']

    logger.info("Verify PBC enable/cancel")
    hapd.request("WPS_PBC")
    if "[WPS-PBC]" not in bss_flags():
        raise Exception("WPS-PBC flag missing")
    if "FAIL" in hapd.request("WPS_CANCEL"):
        raise Exception("WPS_CANCEL failed")
    if "[WPS-PBC]" in bss_flags():
        raise Exception("WPS-PBC flag not cleared")
    logger.info("Verify PIN enable/cancel")
    hapd.request("WPS_PIN any 12345670")
    if "[WPS-AUTH]" not in bss_flags():
        raise Exception("WPS-AUTH flag missing")
    if "FAIL" in hapd.request("WPS_CANCEL"):
        raise Exception("WPS_CANCEL failed")
    if "[WPS-AUTH]" in bss_flags():
        raise Exception("WPS-AUTH flag not cleared")
    dev[0].flush_scan_cache()
def test_ap_wps_er_add_enrollee(dev, apdev):
    """WPS ER configuring AP and adding a new enrollee using PIN"""
    try:
        _test_ap_wps_er_add_enrollee(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_add_enrollee(dev, apdev):
    """Implementation of test_ap_wps_er_add_enrollee().

    Configures an unconfigured WPS AP using the AP PIN, starts an External
    Registrar (ER) on dev[0] over UPnP (loopback interface), learns the AP
    settings, and then enrolls dev[1] (wildcard PIN) and dev[2] (address
    bound PIN) through the ER.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "1",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    'friendly_name': "WPS AP - <>&'\" - TEST",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    logger.info("WPS configuration step")
    new_passphrase = "1234567890"
    dev[0].dump_monitor()
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin, ssid, "WPA2PSK", "CCMP",
                   new_passphrase)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    if status['ssid'] != ssid:
        raise Exception("Unexpected SSID")
    if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'CCMP':
        raise Exception("Unexpected encryption configuration")
    if status['key_mgmt'] != 'WPA2-PSK':
        raise Exception("Unexpected key_mgmt")
    logger.info("Start ER")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    # Bug fix: the double quote embedded in the friendly name must be
    # escaped inside this string literal (it matches the friendly_name
    # configured above); the unescaped form was a syntax error.
    if "|WPS AP - <>&'\" - TEST|Company|" not in ev:
        raise Exception("Expected friendly name not found")
    logger.info("Learn AP configuration through UPnP")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not in settings")
    if "ssid=" + ssid not in ev:
        raise Exception("Expected SSID not in settings")
    if "key=" + new_passphrase not in ev:
        raise Exception("Expected passphrase not in settings")
    # The learn operation is reported as a WPS protocol failure on purpose
    # (the exchange is terminated after M7).
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)
    logger.info("Add Enrollee using ER")
    pin = dev[1].wps_read_pin()
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_PIN any " + pin + " " + dev[1].p2p_interface_addr())
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[1].dump_monitor()
    dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=30)
    if ev is None:
        raise Exception("Enrollee did not report success")
    dev[1].wait_connected(timeout=15)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
    hwsim_utils.test_connectivity_sta(dev[0], dev[1])
    logger.info("Add a specific Enrollee using ER")
    pin = dev[2].wps_read_pin()
    addr2 = dev[2].p2p_interface_addr()
    dev[0].dump_monitor()
    dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[2].dump_monitor()
    dev[2].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
    if ev is None:
        raise Exception("Enrollee not seen")
    if addr2 not in ev:
        raise Exception("Unexpected Enrollee MAC address")
    dev[0].request("WPS_ER_PIN " + addr2 + " " + pin + " " + addr2)
    dev[2].wait_connected(timeout=30)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
    logger.info("Verify registrar selection behavior")
    dev[0].request("WPS_ER_PIN any " + pin + " " + dev[1].p2p_interface_addr())
    dev[1].request("DISCONNECT")
    dev[1].wait_disconnected(timeout=10)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[1].scan(freq="2412")
    bss = dev[1].get_bss(apdev[0]['bssid'])
    if "[WPS-AUTH]" not in bss['flags']:
        # It is possible for scan to miss an update especially when running
        # tests under load with multiple VMs, so allow another attempt.
        dev[1].scan(freq="2412")
        bss = dev[1].get_bss(apdev[0]['bssid'])
        if "[WPS-AUTH]" not in bss['flags']:
            raise Exception("WPS-AUTH flag missing")
    logger.info("Stop ER")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_STOP")
    ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"])
    if ev is None:
        raise Exception("WPS ER unsubscription timed out")
    # It takes some time for the UPnP UNSUBSCRIBE command to go through, so wait
    # a bit before verifying that the scan results have changed.
    time.sleep(0.2)
    for i in range(0, 10):
        dev[1].request("BSS_FLUSH 0")
        dev[1].scan(freq="2412", only_new=True)
        bss = dev[1].get_bss(apdev[0]['bssid'])
        if bss and 'flags' in bss and "[WPS-AUTH]" not in bss['flags']:
            break
        logger.debug("WPS-AUTH flag was still in place - wait a bit longer")
        time.sleep(0.1)
    if "[WPS-AUTH]" in bss['flags']:
        raise Exception("WPS-AUTH flag not removed")
def test_ap_wps_er_add_enrollee_uuid(dev, apdev):
    """WPS ER adding a new enrollee identified by UUID"""
    try:
        _test_ap_wps_er_add_enrollee_uuid(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_add_enrollee_uuid(dev, apdev):
    """Implementation of test_ap_wps_er_add_enrollee_uuid().

    Starts an ER on dev[0], learns the AP settings with the AP PIN, and
    then enrolls dev[1] (PBC) and dev[2] (PIN), selecting each Enrollee by
    the UUID reported in the WPS-ER-ENROLLEE-ADD event.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    logger.info("WPS configuration step")
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    logger.info("Start ER")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    logger.info("Learn AP configuration through UPnP")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not in settings")
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)
    logger.info("Add a specific Enrollee using ER (PBC/UUID)")
    addr1 = dev[1].p2p_interface_addr()
    dev[0].dump_monitor()
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[1].dump_monitor()
    dev[1].request("WPS_PBC %s" % apdev[0]['bssid'])
    ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
    if ev is None:
        raise Exception("Enrollee not seen")
    if addr1 not in ev:
        raise Exception("Unexpected Enrollee MAC address")
    # The event format is "WPS-ER-ENROLLEE-ADD <uuid> ...", so the UUID is
    # the second whitespace-separated token.
    uuid = ev.split(' ')[1]
    dev[0].request("WPS_ER_PBC " + uuid)
    dev[1].wait_connected(timeout=30)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
    logger.info("Add a specific Enrollee using ER (PIN/UUID)")
    pin = dev[2].wps_read_pin()
    addr2 = dev[2].p2p_interface_addr()
    dev[0].dump_monitor()
    dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[2].dump_monitor()
    dev[2].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
    if ev is None:
        raise Exception("Enrollee not seen")
    if addr2 not in ev:
        raise Exception("Unexpected Enrollee MAC address")
    uuid = ev.split(' ')[1]
    dev[0].request("WPS_ER_PIN " + uuid + " " + pin)
    dev[2].wait_connected(timeout=30)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
    ev = dev[0].wait_event(["WPS-ER-ENROLLEE-REMOVE"], timeout=15)
    if ev is None:
        raise Exception("No Enrollee STA entry timeout seen")
    logger.info("Stop ER")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_multi_add_enrollee(dev, apdev):
    """Multiple WPS ERs adding a new enrollee using PIN"""
    try:
        _test_ap_wps_er_multi_add_enrollee(dev, apdev)
    finally:
        # Both dev[0] and dev[1] run an ER in this test; stop both even if
        # the test body fails.
        for i in range(2):
            dev[i].request("WPS_ER_STOP")
def _test_ap_wps_er_multi_add_enrollee(dev, apdev):
    """Implementation of test_ap_wps_er_multi_add_enrollee().

    Runs two External Registrars (dev[0] and dev[1]) in parallel against
    the same AP and verifies that an Enrollee (dev[2]) can be added when
    both ERs have activated the same wildcard PIN.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    'friendly_name': "WPS AP",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    # Provision both ER devices with the AP PIN first.
    for i in range(2):
        dev[i].flush_scan_cache()
        dev[i].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[i].wps_reg(apdev[0]['bssid'], ap_pin)
    for i in range(2):
        dev[i].request("WPS_ER_START ifname=lo")
    for i in range(2):
        ev = dev[i].wait_event(["WPS-ER-AP-ADD"], timeout=15)
        if ev is None:
            raise Exception("AP discovery timed out")
        dev[i].dump_monitor()
    for i in range(2):
        dev[i].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    for i in range(2):
        ev = dev[i].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
        if ev is None:
            raise Exception("AP learn timed out")
        ev = dev[i].wait_event(["WPS-FAIL"], timeout=15)
        if ev is None:
            raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)
    # Activate the same PIN on both ERs and enroll dev[2].
    pin = dev[2].wps_read_pin()
    addr = dev[2].own_addr()
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_PIN any " + pin + " " + addr)
    dev[1].dump_monitor()
    dev[1].request("WPS_ER_PIN any " + pin + " " + addr)
    dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[2].dump_monitor()
    dev[2].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[2].wait_event(["WPS-SUCCESS"], timeout=30)
    if ev is None:
        raise Exception("Enrollee did not report success")
    dev[2].wait_connected(timeout=15)
def test_ap_wps_er_add_enrollee_pbc(dev, apdev):
    """WPS ER connected to AP and adding a new enrollee using PBC"""
    try:
        _test_ap_wps_er_add_enrollee_pbc(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_add_enrollee_pbc(dev, apdev):
    """Implementation of test_ap_wps_er_add_enrollee_pbc().

    Learns the AP configuration with the AP PIN, starts an ER on dev[0],
    and enrolls dev[1] via push button. Also exercises the WPS_ER_PBC
    error paths (unknown UUID and missing AP settings).
    """
    ssid = "wps-er-add-enrollee-pbc"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    logger.info("Learn AP configuration")
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    logger.info("Start ER")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    # WPS_ER_PBC for an Enrollee the ER has not yet discovered must fail.
    enrollee = dev[1].p2p_interface_addr()
    if "FAIL-UNKNOWN-UUID" not in dev[0].request("WPS_ER_PBC " + enrollee):
        raise Exception("Unknown UUID not reported")
    logger.info("Add Enrollee using ER and PBC")
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    dev[1].request("WPS_PBC")
    # Allow one unrelated ENROLLEE-ADD event before the expected one.
    for i in range(0, 2):
        ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=15)
        if ev is None:
            raise Exception("Enrollee discovery timed out")
        if enrollee in ev:
            break
        if i == 1:
            raise Exception("Expected Enrollee not found")
    # Without AP settings selected for the ER, PBC must be rejected.
    if "FAIL-NO-AP-SETTINGS" not in dev[0].request("WPS_ER_PBC " + enrollee):
        raise Exception("Unknown UUID not reported")
    logger.info("Use learned network configuration on ER")
    dev[0].request("WPS_ER_SET_CONFIG " + ap_uuid + " 0")
    if "OK" not in dev[0].request("WPS_ER_PBC " + enrollee):
        raise Exception("WPS_ER_PBC failed")
    ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("Enrollee did not report success")
    dev[1].wait_connected(timeout=15)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
    hwsim_utils.test_connectivity_sta(dev[0], dev[1])
def test_ap_wps_er_pbc_overlap(dev, apdev):
    """WPS ER connected to AP and PBC session overlap"""
    try:
        _test_ap_wps_er_pbc_overlap(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_pbc_overlap(dev, apdev):
    """Implementation of test_ap_wps_er_pbc_overlap().

    Two stations (dev[1] and dev[2]) activate PBC at the same time; the ER
    on dev[0] must then refuse WPS_ER_PBC with FAIL-PBC-OVERLAP.
    """
    ssid = "wps-er-add-enrollee-pbc"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[2].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # avoid leaving dev 1 or 2 as the last Probe Request to the AP
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412, force_scan=True)
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    # verify BSSID selection of the AP instead of UUID
    if "FAIL" in dev[0].request("WPS_ER_SET_CONFIG " + apdev[0]['bssid'] + " 0"):
        raise Exception("Could not select AP based on BSSID")
    dev[0].dump_monitor()
    dev[1].request("WPS_PBC " + apdev[0]['bssid'])
    dev[2].request("WPS_PBC " + apdev[0]['bssid'])
    ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
    if ev is None:
        raise Exception("PBC scan failed")
    ev = dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
    if ev is None:
        raise Exception("PBC scan failed")
    # Wait until the ER has seen both Enrollees (events may arrive in any
    # order and other events can be interleaved).
    found1 = False
    found2 = False
    addr1 = dev[1].own_addr()
    addr2 = dev[2].own_addr()
    for i in range(3):
        ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=15)
        if ev is None:
            raise Exception("Enrollee discovery timed out")
        if addr1 in ev:
            found1 = True
            if found2:
                break
        if addr2 in ev:
            found2 = True
            if found1:
                break
    if dev[0].request("WPS_ER_PBC " + ap_uuid) != "FAIL-PBC-OVERLAP\n":
        raise Exception("PBC overlap not reported")
    dev[1].request("WPS_CANCEL")
    dev[2].request("WPS_CANCEL")
    if dev[0].request("WPS_ER_PBC foo") != "FAIL\n":
        raise Exception("Invalid WPS_ER_PBC accepted")
def test_ap_wps_er_v10_add_enrollee_pin(dev, apdev):
    """WPS v1.0 ER connected to AP and adding a new enrollee using PIN"""
    try:
        _test_ap_wps_er_v10_add_enrollee_pin(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_v10_add_enrollee_pin(dev, apdev):
    """Implementation of test_ap_wps_er_v10_add_enrollee_pin().

    Same as the PIN enrollment flow but with dev[0] forced to advertise
    WPS version 1.0 (wps_version_number 0x10).
    """
    ssid = "wps-er-add-enrollee-pbc"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    logger.info("Learn AP configuration")
    # Force WPS 1.0 behavior on the ER device for this test.
    dev[0].request("SET wps_version_number 0x10")
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    status = dev[0].get_status()
    if status['wpa_state'] != 'COMPLETED' or status['bssid'] != apdev[0]['bssid']:
        raise Exception("Not fully connected")
    logger.info("Start ER")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    logger.info("Use learned network configuration on ER")
    dev[0].request("WPS_ER_SET_CONFIG " + ap_uuid + " 0")
    logger.info("Add Enrollee using ER and PIN")
    enrollee = dev[1].p2p_interface_addr()
    pin = dev[1].wps_read_pin()
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_PIN any " + pin + " " + enrollee)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[1].dump_monitor()
    dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[1].wait_connected(timeout=30)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
@remote_compatible
def test_ap_wps_er_config_ap(dev, apdev):
    """WPS ER configuring AP over UPnP"""
    try:
        _test_ap_wps_er_config_ap(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_config_ap(dev, apdev):
    """Implementation of test_ap_wps_er_config_ap().

    Pushes new AP settings over UPnP with WPS_ER_CONFIG and verifies the
    station can reconnect with the new passphrase; also verifies that the
    ER can be restarted and stopped twice without failure.
    """
    ssid = "wps-er-ap-config"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    logger.info("Connect ER to the AP")
    dev[0].connect(ssid, psk="12345678", scan_freq="2412")
    logger.info("WPS configuration step")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    # WPS_ER_CONFIG takes the SSID and passphrase as hex strings.
    new_passphrase = "1234567890"
    dev[0].request("WPS_ER_CONFIG " + apdev[0]['bssid'] + " " + ap_pin + " " +
                   binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " +
                   binascii.hexlify(new_passphrase.encode()).decode())
    ev = dev[0].wait_event(["WPS-SUCCESS"])
    if ev is None:
        raise Exception("WPS ER configuration operation timed out")
    dev[0].wait_disconnected(timeout=10)
    dev[0].connect(ssid, psk="1234567890", scan_freq="2412")
    logger.info("WPS ER restart")
    dev[0].request("WPS_ER_START")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out on ER restart")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found on ER restart")
    # Stopping twice must be accepted (second stop is a no-op).
    if "OK" not in dev[0].request("WPS_ER_STOP"):
        raise Exception("WPS_ER_STOP failed")
    if "OK" not in dev[0].request("WPS_ER_STOP"):
        raise Exception("WPS_ER_STOP failed")
@remote_compatible
def test_ap_wps_er_cache_ap_settings(dev, apdev):
    """WPS ER caching AP settings"""
    try:
        _test_ap_wps_er_cache_ap_settings(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_cache_ap_settings(dev, apdev):
    """Implementation of test_ap_wps_er_cache_ap_settings().

    Learns the AP settings, restarts the AP (triggering removal and
    re-discovery on the ER), and verifies the cached settings can still be
    used to enroll dev[1] without learning again.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    id = int(dev[0].list_networks()[0]['id'])
    dev[0].set_network(id, "scan_freq", "2412")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)
    # Restart the AP: expect both an ER AP removal and a disconnection,
    # followed by re-discovery and reconnection (order not guaranteed).
    hapd.disable()
    for i in range(2):
        ev = dev[0].wait_event(["WPS-ER-AP-REMOVE", "CTRL-EVENT-DISCONNECTED"],
                               timeout=15)
        if ev is None:
            raise Exception("AP removal or disconnection timed out")
    hapd = hostapd.add_ap(apdev[0], params)
    for i in range(2):
        ev = dev[0].wait_event(["WPS-ER-AP-ADD", "CTRL-EVENT-CONNECTED"],
                               timeout=15)
        if ev is None:
            raise Exception("AP discovery or connection timed out")
    # The cached settings should allow enrolling dev[1] without a new
    # WPS_ER_LEARN step.
    pin = dev[1].wps_read_pin()
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_PIN any " + pin + " " + dev[1].p2p_interface_addr())
    time.sleep(0.2)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[1].dump_monitor()
    dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=30)
    if ev is None:
        raise Exception("Enrollee did not report success")
    dev[1].wait_connected(timeout=15)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_cache_ap_settings_oom(dev, apdev):
    """WPS ER caching AP settings (OOM)"""
    try:
        _test_ap_wps_er_cache_ap_settings_oom(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_cache_ap_settings_oom(dev, apdev):
    """Implementation of test_ap_wps_er_cache_ap_settings_oom().

    Same flow as the settings-caching test, but injects an allocation
    failure in wps_er_ap_use_cached_settings() while the AP is restarted
    to exercise the OOM error path.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    id = int(dev[0].list_networks()[0]['id'])
    dev[0].set_network(id, "scan_freq", "2412")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)
    # Restart the AP while the first allocation in
    # wps_er_ap_use_cached_settings() fails.
    with alloc_fail(dev[0], 1, "=wps_er_ap_use_cached_settings"):
        hapd.disable()
        for i in range(2):
            ev = dev[0].wait_event(["WPS-ER-AP-REMOVE",
                                    "CTRL-EVENT-DISCONNECTED"],
                                   timeout=15)
            if ev is None:
                raise Exception("AP removal or disconnection timed out")
        hapd = hostapd.add_ap(apdev[0], params)
        for i in range(2):
            ev = dev[0].wait_event(["WPS-ER-AP-ADD", "CTRL-EVENT-CONNECTED"],
                                   timeout=15)
            if ev is None:
                raise Exception("AP discovery or connection timed out")
    dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_cache_ap_settings_oom2(dev, apdev):
    """WPS ER caching AP settings (OOM 2)"""
    try:
        _test_ap_wps_er_cache_ap_settings_oom2(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_cache_ap_settings_oom2(dev, apdev):
    """Implementation of test_ap_wps_er_cache_ap_settings_oom2().

    Same as the first OOM variant, but the allocation failure is injected
    in wps_er_ap_cache_settings() instead.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    id = int(dev[0].list_networks()[0]['id'])
    dev[0].set_network(id, "scan_freq", "2412")
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)
    # Restart the AP while the first allocation in
    # wps_er_ap_cache_settings() fails.
    with alloc_fail(dev[0], 1, "=wps_er_ap_cache_settings"):
        hapd.disable()
        for i in range(2):
            ev = dev[0].wait_event(["WPS-ER-AP-REMOVE",
                                    "CTRL-EVENT-DISCONNECTED"],
                                   timeout=15)
            if ev is None:
                raise Exception("AP removal or disconnection timed out")
        hapd = hostapd.add_ap(apdev[0], params)
        for i in range(2):
            ev = dev[0].wait_event(["WPS-ER-AP-ADD", "CTRL-EVENT-CONNECTED"],
                                   timeout=15)
            if ev is None:
                raise Exception("AP discovery or connection timed out")
    dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_subscribe_oom(dev, apdev):
    """WPS ER subscribe OOM"""
    try:
        _test_ap_wps_er_subscribe_oom(dev, apdev)
    finally:
        # Always stop the External Registrar so a failure here does not
        # leak UPnP state into subsequent test cases.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_subscribe_oom(dev, apdev):
    """WPS ER subscribe OOM - implementation.

    Injects an allocation failure in the HTTP client used for the UPnP
    event subscription and verifies that AP discovery does not complete.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    id = int(dev[0].list_networks()[0]['id'])
    dev[0].set_network(id, "scan_freq", "2412")
    # Fail the HTTP client allocation in the wps_er_subscribe() path
    with alloc_fail(dev[0], 1, "http_client_addr;wps_er_subscribe"):
        dev[0].request("WPS_ER_START ifname=lo")
        # Poll until the injected allocation failure has been consumed
        # ("0:" prefix in the GET_ALLOC_FAIL response)
        for i in range(50):
            res = dev[0].request("GET_ALLOC_FAIL")
            if res.startswith("0:"):
                break
            time.sleep(0.1)
        # With the subscription failing, the AP must not get discovered
        ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=0)
        if ev:
            raise Exception("Unexpected AP discovery during OOM")
    dev[0].request("WPS_ER_STOP")
def test_ap_wps_er_set_sel_reg_oom(dev, apdev):
    """WPS ER SetSelectedRegistrar OOM"""
    try:
        _test_ap_wps_er_set_sel_reg_oom(dev, apdev)
    finally:
        # Always stop the External Registrar, even if the test step raised
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_set_sel_reg_oom(dev, apdev):
    """WPS ER SetSelectedRegistrar OOM - implementation.

    Injects allocation failures at multiple points in the
    SetSelectedRegistrar UPnP action path and verifies that WPS_ER_PBC is
    still accepted and PBC mode gets activated.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
    if ev is None:
        raise Exception("AP not discovered")
    # Learn the AP settings so that the ER can act as a registrar
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL timed out")
    time.sleep(0.1)  # allow pending ER operations to complete
    # OOM injection points within the SetSelectedRegistrar action path
    for func in ["http_client_url_parse;wps_er_send_set_sel_reg",
                 "wps_er_soap_hdr;wps_er_send_set_sel_reg",
                 "http_client_addr;wps_er_send_set_sel_reg",
                 "wpabuf_alloc;wps_er_set_sel_reg"]:
        with alloc_fail(dev[0], 1, func):
            if "OK" not in dev[0].request("WPS_ER_PBC " + ap_uuid):
                raise Exception("WPS_ER_PBC failed")
            ev = dev[0].wait_event(["WPS-PBC-ACTIVE"], timeout=3)
            if ev is None:
                raise Exception("WPS-PBC-ACTIVE not seen")
    dev[0].request("WPS_ER_STOP")
@remote_compatible
def test_ap_wps_er_learn_oom(dev, apdev):
    """WPS ER learn OOM"""
    try:
        _test_ap_wps_er_learn_oom(dev, apdev)
    finally:
        # Always stop the External Registrar, even if the test step raised
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_learn_oom(dev, apdev):
    """WPS ER learn OOM - implementation.

    Injects allocation failures in the AP settings learning (PutMessage)
    path and verifies that learning fails during OOM, succeeds afterwards,
    and that an unknown AP UUID is rejected.
    """
    ssid = "wps-er-add-enrollee"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].wps_reg(apdev[0]['bssid'], ap_pin)
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
    if ev is None:
        raise Exception("AP not discovered")
    # OOM injection points within the AP settings learning path; learning
    # is expected to fail (no WPS-ER-AP-SETTINGS event) in each case
    for func in ["wps_er_http_put_message_cb",
                 "xml_get_base64_item;wps_er_http_put_message_cb",
                 "http_client_url_parse;wps_er_ap_put_message",
                 "wps_er_soap_hdr;wps_er_ap_put_message",
                 "http_client_addr;wps_er_ap_put_message"]:
        with alloc_fail(dev[0], 1, func):
            dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
            ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=1)
            if ev is not None:
                raise Exception("AP learn succeeded during OOM")
    # Without OOM, the same operation is expected to succeed
    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=10)
    if ev is None:
        raise Exception("AP learn did not succeed")
    # Learning must be rejected for an AP UUID that has not been discovered
    if "FAIL" not in dev[0].request("WPS_ER_LEARN 00000000-9e5c-4e73-bd82-f89cbcd10d7e " + ap_pin):
        raise Exception("WPS_ER_LEARN for unknown AP accepted")
    dev[0].request("WPS_ER_STOP")
def test_ap_wps_fragmentation(dev, apdev):
    """WPS with fragmentation in EAP-WSC and mixed mode WPA+WPA2"""
    # The exact same connection verification was duplicated verbatim for
    # each of the three stations; factor it out into one helper.
    def check_connection(sta):
        # Each station must end up fully connected with CCMP pairwise
        # cipher, TKIP group cipher (mixed mode AP) and WPA2-PSK key_mgmt.
        status = sta.get_status()
        if status['wpa_state'] != 'COMPLETED':
            raise Exception("Not fully connected")
        if status['pairwise_cipher'] != 'CCMP' or status['group_cipher'] != 'TKIP':
            raise Exception("Unexpected encryption configuration")
        if status['key_mgmt'] != 'WPA2-PSK':
            raise Exception("Unexpected key_mgmt")

    skip_without_tkip(dev[0])
    ssid = "test-wps-fragmentation"
    appin = "12345670"
    # Mixed mode (wpa=3) AP with a small fragment_size to force
    # fragmentation of the EAP-WSC exchange
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "3",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                           "wpa_pairwise": "TKIP", "ap_pin": appin,
                           "fragment_size": "50"})
    logger.info("WPS provisioning step (PBC)")
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    dev[0].request("SET wps_fragment_size 50")
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    check_connection(dev[0])
    logger.info("WPS provisioning step (PIN)")
    pin = dev[1].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[1].request("SET wps_fragment_size 50")
    dev[1].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[1].wait_connected(timeout=30)
    check_connection(dev[1])
    logger.info("WPS connection as registrar")
    dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[2].request("SET wps_fragment_size 50")
    dev[2].wps_reg(apdev[0]['bssid'], appin)
    check_connection(dev[2])
@remote_compatible
def test_ap_wps_new_version_sta(dev, apdev):
    """WPS compatibility with new version number on the station"""
    ssid = "test-wps-ver"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    # Advertise a newer (unknown) WPS version number and add a vendor
    # extension into M1; provisioning is still expected to complete
    dev[0].request("SET wps_version_number 0x43")
    dev[0].request("SET wps_vendor_ext_m1 000137100100020001")
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
@remote_compatible
def test_ap_wps_new_version_ap(dev, apdev):
    """WPS compatibility with new version number on the AP"""
    ssid = "test-wps-ver"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    # Make the AP advertise a newer (unknown) WPS version number;
    # provisioning is still expected to complete
    if "FAIL" in hapd.request("SET wps_version_number 0x43"):
        raise Exception("Failed to enable test functionality")
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    # Restore the default WPS version number
    hapd.request("SET wps_version_number 0x20")
@remote_compatible
def test_ap_wps_check_pin(dev, apdev):
    """Verify PIN checking through control interface"""
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": "wps", "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    # Each entry is (PIN as entered, expected WPS_CHECK_PIN response);
    # separator characters in the entered PIN are expected to be ignored.
    cases = [("12345670", "12345670"),
             ("12345678", "FAIL-CHECKSUM"),
             ("12345", "FAIL"),
             ("123456789", "FAIL"),
             ("1234-5670", "12345670"),
             ("1234 5670", "12345670"),
             ("1-2.3:4 5670", "12345670")]
    for pin, expected in cases:
        # hostapd and wpa_supplicant must agree on the result
        res = hapd.request("WPS_CHECK_PIN " + pin).rstrip('\n')
        res2 = dev[0].request("WPS_CHECK_PIN " + pin).rstrip('\n')
        if res != res2:
            raise Exception("Unexpected difference in WPS_CHECK_PIN responses")
        if res != expected:
            raise Exception("Incorrect WPS_CHECK_PIN response {} (expected {})".format(res, expected))

    # Too-short and too-long PINs must be rejected
    if "FAIL" not in hapd.request("WPS_CHECK_PIN 12345"):
        raise Exception("Unexpected WPS_CHECK_PIN success")
    if "FAIL" not in hapd.request("WPS_CHECK_PIN 123456789"):
        raise Exception("Unexpected WPS_CHECK_PIN success")

    # Randomly generated PINs must validate as-is
    for _ in range(10):
        pin = dev[0].request("WPS_PIN get")
        if pin != dev[0].request("WPS_CHECK_PIN " + pin).rstrip('\n'):
            raise Exception("Random PIN validation failed for " + pin)
def test_ap_wps_pin_get_failure(dev, apdev):
    """PIN generation failure"""
    # Force os_get_random() to fail while generating a random PIN and
    # verify that the failure is reported to the control interface caller
    with fail_test(dev[0], 1,
                   "os_get_random;wpa_supplicant_ctrl_iface_wps_pin"):
        if "FAIL" not in dev[0].request("WPS_PIN get"):
            raise Exception("WPS_PIN did not report failure")
def test_ap_wps_wep_config(dev, apdev):
    """WPS 2.0 AP rejecting WEP configuration"""
    ssid = "test-wps-config"
    appin = "12345670"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "ap_pin": appin})
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Try to reconfigure the AP to use WEP; a WPS 2.0 AP must reject this
    dev[0].wps_reg(apdev[0]['bssid'], appin, "wps-new-ssid-wep", "OPEN", "WEP",
                   "hello", no_wait=True)
    ev = hapd.wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL timed out")
    # reason=2 maps to "WEP Prohibited" (see WPS_GET_STATUS check below)
    if "reason=2" not in ev:
        raise Exception("Unexpected reason code in WPS-FAIL")
    # Verify that the failure details are exposed through WPS_GET_STATUS
    status = hapd.request("WPS_GET_STATUS")
    if "Last WPS result: Failed" not in status:
        raise Exception("WPS failure result not shown correctly")
    if "Failure Reason: WEP Prohibited" not in status:
        raise Exception("Failure reason not reported correctly")
    if "Peer Address: " + dev[0].p2p_interface_addr() not in status:
        raise Exception("Peer address not shown correctly")
def test_ap_wps_wep_enroll(dev, apdev):
    """WPS 2.0 STA rejecting WEP configuration"""
    ssid = "test-wps-wep"
    # skip_cred_build with extra_cred makes the AP send a pre-built
    # credential (from the wps-wep-cred file) containing WEP configuration
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "skip_cred_build": "1", "extra_cred": "wps-wep-cred"}
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    # A WPS 2.0 station must reject the WEP credential with reason=2
    # (WEP Prohibited)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL event timed out")
    if "msg=12" not in ev or "reason=2 (WEP Prohibited)" not in ev:
        raise Exception("Unexpected WPS-FAIL event: " + ev)
@remote_compatible
def test_ap_wps_ie_fragmentation(dev, apdev):
    """WPS AP using fragmented WPS IE"""
    ssid = "test-wps-ie-fragmentation"
    # Maximum-length device attributes push the WPS IE contents beyond the
    # capacity of a single information element
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "1234567890abcdef1234567890abcdef",
              "manufacturer": "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
              "model_name": "1234567890abcdef1234567890abcdef",
              "model_number": "1234567890abcdef1234567890abcdef",
              "serial_number": "1234567890abcdef1234567890abcdef"}
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if "wps_device_name" not in bss or bss['wps_device_name'] != "1234567890abcdef1234567890abcdef":
        logger.info("Device Name not received correctly")
        logger.info(bss)
        # This can fail if Probe Response frame is missed and Beacon frame was
        # used to fill in the BSS entry. This can happen, e.g., during heavy
        # load every now and then and is not really an error, so try to
        # workaround by running another scan.
        dev[0].scan(freq="2412", only_new=True)
        bss = dev[0].get_bss(apdev[0]['bssid'])
        if not bss or "wps_device_name" not in bss or bss['wps_device_name'] != "1234567890abcdef1234567890abcdef":
            logger.info(bss)
            raise Exception("Device Name not received correctly")
    # Two vendor specific IEs with the WPS OUI (dd..0050f204) in the BSS
    # entry indicate that the WPS IE was fragmented as expected
    if len(re.findall("dd..0050f204", bss['ie'])) != 2:
        raise Exception("Unexpected number of WPS IEs")
def get_psk(pskfile):
    """Parse a hostapd wpa_psk_file and return a dict of MAC address -> PSK.

    Only WPS-generated entries of the exact form "wps=1 <addr> <psk>" are
    included; the "# WPA PSKs" header and any other lines are skipped.
    """
    psks = {}
    with open(pskfile, "r") as f:
        for line in f.read().splitlines():
            if line == "# WPA PSKs":
                continue
            fields = line.split(' ')
            if len(fields) == 3 and fields[0] == "wps=1":
                psks[fields[1]] = fields[2]
    return psks
def test_ap_wps_per_station_psk(dev, apdev):
    """WPS PBC provisioning with per-station PSK"""
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    addr2 = dev[2].own_addr()
    ssid = "wps"
    appin = "12345670"
    pskfile = "/tmp/ap_wps_per_enrollee_psk.psk_file"
    # Remove any stale PSK file from a previous run
    try:
        os.remove(pskfile)
    except:
        pass
    hapd = None
    try:
        # Start with a PSK file containing only the header line
        with open(pskfile, "w") as f:
            f.write("# WPA PSKs\n")
        params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                  "wpa": "2", "wpa_key_mgmt": "WPA-PSK",
                  "rsn_pairwise": "CCMP", "ap_pin": appin,
                  "wpa_psk_file": pskfile}
        hapd = hostapd.add_ap(apdev[0], params)
        logger.info("First enrollee")
        hapd.request("WPS_PBC")
        dev[0].flush_scan_cache()
        dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[0].request("WPS_PBC " + apdev[0]['bssid'])
        dev[0].wait_connected(timeout=30)
        logger.info("Second enrollee")
        hapd.request("WPS_PBC")
        dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[1].request("WPS_PBC " + apdev[0]['bssid'])
        dev[1].wait_connected(timeout=30)
        logger.info("External registrar")
        dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[2].wps_reg(apdev[0]['bssid'], appin)
        logger.info("Verifying PSK results")
        # Each station must have its own, unique PSK recorded in the file
        psks = get_psk(pskfile)
        if addr0 not in psks:
            raise Exception("No PSK recorded for sta0")
        if addr1 not in psks:
            raise Exception("No PSK recorded for sta1")
        if addr2 not in psks:
            raise Exception("No PSK recorded for sta2")
        if psks[addr0] == psks[addr1]:
            raise Exception("Same PSK recorded for sta0 and sta1")
        if psks[addr0] == psks[addr2]:
            raise Exception("Same PSK recorded for sta0 and sta2")
        if psks[addr1] == psks[addr2]:
            raise Exception("Same PSK recorded for sta1 and sta2")
        dev[0].request("REMOVE_NETWORK all")
        logger.info("Second external registrar")
        # Re-running the registrar protocol for the same station must
        # result in a new PSK being recorded
        dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[0].wps_reg(apdev[0]['bssid'], appin)
        psks2 = get_psk(pskfile)
        if addr0 not in psks2:
            raise Exception("No PSK recorded for sta0(reg)")
        if psks[addr0] == psks2[addr0]:
            raise Exception("Same PSK recorded for sta0(enrollee) and sta0(reg)")
    finally:
        os.remove(pskfile)
        if hapd:
            dev[0].request("DISCONNECT")
            dev[1].request("DISCONNECT")
            dev[2].request("DISCONNECT")
            hapd.disable()
            dev[0].flush_scan_cache()
            dev[1].flush_scan_cache()
            dev[2].flush_scan_cache()
def test_ap_wps_per_station_psk_preset(dev, apdev):
    """WPS PIN provisioning with per-station PSK preset"""
    addr0 = dev[0].own_addr()
    addr1 = dev[1].own_addr()
    addr2 = dev[2].own_addr()
    ssid = "wps"
    appin = "12345670"
    pskfile = "/tmp/ap_wps_per_enrollee_psk_preset.psk_file"
    # Remove any stale PSK file from a previous run
    try:
        os.remove(pskfile)
    except:
        pass
    hapd = None
    try:
        # Preset passphrases for dev[0] and dev[2]; dev[1] gets a
        # generated per-station PSK
        with open(pskfile, "w") as f:
            f.write("# WPA PSKs\n")
            f.write("wps=1 " + addr0 + " preset-passphrase-0\n")
            f.write("wps=1 " + addr2 + " preset-passphrase-2\n")
        params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                  "wpa": "2", "wpa_key_mgmt": "WPA-PSK",
                  "rsn_pairwise": "CCMP", "ap_pin": appin,
                  "wpa_psk_file": pskfile}
        hapd = hostapd.add_ap(apdev[0], params)
        bssid = hapd.own_addr()
        logger.info("First enrollee")
        pin = dev[0].wps_read_pin()
        hapd.request("WPS_PIN any " + pin)
        dev[0].scan_for_bss(bssid, freq=2412)
        dev[0].request("WPS_PIN %s %s" % (bssid, pin))
        dev[0].wait_connected(timeout=30)
        logger.info("Second enrollee")
        pin = dev[1].wps_read_pin()
        hapd.request("WPS_PIN any " + pin)
        dev[1].scan_for_bss(bssid, freq=2412)
        dev[1].request("WPS_PIN %s %s" % (bssid, pin))
        dev[1].wait_connected(timeout=30)
        logger.info("External registrar")
        dev[2].scan_for_bss(bssid, freq=2412)
        dev[2].wps_reg(bssid, appin)
        logger.info("Verifying PSK results")
        # All stations must have unique PSK entries in the file
        psks = get_psk(pskfile)
        if addr0 not in psks:
            raise Exception("No PSK recorded for sta0")
        if addr1 not in psks:
            raise Exception("No PSK recorded for sta1")
        if addr2 not in psks:
            raise Exception("No PSK recorded for sta2")
        logger.info("PSK[0]: " + psks[addr0])
        logger.info("PSK[1]: " + psks[addr1])
        logger.info("PSK[2]: " + psks[addr2])
        if psks[addr0] == psks[addr1]:
            raise Exception("Same PSK recorded for sta0 and sta1")
        if psks[addr0] == psks[addr2]:
            raise Exception("Same PSK recorded for sta0 and sta2")
        if psks[addr1] == psks[addr2]:
            raise Exception("Same PSK recorded for sta1 and sta2")
        # Compare the PSK/PMK values the AP is actually using with the
        # file contents
        pmk0 = hapd.request("GET_PMK " + addr0)
        pmk1 = hapd.request("GET_PMK " + addr1)
        pmk2 = hapd.request("GET_PMK " + addr2)
        logger.info("PMK[0]: " + pmk0)
        logger.info("PMK[1]: " + pmk1)
        logger.info("PMK[2]: " + pmk2)
        # Expected PSK value derived from the preset passphrase for sta0
        if pmk0 != "565faec21ff04702d9d17c464e1301efd36c8a3ea46bb866b4bec7fed4384579":
            raise Exception("PSK[0] mismatch")
        if psks[addr1] != pmk1:
            raise Exception("PSK[1] mismatch")
        if psks[addr2] != pmk2:
            raise Exception("PSK[2] mismatch")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
        dev[0].dump_monitor()
        logger.info("First enrollee again")
        # Re-enrolling a station that has a preset entry must keep the
        # same PSK in the file
        pin = dev[0].wps_read_pin()
        hapd.request("WPS_PIN any " + pin)
        dev[0].scan_for_bss(bssid, freq=2412)
        dev[0].request("WPS_PIN %s %s" % (bssid, pin))
        dev[0].wait_connected(timeout=30)
        psks2 = get_psk(pskfile)
        if addr0 not in psks2:
            raise Exception("No PSK recorded for sta0 (2)")
        if psks[addr0] != psks2[addr0]:
            raise Exception("Different PSK recorded for sta0(enrollee) and sta0(enrollee 2)")
    finally:
        os.remove(pskfile)
def test_ap_wps_per_station_psk_failure(dev, apdev):
    """WPS PBC provisioning with per-station PSK (file not writable)"""
    addr0 = dev[0].p2p_dev_addr()
    addr1 = dev[1].p2p_dev_addr()
    addr2 = dev[2].p2p_dev_addr()
    ssid = "wps"
    appin = "12345670"
    pskfile = "/tmp/ap_wps_per_enrollee_psk.psk_file"
    # Remove any stale PSK file from a previous run
    try:
        os.remove(pskfile)
    except:
        pass
    hapd = None
    try:
        with open(pskfile, "w") as f:
            f.write("# WPA PSKs\n")
        params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                  "wpa": "2", "wpa_key_mgmt": "WPA-PSK",
                  "rsn_pairwise": "CCMP", "ap_pin": appin,
                  "wpa_psk_file": pskfile}
        hapd = hostapd.add_ap(apdev[0], params)
        # Point wpa_psk_file at a path that cannot be written so that the
        # per-station PSK updates fail; provisioning must still succeed
        if "FAIL" in hapd.request("SET wpa_psk_file /tmp/does/not/exists/ap_wps_per_enrollee_psk_failure.psk_file"):
            raise Exception("Failed to set wpa_psk_file")
        logger.info("First enrollee")
        hapd.request("WPS_PBC")
        dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[0].request("WPS_PBC " + apdev[0]['bssid'])
        dev[0].wait_connected(timeout=30)
        logger.info("Second enrollee")
        hapd.request("WPS_PBC")
        dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[1].request("WPS_PBC " + apdev[0]['bssid'])
        dev[1].wait_connected(timeout=30)
        logger.info("External registrar")
        dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
        dev[2].wps_reg(apdev[0]['bssid'], appin)
        logger.info("Verifying PSK results")
        # Nothing can have been written to the original (replaced) file
        psks = get_psk(pskfile)
        if len(psks) > 0:
            raise Exception("PSK recorded unexpectedly")
    finally:
        if hapd:
            for i in range(3):
                dev[i].request("DISCONNECT")
            hapd.disable()
            for i in range(3):
                dev[i].flush_scan_cache()
        os.remove(pskfile)
def test_ap_wps_pin_request_file(dev, apdev):
    """WPS PIN provisioning with configured AP"""
    ssid = "wps"
    pinfile = "/tmp/ap_wps_pin_request_file.log"
    # Start from a clean PIN request log
    if os.path.exists(pinfile):
        os.remove(pinfile)
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wps_pin_requests": pinfile,
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    uuid = dev[0].get_status_field("uuid")
    pin = dev[0].wps_read_pin()
    try:
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        # The AP has not been told the PIN, so it must report that a PIN
        # is needed for this Enrollee
        ev = hapd.wait_event(["WPS-PIN-NEEDED"], timeout=15)
        if ev is None:
            raise Exception("PIN needed event not shown")
        if uuid not in ev:
            raise Exception("UUID mismatch")
        dev[0].request("WPS_CANCEL")
        # The PIN request (including the Enrollee UUID) must have been
        # logged into the wps_pin_requests file
        with open(pinfile, "r") as f:
            if not any(uuid in line for line in f):
                raise Exception("PIN request entry not in the log file")
    finally:
        try:
            os.remove(pinfile)
        except:
            pass
def test_ap_wps_auto_setup_with_config_file(dev, apdev):
    """WPS auto-setup with configuration file"""
    skip_without_tkip(dev[0])
    conffile = "/tmp/ap_wps_auto_setup_with_config_file.conf"
    ifname = apdev[0]['ifname']
    try:
        # Write a minimal, unconfigured (wps_state=1) AP configuration file
        with open(conffile, "w") as f:
            f.write("driver=nl80211\n")
            f.write("hw_mode=g\n")
            f.write("channel=1\n")
            f.write("ieee80211n=1\n")
            f.write("interface=%s\n" % ifname)
            f.write("ctrl_interface=/var/run/hostapd\n")
            f.write("ssid=wps\n")
            f.write("eap_server=1\n")
            f.write("wps_state=1\n")
        hapd = hostapd.add_bss(apdev[0], ifname, conffile)
        hapd.request("WPS_PBC")
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        dev[0].request("WPS_PBC " + apdev[0]['bssid'])
        dev[0].wait_connected(timeout=30)
        # WPS on the unconfigured AP triggers auto-setup; the generated
        # configuration is expected to be written back into the file
        with open(conffile, "r") as f:
            lines = f.read().splitlines()
            vals = dict()
            for l in lines:
                try:
                    [name, value] = l.split('=', 1)
                    vals[name] = value
                except ValueError as e:
                    # The "# WPS configuration" marker line has no '='
                    if "# WPS configuration" in l:
                        pass
                    else:
                        raise Exception("Unexpected configuration line: " + l)
        # Verify key results: original settings kept, AP now configured
        # (wps_state=2) and WPA-PSK enabled
        if vals['ieee80211n'] != '1' or vals['wps_state'] != '2' or "WPA-PSK" not in vals['wpa_key_mgmt']:
            raise Exception("Incorrect configuration: " + str(vals))
    finally:
        try:
            os.remove(conffile)
        except:
            pass
@long_duration_test
def test_ap_wps_pbc_timeout(dev, apdev):
    """wpa_supplicant PBC walk time and WPS ER SelReg timeout"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hapd = add_ssdp_ap(apdev[0], ap_uuid)
    # Discover the AP's UPnP description and extract the service URLs
    location = ssdp_get_location(ap_uuid)
    urls = upnp_get_urls(location)
    eventurl = urlparse(urls['event_sub_url'])
    ctrlurl = urlparse(urls['control_url'])
    url = urlparse(location)
    conn = HTTPConnection(url.netloc)
    # Minimal HTTP handler for receiving UPnP event notifications from the
    # AP; each request is answered with a generated WPS event body
    class WPSERHTTPServer(StreamRequestHandler):
        def handle(self):
            data = self.rfile.readline().strip()
            logger.debug(data)
            self.wfile.write(gen_wps_event())
    server = MyTCPServer(("127.0.0.1", 12345), WPSERHTTPServer)
    server.timeout = 1
    # Subscribe to UPnP events from the AP
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid = resp.getheader("sid")
    logger.debug("Subscription SID " + sid)
    # Pre-built SetSelectedRegistrar SOAP action (base64 encoded WPS
    # attributes in NewMessage)
    msg = '''<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:SetSelectedRegistrar xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">
<NewMessage>EEoAARAQQQABARASAAIAABBTAAIxSBBJAA4ANyoAASABBv///////xBIABA2LbR7pTpRkYj7
VFi5hrLk
</NewMessage>
</u:SetSelectedRegistrar>
</s:Body>
</s:Envelope>'''
    headers = {"Content-type": 'text/xml; charset="utf-8"'}
    headers["SOAPAction"] = '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#%s"' % "SetSelectedRegistrar"
    conn.request("POST", ctrlurl.path, msg, headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    server.handle_request()
    logger.info("Start WPS_PBC and wait for PBC walk time expiration")
    if "OK" not in dev[0].request("WPS_PBC"):
        raise Exception("WPS_PBC failed")
    start = os.times()[4]  # elapsed real time
    server.handle_request()
    dev[1].request("BSS_FLUSH 0")
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True,
                        only_new=True)
    bss = dev[1].get_bss(apdev[0]['bssid'])
    logger.debug("BSS: " + str(bss))
    # The SetSelectedRegistrar action must have marked the AP authorized
    if '[WPS-AUTH]' not in bss['flags']:
        raise Exception("WPS not indicated authorized")
    server.handle_request()
    # Wait for the AP to drop the [WPS-AUTH] flag (selected registrar
    # timeout) while also watching for the station's PBC walk time expiry
    wps_timeout_seen = False
    while True:
        hapd.dump_monitor()
        dev[1].dump_monitor()
        if not wps_timeout_seen:
            ev = dev[0].wait_event(["WPS-TIMEOUT"], timeout=0)
            if ev is not None:
                logger.info("PBC timeout seen")
                wps_timeout_seen = True
        else:
            dev[0].dump_monitor()
        now = os.times()[4]
        if now - start > 130:
            raise Exception("Selected registration information not removed")
        dev[1].request("BSS_FLUSH 0")
        dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True,
                            only_new=True)
        bss = dev[1].get_bss(apdev[0]['bssid'])
        logger.debug("BSS: " + str(bss))
        if '[WPS-AUTH]' not in bss['flags']:
            break
        server.handle_request()
    server.server_close()
    if wps_timeout_seen:
        return
    # PBC walk time has not expired yet; wait for WPS-TIMEOUT for up to
    # 150 seconds from the WPS_PBC request
    now = os.times()[4]
    if now < start + 150:
        dur = start + 150 - now
    else:
        dur = 1
    logger.info("Continue waiting for PBC timeout (%d sec)" % dur)
    ev = dev[0].wait_event(["WPS-TIMEOUT"], timeout=dur)
    if ev is None:
        raise Exception("WPS-TIMEOUT not reported")
def add_ssdp_ap(ap, ap_uuid):
    """Start a WPS-enabled AP with UPnP/SSDP advertisement on the loopback
    interface using the given UUID; returns the hostapd instance."""
    ssid = "wps-ssdp"
    ap_pin = "12345670"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo",
              "friendly_name": "WPS Access Point",
              "manufacturer_url": "http://www.example.com/",
              "model_description": "Wireless Access Point",
              "model_url": "http://www.example.com/model/",
              "upc": "123456789012"}
    return hostapd.add_ap(ap, params)
def ssdp_send(msg, no_recv=False):
    """Send an SSDP message to the multicast group over loopback.

    Returns the first response as a decoded string, or None when no_recv
    is set (fire-and-forget). A 1 second receive timeout is used.
    """
    socket.setdefaulttimeout(1)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    # Close the socket in all cases; the original implementation leaked
    # one file descriptor per call across the many SSDP test cases.
    try:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
        sock.bind(("127.0.0.1", 0))
        sock.sendto(msg.encode(), ("239.255.255.250", 1900))
        if no_recv:
            return None
        return sock.recv(1000).decode()
    finally:
        sock.close()
def ssdp_send_msearch(st, no_recv=False):
    """Send an SSDP M-SEARCH for the given search target (ST) value and
    return the response (or None with no_recv)."""
    msg = ('M-SEARCH * HTTP/1.1\r\n'
           'HOST: 239.255.255.250:1900\r\n'
           'MX: 1\r\n'
           'MAN: "ssdp:discover"\r\n'
           'ST: ' + st + '\r\n'
           '\r\n')
    return ssdp_send(msg, no_recv=no_recv)
def test_ap_wps_ssdp_msearch(dev, apdev):
    """WPS AP and SSDP M-SEARCH messages"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)
    # Header field names are case-insensitive
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'Host: 239.255.255.250:1900',
            'Mx: 1',
            'Man: "ssdp:discover"',
            'St: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    ssdp_send(msg)
    # Extra whitespace in header values must be tolerated
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'host:\t239.255.255.250:1900\t\t\t\t \t\t',
            'mx: \t1\t\t ',
            'man: \t \t "ssdp:discover" ',
            'st: urn:schemas-wifialliance-org:device:WFADevice:1\t\t',
            '', ''])
    ssdp_send(msg)
    # All search target values that the AP is expected to respond to
    ssdp_send_msearch("ssdp:all")
    ssdp_send_msearch("upnp:rootdevice")
    ssdp_send_msearch("uuid:" + ap_uuid)
    ssdp_send_msearch("urn:schemas-wifialliance-org:service:WFAWLANConfig:1")
    ssdp_send_msearch("urn:schemas-wifialliance-org:device:WFADevice:1")
    # Large MX value; any response may be delayed significantly, so do not
    # wait for one here
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST:\t239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 130',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    ssdp_send(msg, no_recv=True)
def test_ap_wps_ssdp_invalid_msearch(dev, apdev):
    """WPS AP and invalid SSDP M-SEARCH messages"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)

    socket.setdefaulttimeout(1)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
    sock.bind(("127.0.0.1", 0))

    # None of the malformed messages below is supposed to generate a
    # response; any unexpected response would remain queued on the socket
    # and be caught by the recv() check near the end.
    logger.debug("Missing MX")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Negative MX")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MX: -1',
            'MAN: "ssdp:discover"',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Invalid MX")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MX; 1',
            'MAN: "ssdp:discover"',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Missing MAN")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MX: 1',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Invalid MAN")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MX: 1',
            'MAN: foo',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MX: 1',
            'MAN; "ssdp:discover"',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Missing HOST")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Missing ST")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Mismatching ST")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: uuid:16d5f8a9-4ee4-4f5e-81f9-cc6e2f47f42d',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: foo:bar',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: foobar',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Invalid ST")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST; urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Invalid M-SEARCH")
    msg = '\r\n'.join([
            'M+SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    msg = '\r\n'.join([
            'M-SEARCH-* HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))

    logger.debug("Invalid message format")
    sock.sendto(b"NOTIFY * HTTP/1.1", ("239.255.255.250", 1900))
    # Use bare '\r' as the line terminator to produce a malformed message
    msg = '\r'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    try:
        r = sock.recv(1000)
        # Fix: sock.recv() returns bytes on Python 3; decode before
        # concatenating into the exception message (str + bytes raised
        # TypeError instead of the intended failure report).
        raise Exception("Unexpected M-SEARCH response: " + r.decode())
    except socket.timeout:
        pass

    # A valid M-SEARCH must still get a response
    logger.debug("Valid M-SEARCH")
    msg = '\r\n'.join([
            'M-SEARCH * HTTP/1.1',
            'HOST: 239.255.255.250:1900',
            'MAN: "ssdp:discover"',
            'MX: 1',
            'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
            '', ''])
    sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    try:
        sock.recv(1000)
    except socket.timeout:
        raise Exception("No SSDP response")
    sock.close()
def test_ap_wps_ssdp_burst(dev, apdev):
    """WPS AP and SSDP burst"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)
    # Valid multicast M-SEARCH for the WFADevice service type
    msg = '\r\n'.join([
        'M-SEARCH * HTTP/1.1',
        'HOST: 239.255.255.250:1900',
        'MAN: "ssdp:discover"',
        'MX: 1',
        'ST: urn:schemas-wifialliance-org:device:WFADevice:1',
        '', ''])
    socket.setdefaulttimeout(1)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
    sock.bind(("127.0.0.1", 0))
    # Send a burst of 25 identical queries from a single source port
    for i in range(0, 25):
        sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    # Count 200 OK responses until the 1-second receive timeout hits;
    # 25 queries were sent, so require at least 20 replies
    resp = 0
    while True:
        try:
            r = sock.recv(1000).decode()
            if not r.startswith("HTTP/1.1 200 OK\r\n"):
                raise Exception("Unexpected message: " + r)
            resp += 1
        except socket.timeout:
            break
    if resp < 20:
        raise Exception("Too few SSDP responses")
    # Repeat the burst from a fresh socket (new source port) and require at
    # least one response carrying the AP UUID
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
    sock.bind(("127.0.0.1", 0))
    for i in range(0, 25):
        sock.sendto(msg.encode(), ("239.255.255.250", 1900))
    while True:
        try:
            r = sock.recv(1000).decode()
            if ap_uuid in r:
                break
        except socket.timeout:
            raise Exception("No SSDP response")
def ssdp_get_location(uuid):
    """Discover the UPnP device description URL for the given device UUID.

    Sends an SSDP M-SEARCH for "uuid:<uuid>" and returns the value of the
    first LOCATION header found in the response. Raises Exception when the
    response contains no LOCATION header.
    """
    reply = ssdp_send_msearch("uuid:" + uuid)
    for line in reply.splitlines():
        # Header names are case-insensitive
        if line.lower().startswith("location:"):
            return line.split(':', 1)[1].strip()
    raise Exception("No UPnP location found")
def upnp_get_urls(location):
    """Fetch the UPnP device description from *location* and return the
    service URLs it advertises.

    Returns a dict with keys 'scpd_url', 'control_url', and 'event_sub_url',
    each resolved to an absolute URL relative to *location*.
    """
    # Python 2 urllib needs an explicit empty proxy map; Python 3 does not
    if sys.version_info[0] > 2:
        desc = urlopen(location)
    else:
        desc = urlopen(location, proxies={})
    root = ET.parse(desc).getroot()
    ns = '{urn:schemas-upnp-org:device-1-0}'
    svc = root.find("./%sdevice/%sserviceList/%sservice" % (ns, ns, ns))
    return {'scpd_url': urljoin(location, svc.find(ns + 'SCPDURL').text),
            'control_url': urljoin(location,
                                   svc.find(ns + 'controlURL').text),
            'event_sub_url': urljoin(location,
                                     svc.find(ns + 'eventSubURL').text)}
def upnp_soap_action(conn, path, action, include_soap_action=True,
                     soap_action_override=None, newmsg=None, neweventtype=None,
                     neweventmac=None):
    """POST a WFAWLANConfig SOAP action over an already-open HTTP connection.

    Builds a SOAP 1.1 envelope with the named *action* element, optionally
    populated with NewMessage (base64-encoded), NewWLANEventType, and
    NewWLANEventMAC child elements. The SOAPAction header is included by
    default; with include_soap_action=False, soap_action_override (if set)
    is used instead, and otherwise the header is omitted. Returns the HTTP
    response object.
    """
    soapns = 'http://schemas.xmlsoap.org/soap/envelope/'
    wpsns = 'urn:schemas-wifialliance-org:service:WFAWLANConfig:1'
    ET.register_namespace('soapenv', soapns)
    ET.register_namespace('wfa', wpsns)
    envelope = ET.Element(
        "{%s}Envelope" % soapns,
        attrib={'{%s}encodingStyle' % soapns:
                'http://schemas.xmlsoap.org/soap/encoding/'})
    act = ET.SubElement(ET.SubElement(envelope, "{%s}Body" % soapns),
                        "{%s}%s" % (wpsns, action))
    if newmsg:
        ET.SubElement(act, "NewMessage").text = \
            base64.b64encode(newmsg.encode()).decode()
    if neweventtype:
        ET.SubElement(act, "NewWLANEventType").text = neweventtype
    if neweventmac:
        ET.SubElement(act, "NewWLANEventMAC").text = neweventmac
    headers = {"Content-type": 'text/xml; charset="utf-8"'}
    if include_soap_action:
        headers["SOAPAction"] = \
            '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#%s"' % action
    elif soap_action_override:
        headers["SOAPAction"] = soap_action_override
    body = b"<?xml version='1.0' encoding='utf8'?>\n" + ET.tostring(envelope)
    conn.request("POST", path, body, headers)
    return conn.getresponse()
def test_ap_wps_upnp(dev, apdev):
    """WPS AP and UPnP operations"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    urls = upnp_get_urls(location)
    # Fetch the SCPD document (Python 2/3 urlopen compatibility)
    if sys.version_info[0] > 2:
        conn = urlopen(urls['scpd_url'])
    else:
        conn = urlopen(urls['scpd_url'], proxies={})
    scpd = conn.read()
    # GET on an unknown URL must yield a 404
    if sys.version_info[0] > 2:
        try:
            conn = urlopen(urljoin(location, "unknown.html"))
            raise Exception("Unexpected HTTP response to GET unknown URL")
        except HTTPError as e:
            if e.code != 404:
                raise Exception("Unexpected HTTP response to GET unknown URL")
    else:
        conn = urlopen(urljoin(location, "unknown.html"), proxies={})
        if conn.getcode() != 404:
            raise Exception("Unexpected HTTP response to GET unknown URL")
    url = urlparse(location)
    conn = HTTPConnection(url.netloc)
    #conn.set_debuglevel(1)
    headers = {"Content-type": 'text/xml; charset="utf-8"',
               "SOAPAction": '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#GetDeviceInfo"'}
    # POST to an unknown path -> 404
    conn.request("POST", "hello", "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 404:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    # Unsupported HTTP method -> 501
    conn.request("UNKNOWN", "hello", "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 501:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    # Unknown SOAPAction URN on the control URL -> 401
    headers = {"Content-type": 'text/xml; charset="utf-8"',
               "SOAPAction": '"urn:some-unknown-action#GetDeviceInfo"'}
    ctrlurl = urlparse(urls['control_url'])
    conn.request("POST", ctrlurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 401:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("GetDeviceInfo without SOAPAction header")
    resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo",
                            include_soap_action=False)
    if resp.status != 401:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("GetDeviceInfo with invalid SOAPAction header")
    # Each malformed SOAPAction variant must be rejected with 401
    for act in ["foo",
                "urn:schemas-wifialliance-org:service:WFAWLANConfig:1#GetDeviceInfo",
                '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1"',
                '"urn:schemas-wifialliance-org:service:WFAWLANConfig:123#GetDevice']:
        resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo",
                                include_soap_action=False,
                                soap_action_override=act)
        if resp.status != 401:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    # Valid GetDeviceInfo must succeed and contain NewDeviceInfo
    resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    dev = resp.read().decode()
    if "NewDeviceInfo" not in dev:
        raise Exception("Unexpected GetDeviceInfo response")
    logger.debug("PutMessage without required parameters")
    resp = upnp_soap_action(conn, ctrlurl.path, "PutMessage")
    if resp.status != 600:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("PutWLANResponse without required parameters")
    resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse")
    if resp.status != 600:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("SetSelectedRegistrar from unregistered ER")
    resp = upnp_soap_action(conn, ctrlurl.path, "SetSelectedRegistrar")
    if resp.status != 501:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Unknown action")
    resp = upnp_soap_action(conn, ctrlurl.path, "Unknown")
    if resp.status != 401:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
def test_ap_wps_upnp_subscribe(dev, apdev):
    """WPS AP and UPnP event subscription"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hapd = add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    urls = upnp_get_urls(location)
    eventurl = urlparse(urls['event_sub_url'])
    url = urlparse(location)
    conn = HTTPConnection(url.netloc)
    #conn.set_debuglevel(1)
    # SUBSCRIBE on an unknown URL -> 412
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", "hello", "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    # SUBSCRIBE without an NT header -> 412
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    # SUBSCRIBE without a callback header -> 412
    headers = {"NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    # SUBSCRIBE with an invalid NT value -> 400
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:foobar",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Valid subscription")
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid = resp.getheader("sid")
    logger.debug("Subscription SID " + sid)
    logger.debug("Invalid re-subscription")
    # SID value without the uuid: prefix -> 400
    headers = {"NT": "upnp:event",
               "sid": "123456734567854",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Invalid re-subscription")
    headers = {"NT": "upnp:event",
               "sid": "uuid:123456734567854",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Invalid re-subscription")
    # Re-subscription must not carry a callback header together with sid
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "sid": sid,
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("SID mismatch in re-subscription")
    headers = {"NT": "upnp:event",
               "sid": "uuid:4c2bca79-1ff4-4e43-85d4-952a2b8a51fb",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Valid re-subscription")
    headers = {"NT": "upnp:event",
               "sid": sid,
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid2 = resp.getheader("sid")
    logger.debug("Subscription SID " + sid2)
    # Re-subscription must keep the same SID
    if sid != sid2:
        raise Exception("Unexpected SID change")
    logger.debug("Valid re-subscription")
    # Whitespace after the uuid: prefix is accepted
    headers = {"NT": "upnp:event",
               "sid": "uuid: \t \t" + sid.split(':')[1],
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Invalid unsubscription")
    headers = {"sid": sid}
    conn.request("UNSUBSCRIBE", "/hello", "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    headers = {"foo": "bar"}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Valid unsubscription")
    headers = {"sid": sid}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Unsubscription for not existing SID")
    headers = {"sid": sid}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 412:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Invalid unsubscription")
    headers = {"sid": " \t \tfoo"}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Invalid unsubscription")
    headers = {"sid": "uuid:\t \tfoo"}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Invalid unsubscription")
    # UNSUBSCRIBE must not carry NT or callback headers
    headers = {"NT": "upnp:event",
               "sid": sid}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "sid": sid}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 400:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.debug("Valid subscription with multiple callbacks")
    headers = {"callback": '<http://127.0.0.1:12345/event> <http://127.0.0.1:12345/event>\t<http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event><http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid = resp.getheader("sid")
    logger.debug("Subscription SID " + sid)
    # Force subscription to be deleted due to errors
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Trigger proxied WPS events while event message building is forced to
    # fail with allocation errors
    with alloc_fail(hapd, 1, "event_build_message"):
        for i in range(10):
            dev[1].dump_monitor()
            dev[2].dump_monitor()
            dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
            dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
            dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
            dev[1].request("WPS_CANCEL")
            dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
            dev[2].request("WPS_CANCEL")
            if i % 4 == 1:
                time.sleep(1)
            else:
                time.sleep(0.1)
    time.sleep(0.2)
    # Subscription may already have been removed by the error path, so both
    # 200 and 412 are acceptable here
    headers = {"sid": sid}
    conn.request("UNSUBSCRIBE", eventurl.path, "", headers)
    resp = conn.getresponse()
    if resp.status != 200 and resp.status != 412:
        raise Exception("Unexpected HTTP response for UNSUBSCRIBE: %d" % resp.status)
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    # Subscribe while the initial event delivery hits an allocation failure
    with alloc_fail(hapd, 1, "http_client_addr;event_send_start"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 200:
            raise Exception("Unexpected HTTP response for SUBSCRIBE: %d" % resp.status)
        sid = resp.getheader("sid")
        logger.debug("Subscription SID " + sid)
    headers = {"sid": sid}
    conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response for UNSUBSCRIBE: %d" % resp.status)
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid = resp.getheader("sid")
    logger.debug("Subscription SID " + sid)
    # Generate events while wps_upnp_event_add is forced to fail
    with alloc_fail(hapd, 1, "=wps_upnp_event_add"):
        for i in range(2):
            dev[1].dump_monitor()
            dev[2].dump_monitor()
            dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
            dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
            dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
            dev[1].request("WPS_CANCEL")
            dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
            dev[2].request("WPS_CANCEL")
            if i == 0:
                time.sleep(1)
            else:
                time.sleep(0.1)
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    with alloc_fail(hapd, 1, "wpabuf_dup;wps_upnp_event_add"):
        dev[1].dump_monitor()
        dev[2].dump_monitor()
        dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[1].request("WPS_CANCEL")
        dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[2].request("WPS_CANCEL")
        time.sleep(0.1)
    # SUBSCRIBE failures in hostapd internals must map to HTTP 500
    with fail_test(hapd, 1, "os_get_random;uuid_make;subscription_start"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    with alloc_fail(hapd, 1, "=subscription_start"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    # Empty and malformed callback values -> 500
    headers = {"callback": '',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 500:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    headers = {"callback": ' <',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 500:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    with alloc_fail(hapd, 1, "wpabuf_alloc;subscription_first_event"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    with alloc_fail(hapd, 1, "wps_upnp_event_add;subscription_first_event"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    with alloc_fail(hapd, 1, "subscr_addr_add_url"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    with alloc_fail(hapd, 2, "subscr_addr_add_url"):
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    # Add several subscriptions with distinct callback ports
    for i in range(6):
        headers = {"callback": '<http://127.0.0.1:%d/event>' % (12345 + i),
                   "NT": "upnp:event",
                   "timeout": "Second-1234"}
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 200:
            raise Exception("Unexpected HTTP response: %d" % resp.status)
    # Event-sending OOM paths; no HTTP-visible assertion, just exercise them
    with alloc_fail(hapd, 1, "=upnp_wps_device_send_wlan_event"):
        dev[1].dump_monitor()
        dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[1].request("WPS_CANCEL")
        time.sleep(0.1)
    with alloc_fail(hapd, 1, "wpabuf_alloc;upnp_wps_device_send_event"):
        dev[1].dump_monitor()
        dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[1].request("WPS_CANCEL")
        time.sleep(0.1)
    with alloc_fail(hapd, 1,
                    "base64_gen_encode;?base64_encode;upnp_wps_device_send_wlan_event"):
        dev[1].dump_monitor()
        dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[1].request("WPS_CANCEL")
        time.sleep(0.1)
    # Re-enable with get_netif_info failing must make ENABLE fail
    hapd.disable()
    with alloc_fail(hapd, 1, "get_netif_info"):
        if "FAIL" not in hapd.request("ENABLE"):
            raise Exception("ENABLE succeeded during OOM")
def test_ap_wps_upnp_subscribe_events(dev, apdev):
    """WPS AP and UPnP event subscription and many events"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hapd = add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    urls = upnp_get_urls(location)
    eventurl = urlparse(urls['event_sub_url'])
    # Minimal ER-side callback handler: log the request line and answer
    # with a canned WPS event response
    class WPSERHTTPServer(StreamRequestHandler):
        def handle(self):
            data = self.rfile.readline().strip()
            logger.debug(data)
            self.wfile.write(gen_wps_event())
    server = MyTCPServer(("127.0.0.1", 12345), WPSERHTTPServer)
    server.timeout = 1
    url = urlparse(location)
    conn = HTTPConnection(url.netloc)
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid = resp.getheader("sid")
    logger.debug("Subscription SID " + sid)
    # Fetch the first event message
    server.handle_request()
    # Force subscription event queue to reach the maximum length by generating
    # new proxied events without the ER fetching any of the pending events.
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[2].scan_for_bss(apdev[0]['bssid'], freq=2412)
    for i in range(16):
        dev[1].dump_monitor()
        dev[2].dump_monitor()
        dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[2].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[1].request("WPS_CANCEL")
        dev[2].wait_event(["CTRL-EVENT-SCAN-RESULTS"], 5)
        dev[2].request("WPS_CANCEL")
        if i % 4 == 1:
            time.sleep(1)
        else:
            time.sleep(0.1)
    # Complete one WPS provisioning exchange successfully
    hapd.request("WPS_PIN any 12345670")
    dev[1].dump_monitor()
    dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
    ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=10)
    if ev is None:
        raise Exception("WPS success not reported")
    # Close the WPS ER HTTP server without fetching all the pending events.
    # This tests hostapd code path that clears subscription and the remaining
    # event queue when the interface is deinitialized.
    server.handle_request()
    server.server_close()
    dev[1].wait_connected()
def test_ap_wps_upnp_http_proto(dev, apdev):
    """WPS AP and UPnP/HTTP protocol testing"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    url = urlparse(location)
    # Short timeout: several probes below expect the server to drop the
    # connection instead of replying
    conn = HTTPConnection(url.netloc, timeout=0.2)
    #conn.set_debuglevel(1)
    # HEAD is not supported -> 501
    conn.request("HEAD", "hello")
    resp = conn.getresponse()
    if resp.status != 501:
        raise Exception("Unexpected response to HEAD: " + str(resp.status))
    conn.close()
    # Unsupported methods; any exception from the dropped connection is fine
    for cmd in ["PUT", "DELETE", "TRACE", "CONNECT", "M-SEARCH", "M-POST"]:
        try:
            conn.request(cmd, "hello")
            resp = conn.getresponse()
        except Exception as e:
            pass
        conn.close()
    # Invalid Content-Length values
    headers = {"Content-Length": 'abc'}
    conn.request("HEAD", "hello", "\r\n\r\n", headers)
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
    headers = {"Content-Length": '-10'}
    conn.request("HEAD", "hello", "\r\n\r\n", headers)
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
    # Content-Length far larger than the actual body
    headers = {"Content-Length": '10000000000000'}
    conn.request("HEAD", "hello", "\r\n\r\nhello", headers)
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
    # Transfer-Encoding values are rejected with 501 on HEAD
    headers = {"Transfer-Encoding": 'abc'}
    conn.request("HEAD", "hello", "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 501:
        raise Exception("Unexpected response to HEAD: " + str(resp.status))
    conn.close()
    headers = {"Transfer-Encoding": 'chunked'}
    conn.request("HEAD", "hello", "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 501:
        raise Exception("Unexpected response to HEAD: " + str(resp.status))
    conn.close()
    # Too long a header
    conn.request("HEAD", 5000 * 'A')
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
    # Long URL but within header length limits
    conn.request("HEAD", 3000 * 'A')
    resp = conn.getresponse()
    if resp.status != 501:
        raise Exception("Unexpected response to HEAD: " + str(resp.status))
    conn.close()
    # Body shorter than the declared Content-Length
    headers = {"Content-Length": '20'}
    conn.request("POST", "hello", 10 * 'A' + "\r\n\r\n", headers)
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
    # Moderate body to an unknown path -> 404
    conn.request("POST", "hello", 5000 * 'A' + "\r\n\r\n")
    resp = conn.getresponse()
    if resp.status != 404:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    conn.close()
    # Very large body; connection drop is acceptable
    conn.request("POST", "hello", 60000 * 'A' + "\r\n\r\n")
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
def test_ap_wps_upnp_http_proto_chunked(dev, apdev):
    """WPS AP and UPnP/HTTP protocol testing for chunked encoding"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    url = urlparse(location)
    conn = HTTPConnection(url.netloc)
    #conn.set_debuglevel(1)
    # Valid chunked body (10-byte + 2-byte chunks) to an unknown path -> 404
    headers = {"Transfer-Encoding": 'chunked'}
    conn.request("POST", "hello",
                 "a\r\nabcdefghij\r\n" + "2\r\nkl\r\n" + "0\r\n\r\n",
                 headers)
    resp = conn.getresponse()
    if resp.status != 404:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    conn.close()
    # Same chunks delivered in separate send() calls with a delay
    conn.putrequest("POST", "hello")
    conn.putheader('Transfer-Encoding', 'chunked')
    conn.endheaders()
    conn.send(b"a\r\nabcdefghij\r\n")
    time.sleep(0.1)
    conn.send(b"2\r\nkl\r\n")
    conn.send(b"0\r\n\r\n")
    resp = conn.getresponse()
    if resp.status != 404:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    conn.close()
    # Excessively long chunked request; the server is expected to reset the
    # connection before the transfer completes
    conn.putrequest("POST", "hello")
    conn.putheader('Transfer-Encoding', 'chunked')
    conn.endheaders()
    completed = False
    try:
        for i in range(20000):
            conn.send(b"1\r\nZ\r\n")
        conn.send(b"0\r\n\r\n")
        resp = conn.getresponse()
        completed = True
    except Exception as e:
        pass
    conn.close()
    if completed:
        raise Exception("Too long chunked request did not result in connection reset")
    # Huge chunk-size declarations with truncated data
    headers = {"Transfer-Encoding": 'chunked'}
    conn.request("POST", "hello", "80000000\r\na", headers)
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
    conn.request("POST", "hello", "10000000\r\na", headers)
    try:
        resp = conn.getresponse()
    except Exception as e:
        pass
    conn.close()
@remote_compatible
def test_ap_wps_disabled(dev, apdev):
    """WPS operations while WPS is disabled"""
    # AP configuration has no wps_state, so WPS commands must be rejected
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-wps-disabled"})
    for cmd in ["WPS_PBC", "WPS_CANCEL"]:
        if "FAIL" not in hapd.request(cmd):
            raise Exception(cmd + " succeeded unexpectedly")
def test_ap_wps_mixed_cred(dev, apdev):
    """WPS 2.0 STA merging mixed mode WPA/WPA2 credentials"""
    skip_without_tkip(dev[0])
    ssid = "test-wps-wep"
    # skip_cred_build + extra_cred make the AP send the pre-built
    # wps-mixed-cred credential file instead of generating one
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "skip_cred_build": "1", "extra_cred": "wps-mixed-cred"}
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=30)
    if ev is None:
        raise Exception("WPS-SUCCESS event timed out")
    # The two received credentials must have been merged into a single
    # network block with combined proto and pairwise values
    nets = dev[0].list_networks()
    if len(nets) != 1:
        raise Exception("Unexpected number of network blocks")
    id = nets[0]['id']
    proto = dev[0].get_network(id, "proto")
    if proto != "WPA RSN":
        raise Exception("Unexpected merged proto field value: " + proto)
    pairwise = dev[0].get_network(id, "pairwise")
    p = pairwise.split()
    if "CCMP" not in p or "TKIP" not in p:
        raise Exception("Unexpected merged pairwise field value: " + pairwise)
@remote_compatible
def test_ap_wps_while_connected(dev, apdev):
    """WPS PBC provisioning while connected to another AP"""
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    # Second, open AP that the station connects to first
    hostapd.add_ap(apdev[1], {"ssid": "open"})
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    logger.info("WPS provisioning step")
    hapd.request("WPS_PBC")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    dev[0].wait_connected(timeout=30)
    # The station must have moved over to the WPS-provisioned AP
    status = dev[0].get_status()
    if status['bssid'] != apdev[0]['bssid']:
        raise Exception("Unexpected BSSID")
@remote_compatible
def test_ap_wps_while_connected_no_autoconnect(dev, apdev):
    """WPS PBC provisioning while connected to another AP and STA_AUTOCONNECT disabled"""
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    hostapd.add_ap(apdev[1], {"ssid": "open"})
    try:
        # Disable automatic connection to verify WPS still drives roaming
        dev[0].request("STA_AUTOCONNECT 0")
        dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
        logger.info("WPS provisioning step")
        hapd.request("WPS_PBC")
        dev[0].dump_monitor()
        dev[0].request("WPS_PBC " + apdev[0]['bssid'])
        dev[0].wait_connected(timeout=30)
        status = dev[0].get_status()
        if status['bssid'] != apdev[0]['bssid']:
            raise Exception("Unexpected BSSID")
    finally:
        # Restore the default autoconnect behavior for subsequent tests
        dev[0].request("STA_AUTOCONNECT 1")
@remote_compatible
def test_ap_wps_from_event(dev, apdev):
    """WPS PBC event on AP to enable PBC"""
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    hapd.dump_monitor()
    # Station starts PBC first; AP must report the enrollee before its own
    # PBC mode has been enabled
    dev[0].request("WPS_PBC " + apdev[0]['bssid'])
    ev = hapd.wait_event(['WPS-ENROLLEE-SEEN'], timeout=15)
    if ev is None:
        raise Exception("No WPS-ENROLLEE-SEEN event on AP")
    # Event format: WPS-ENROLLEE-SEEN <addr> ... <Device Password Id> ...
    vals = ev.split(' ')
    if vals[1] != dev[0].p2p_interface_addr():
        raise Exception("Unexpected enrollee address: " + vals[1])
    # Device Password Id 4 = PushButton
    if vals[5] != '4':
        raise Exception("Unexpected Device Password Id: " + vals[5])
    hapd.request("WPS_PBC")
    dev[0].wait_connected(timeout=30)
def test_ap_wps_ap_scan_2(dev, apdev):
    """AP_SCAN 2 for WPS"""
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    hapd.request("WPS_PBC")
    # Use a separate wpa_supplicant instance with driver-based association
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    wpas.dump_monitor()
    if "OK" not in wpas.request("AP_SCAN 2"):
        raise Exception("Failed to set AP_SCAN 2")
    wpas.flush_scan_cache()
    wpas.scan_for_bss(apdev[0]['bssid'], freq="2412")
    wpas.dump_monitor()
    wpas.request("WPS_PBC " + apdev[0]['bssid'])
    ev = wpas.wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS-SUCCESS event timed out")
    wpas.wait_connected(timeout=30)
    wpas.dump_monitor()
    wpas.request("DISCONNECT")
    wpas.wait_disconnected()
    # Verify the provisioned network block includes CCMP
    id = wpas.list_networks()[0]['id']
    pairwise = wpas.get_network(id, "pairwise")
    if "CCMP" not in pairwise.split():
        raise Exception("Unexpected pairwise parameter value: " + pairwise)
    group = wpas.get_network(id, "group")
    if "CCMP" not in group.split():
        raise Exception("Unexpected group parameter value: " + group)
    # Need to select a single cipher for ap_scan=2 testing
    wpas.set_network(id, "pairwise", "CCMP")
    wpas.set_network(id, "group", "CCMP")
    wpas.request("BSS_FLUSH 0")
    wpas.dump_monitor()
    # Reconnect using the stored credentials with ap_scan=2
    wpas.request("REASSOCIATE")
    wpas.wait_connected(timeout=30)
    wpas.dump_monitor()
    wpas.request("DISCONNECT")
    wpas.wait_disconnected()
    wpas.flush_scan_cache()
@remote_compatible
def test_ap_wps_eapol_workaround(dev, apdev):
    """EAPOL workaround code path for 802.1X header length mismatch"""
    ssid = "test-wps"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1"})
    bssid = apdev[0]['bssid']
    # Route EAPOL frames through the control interface so a crafted frame
    # can be injected
    hapd.request("SET ext_eapol_frame_io 1")
    dev[0].request("SET ext_eapol_frame_io 1")
    hapd.request("WPS_PBC")
    dev[0].request("WPS_PBC")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=15)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX from hostapd")
    # Inject an EAPOL frame whose 802.1X header length does not match the
    # actual payload length
    res = dev[0].request("EAPOL_RX " + bssid + " 020000040193000501FFFF")
    if "OK" not in res:
        raise Exception("EAPOL_RX to wpa_supplicant failed")
def test_ap_wps_iteration(dev, apdev):
    """WPS PIN and iterate through APs without selected registrar"""
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    ssid2 = "test-wps-conf2"
    hapd2 = hostapd.add_ap(apdev[1],
                           {"ssid": ssid2, "eap_server": "1", "wps_state": "2",
                            "wpa_passphrase": "12345678", "wpa": "2",
                            "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
    dev[0].dump_monitor()
    pin = dev[0].request("WPS_PIN any")
    # Wait for iteration through all WPS APs to happen before enabling any
    # Registrar.
    for i in range(2):
        ev = dev[0].wait_event(["Associated with"], timeout=30)
        if ev is None:
            raise Exception("No association seen")
        # With no selected registrar, each AP is expected to respond with
        # M2D, after which the station moves on to the next candidate.
        ev = dev[0].wait_event(["WPS-M2D"], timeout=10)
        if ev is None:
            raise Exception("No M2D from AP")
        dev[0].wait_disconnected()
    # Verify that each AP requested PIN
    ev = hapd.wait_event(["WPS-PIN-NEEDED"], timeout=1)
    if ev is None:
        raise Exception("No WPS-PIN-NEEDED event from AP")
    ev = hapd2.wait_event(["WPS-PIN-NEEDED"], timeout=1)
    if ev is None:
        raise Exception("No WPS-PIN-NEEDED event from AP2")
    # Provide PIN to one of the APs and verify that connection gets formed
    hapd.request("WPS_PIN any " + pin)
    dev[0].wait_connected(timeout=30)
def test_ap_wps_iteration_error(dev, apdev):
    """WPS AP iteration on no Selected Registrar and error case with an AP"""
    ssid = "test-wps-conf-pin"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                           "wps_independent": "1"})
    hapd.request("SET ext_eapol_frame_io 1")
    bssid = apdev[0]['bssid']
    pin = dev[0].wps_read_pin()
    dev[0].request("WPS_PIN any " + pin)
    ev = hapd.wait_event(["EAPOL-TX"], timeout=15)
    if ev is None:
        raise Exception("No EAPOL-TX (EAP-Request/Identity) from hostapd")
    # Relay only the EAP-Request/Identity to the station; after that frame
    # relaying stops so this AP appears to misbehave mid-exchange.
    dev[0].request("EAPOL_RX " + bssid + " " + ev.split(' ')[2])
    ev = hapd.wait_event(["EAPOL-TX"], timeout=15)
    if ev is None:
        raise Exception("No EAPOL-TX (EAP-WSC/Start) from hostapd")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=5)
    if ev is None:
        raise Exception("No CTRL-EVENT-EAP-STARTED")
    # Do not forward any more EAPOL frames to test wpa_supplicant behavior for
    # a case with an incorrectly behaving WPS AP.
    # Start the real target AP and activate registrar on it.
    hapd2 = hostapd.add_ap(apdev[1],
                           {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                            "wpa_passphrase": "12345678", "wpa": "2",
                            "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                            "wps_independent": "1"})
    hapd2.request("WPS_PIN any " + pin)
    # The station should give up on the broken AP, find the second one and
    # complete provisioning there.
    dev[0].wait_disconnected(timeout=15)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
    if ev is None:
        raise Exception("No CTRL-EVENT-EAP-STARTED for the second AP")
    ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=15)
    if ev is None:
        raise Exception("No WPS-CRED-RECEIVED for the second AP")
    dev[0].wait_connected(timeout=15)
@remote_compatible
def test_ap_wps_priority(dev, apdev):
    """WPS PIN provisioning with configured AP and wps_priority"""
    ssid = "test-wps-conf-pin"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    try:
        # wps_priority should be copied into the network block that WPS
        # provisioning creates.
        dev[0].request("SET wps_priority 6")
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        dev[0].wait_connected(timeout=30)
        netw = dev[0].list_networks()
        prio = dev[0].get_network(netw[0]['id'], 'priority')
        if prio != '6':
            raise Exception("Unexpected network priority: " + prio)
    finally:
        # Restore the default so later test cases are not affected.
        dev[0].request("SET wps_priority 0")
@remote_compatible
def test_ap_wps_and_non_wps(dev, apdev):
    """WPS and non-WPS AP in single hostapd process"""
    params = {"ssid": "wps", "eap_server": "1", "wps_state": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    params = {"ssid": "no wps"}
    hapd2 = hostapd.add_ap(apdev[1], params)
    # WPS control interface commands must keep working on the WPS-enabled
    # BSS even though a non-WPS BSS is managed by the same process.
    appin = hapd.request("WPS_AP_PIN random")
    if "FAIL" in appin:
        raise Exception("Could not generate random AP PIN")
    if appin not in hapd.request("WPS_AP_PIN get"):
        raise Exception("Could not fetch current AP PIN")
    if "FAIL" in hapd.request("WPS_PBC"):
        raise Exception("WPS_PBC failed")
    if "FAIL" in hapd.request("WPS_CANCEL"):
        raise Exception("WPS_CANCEL failed")
def test_ap_wps_init_oom(dev, apdev):
    """Initial AP configuration and OOM during PSK generation"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    # Force the allocation in the credential build to fail so the first
    # provisioning attempt cannot complete and the station disconnects.
    with alloc_fail(hapd, 1, "base64_gen_encode;?base64_encode;wps_build_cred"):
        pin = dev[0].wps_read_pin()
        hapd.request("WPS_PIN any " + pin)
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        dev[0].wait_disconnected()
    # Retry without the forced failure; provisioning should now succeed.
    hapd.request("WPS_PIN any " + pin)
    dev[0].wait_connected(timeout=30)
@remote_compatible
def test_ap_wps_er_oom(dev, apdev):
    """WPS ER OOM in XML processing"""
    try:
        _test_ap_wps_er_oom(dev, apdev)
    finally:
        # Clean up ER, enrollee and connection state regardless of outcome.
        dev[0].request("WPS_ER_STOP")
        dev[1].request("WPS_CANCEL")
        dev[0].request("DISCONNECT")
def _test_ap_wps_er_oom(dev, apdev):
    """Helper for test_ap_wps_er_oom: exercise OOM paths in ER XML parsing."""
    ssid = "wps-er-ap-config"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "wpa_passphrase": "12345678", "wpa": "2",
                    "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
                    "device_name": "Wireless AP", "manufacturer": "Company",
                    "model_name": "WAP", "model_number": "123",
                    "serial_number": "12345", "device_type": "6-0050F204-1",
                    "os_version": "01020300",
                    "config_methods": "label push_button",
                    "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"})
    dev[0].connect(ssid, psk="12345678", scan_freq="2412")
    # OOM while base64-decoding the device description must prevent the AP
    # from being added to the ER (no WPS-ER-AP-ADD event).
    with alloc_fail(dev[0], 1,
                    "base64_gen_decode;?base64_decode;xml_get_base64_item"):
        dev[0].request("WPS_ER_START ifname=lo")
        ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=3)
        if ev is not None:
            raise Exception("Unexpected AP discovery")
        dev[0].request("WPS_ER_STOP")
    # Without the forced failure the AP should be discovered normally.
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
    if ev is None:
        raise Exception("AP discovery timed out")
    dev[1].scan_for_bss(apdev[0]['bssid'], freq=2412)
    # Same OOM while processing enrollee information; enrollee discovery is
    # still expected to complete within the timeout.
    with alloc_fail(dev[0], 1,
                    "base64_gen_decode;?base64_decode;xml_get_base64_item"):
        dev[1].request("WPS_PBC " + apdev[0]['bssid'])
        ev = dev[1].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
        if ev is None:
            raise Exception("PBC scan failed")
        ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=15)
        if ev is None:
            raise Exception("Enrollee discovery timed out")
@remote_compatible
def test_ap_wps_er_init_oom(dev, apdev):
    """WPS ER and OOM during init"""
    try:
        _test_ap_wps_er_init_oom(dev, apdev)
    finally:
        # Ensure the ER is stopped even if the helper raised.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_init_oom(dev, apdev):
    """Helper for test_ap_wps_er_init_oom: WPS_ER_START must fail cleanly
    when various allocations during ER initialization fail."""
    with alloc_fail(dev[0], 1, "wps_er_init"):
        if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
            raise Exception("WPS_ER_START succeeded during OOM")
    with alloc_fail(dev[0], 1, "http_server_init"):
        if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
            raise Exception("WPS_ER_START succeeded during OOM")
    with alloc_fail(dev[0], 2, "http_server_init"):
        if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
            raise Exception("WPS_ER_START succeeded during OOM")
    with alloc_fail(dev[0], 1, "eloop_sock_table_add_sock;?eloop_register_sock;wps_er_ssdp_init"):
        if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
            raise Exception("WPS_ER_START succeeded during OOM")
    # Also cover a forced failure in random number generation (not an OOM).
    with fail_test(dev[0], 1, "os_get_random;wps_er_init"):
        if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo"):
            raise Exception("WPS_ER_START succeeded during os_get_random failure")
@remote_compatible
def test_ap_wps_er_init_fail(dev, apdev):
    """WPS ER init failure"""
    if "FAIL" not in dev[0].request("WPS_ER_START ifname=does-not-exist"):
        # Should not be reached; stop the unexpectedly started ER before
        # failing so that later test cases are not affected.
        dev[0].request("WPS_ER_STOP")
        raise Exception("WPS_ER_START with non-existing ifname succeeded")
def test_ap_wps_wpa_cli_action(dev, apdev, test_params):
    """WPS events and wpa_cli action script"""
    logdir = os.path.abspath(test_params['logdir'])
    pidfile = os.path.join(logdir, 'ap_wps_wpa_cli_action.wpa_cli.pid')
    logfile = os.path.join(logdir, 'ap_wps_wpa_cli_action.wpa_cli.res')
    actionfile = os.path.join(logdir, 'ap_wps_wpa_cli_action.wpa_cli.action.sh')
    with open(actionfile, 'w') as f:
        f.write('#!/bin/sh\n')
        f.write('echo $* >> %s\n' % logfile)
        # Kill the process and wait some time before returning to allow all the
        # pending events to be processed with some of this happening after the
        # eloop SIGALRM signal has been scheduled.
        f.write('if [ $2 = "WPS-SUCCESS" -a -r %s ]; then kill `cat %s`; sleep 1; fi\n' % (pidfile, pidfile))
    # The action script must be executable for wpa_cli to run it.
    os.chmod(actionfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC |
             stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    # Prefer the separately built wpa_cli; fall back to the in-tree binary.
    prg = os.path.join(test_params['logdir'],
                       'alt-wpa_supplicant/wpa_supplicant/wpa_cli')
    if not os.path.exists(prg):
        prg = '../../wpa_supplicant/wpa_cli'
    arg = [prg, '-P', pidfile, '-B', '-i', dev[0].ifname, '-a', actionfile]
    subprocess.call(arg)
    # Verify via ps output that the background wpa_cli actually started.
    arg = ['ps', 'ax']
    cmd = subprocess.Popen(arg, stdout=subprocess.PIPE)
    out = cmd.communicate()[0].decode()
    cmd.wait()
    logger.debug("Processes:\n" + out)
    if "wpa_cli -P %s -B -i %s" % (pidfile, dev[0].ifname) not in out:
        raise Exception("Did not see wpa_cli running")
    hapd.request("WPS_PIN any 12345670")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
    dev[0].wait_connected(timeout=30)
    # The action script kills wpa_cli on WPS-SUCCESS; poll for the PID file
    # to disappear as a sign that the process exited.
    for i in range(30):
        if not os.path.exists(pidfile):
            break
        time.sleep(0.1)
    if not os.path.exists(logfile):
        raise Exception("wpa_cli action results file not found")
    with open(logfile, 'r') as f:
        res = f.read()
    if "WPS-SUCCESS" not in res:
        raise Exception("WPS-SUCCESS event not seen in action file")
    # Confirm wpa_cli terminated and cleaned up its PID file.
    arg = ['ps', 'ax']
    cmd = subprocess.Popen(arg, stdout=subprocess.PIPE)
    out = cmd.communicate()[0].decode()
    cmd.wait()
    logger.debug("Remaining processes:\n" + out)
    if "wpa_cli -P %s -B -i %s" % (pidfile, dev[0].ifname) in out:
        raise Exception("wpa_cli still running")
    if os.path.exists(pidfile):
        raise Exception("PID file not removed")
def test_ap_wps_er_ssdp_proto(dev, apdev):
    """WPS ER SSDP protocol testing"""
    try:
        _test_ap_wps_er_ssdp_proto(dev, apdev)
    finally:
        # Always stop the ER so a failure here cannot leak state.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_ssdp_proto(dev, apdev):
    """Helper for test_ap_wps_er_ssdp_proto: answer the ER's M-SEARCH with
    malformed SSDP responses and verify the ER survives them."""
    socket.setdefaulttimeout(1)
    # Pose as the SSDP multicast responder so the ER's M-SEARCH can be
    # answered with arbitrary (mostly invalid) messages.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(("239.255.255.250", 1900))
    if "FAIL" not in dev[0].request("WPS_ER_START ifname=lo foo"):
        raise Exception("Invalid filter accepted")
    if "OK" not in dev[0].request("WPS_ER_START ifname=lo 1.2.3.4"):
        raise Exception("WPS_ER_START with filter failed")
    (msg, addr) = sock.recvfrom(1000)
    msg = msg.decode()
    logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
    if "M-SEARCH" not in msg:
        raise Exception("Not an M-SEARCH")
    sock.sendto(b"FOO", addr)
    time.sleep(0.1)
    dev[0].request("WPS_ER_STOP")
    # Restart without the address filter and get a fresh M-SEARCH.
    dev[0].request("WPS_ER_START ifname=lo")
    (msg, addr) = sock.recvfrom(1000)
    msg = msg.decode()
    logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
    if "M-SEARCH" not in msg:
        raise Exception("Not an M-SEARCH")
    # A collection of invalid or incomplete SSDP responses; none of these
    # should crash the ER or produce a usable AP entry.
    sock.sendto(b"FOO", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nFOO\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nNTS:foo\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nNTS:ssdp:byebye\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\ncache-control: foo=1\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\ncache-control: max-age=1\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nusn:\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nusn:foo\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nusn: uuid:\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nusn: uuid: \r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nusn: uuid: foo\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nNTS:ssdp:byebye\r\n\r\n", addr)
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\n\r\n", addr)
    # OOM while adding the advertised AP entry.
    with alloc_fail(dev[0], 1, "wps_er_ap_add"):
        sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
        time.sleep(0.1)
    with alloc_fail(dev[0], 2, "wps_er_ap_add"):
        sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
        time.sleep(0.1)
    # Add an AP with bogus URL
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
    # Update timeout on AP without updating URL
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1:12345/foo.xml\r\ncache-control:max-age=1\r\n\r\n", addr)
    ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=5)
    if ev is None:
        raise Exception("No WPS-ER-AP-REMOVE event on max-age timeout")
    # Add an AP with a valid URL (but no server listing to it)
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1:12345/foo.xml\r\ncache-control:max-age=1\r\n\r\n", addr)
    ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=5)
    if ev is None:
        raise Exception("No WPS-ER-AP-REMOVE event on max-age timeout")
    sock.close()
# URL (from the SUBSCRIBE CALLBACK header) where the WPS ER under test
# expects UPnP event notifications; set by WPSAPHTTPServer.handle().
wps_event_url = None
def gen_upnp_info(eventSubURL='wps_event', controlURL='wps_control',
                  udn='uuid:27ea801a-9e5c-4e73-bd82-f89cbcd10d7e'):
    """Return a canned HTTP response carrying a UPnP WFADevice description.

    The UDN, controlURL and eventSubURL elements are included only when the
    corresponding argument is non-empty, which lets protocol tests generate
    intentionally incomplete device descriptions.
    """
    device_head = '''<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<device>
<deviceType>urn:schemas-wifialliance-org:device:WFADevice:1</deviceType>
<friendlyName>WPS Access Point</friendlyName>
<manufacturer>Company</manufacturer>
<modelName>WAP</modelName>
<modelNumber>123</modelNumber>
<serialNumber>12345</serialNumber>
'''
    udn_part = '<UDN>' + udn + '</UDN>' if udn else ''
    service_head = '''<serviceList>
<service>
<serviceType>urn:schemas-wifialliance-org:service:WFAWLANConfig:1</serviceType>
<serviceId>urn:wifialliance-org:serviceId:WFAWLANConfig1</serviceId>
<SCPDURL>wps_scpd.xml</SCPDURL>
'''
    control_part = ('<controlURL>' + controlURL + '</controlURL>\n'
                    if controlURL else '')
    event_part = ('<eventSubURL>' + eventSubURL + '</eventSubURL>\n'
                  if eventSubURL else '')
    tail = '''</service>
</serviceList>
</device>
</root>
'''
    body = device_head + udn_part + service_head + control_part + event_part + tail
    status = ['HTTP/1.1 200 OK',
              'Content-Type: text/xml; charset="utf-8"',
              'Server: Unspecified, UPnP/1.0, Unspecified',
              'Connection: close',
              'Content-Length: %d' % len(body),
              'Date: Sat, 15 Aug 2015 18:55:08 GMT']
    return ('\r\n'.join(status) + '\r\n\r\n' + body).encode()
def gen_wps_control(payload_override=None):
    """Return a canned SOAP GetDeviceInfoResponse as a full HTTP response.

    payload_override replaces the default SOAP body, allowing malformed
    control responses to be generated for protocol testing.
    """
    default_payload = '''<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:GetDeviceInfoResponse xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">
<NewDeviceInfo>EEoAARAQIgABBBBHABAn6oAanlxOc72C+Jy80Q1+ECAABgIAAAADABAaABCJZ7DPtbU3Ust9
Z3wJF07WEDIAwH45D3i1OqB7eJGwTzqeapS71h3KyXncK2xJZ+xqScrlorNEg6LijBJzG2Ca
+FZli0iliDJd397yAx/jk4nFXco3q5ylBSvSw9dhJ5u1xBKSnTilKGlUHPhLP75PUqM3fot9
7zwtFZ4bx6x1sBA6oEe2d0aUJmLumQGCiKEIWlnxs44zego/2tAe81bDzdPBM7o5HH/FUhD+
KoGzFXp51atP+1n9Vta6AkI0Vye99JKLcC6Md9dMJltSVBgd4Xc4lRAEAAIAIxAQAAIADRAN
AAEBEAgAAgAEEEQAAQIQIQAHQ29tcGFueRAjAANXQVAQJAADMTIzEEIABTEyMzQ1EFQACAAG
AFDyBAABEBEAC1dpcmVsZXNzIEFQEDwAAQEQAgACAAAQEgACAAAQCQACAAAQLQAEgQIDABBJ
AAYANyoAASA=
</NewDeviceInfo>
</u:GetDeviceInfoResponse>
</s:Body>
</s:Envelope>
'''
    body = payload_override if payload_override else default_payload
    status = ['HTTP/1.1 200 OK',
              'Content-Type: text/xml; charset="utf-8"',
              'Server: Unspecified, UPnP/1.0, Unspecified',
              'Connection: close',
              'Content-Length: %d' % len(body),
              'Date: Sat, 15 Aug 2015 18:55:08 GMT']
    return ('\r\n'.join(status) + '\r\n\r\n' + body).encode()
def gen_wps_event(sid='uuid:7eb3342a-8a5f-47fe-a585-0785bfec6d8a'):
    """Return a canned (empty-body) HTTP response to a SUBSCRIBE request.

    The SID header is included only when sid is non-empty so that tests
    can simulate event subscription responses with a missing or broken
    subscription identifier.
    """
    body = ""
    status = ['HTTP/1.1 200 OK',
              'Content-Type: text/xml; charset="utf-8"',
              'Server: Unspecified, UPnP/1.0, Unspecified',
              'Connection: close',
              'Content-Length: %d' % len(body)]
    if sid:
        status.append('SID: ' + sid)
    status.append('Timeout: Second-1801')
    status.append('Date: Sat, 15 Aug 2015 18:55:08 GMT')
    return ('\r\n'.join(status) + '\r\n\r\n' + body).encode()
class WPSAPHTTPServer(StreamRequestHandler):
    # Minimal HTTP request handler emulating a WPS AP's UPnP endpoints
    # (device description, SOAP control and event subscription). Records
    # the eventing CALLBACK URL in the module-level wps_event_url.
    def handle(self):
        # Request line, e.g. "GET /foo.xml HTTP/1.1".
        data = self.rfile.readline().decode().strip()
        logger.info("HTTP server received: " + data)
        # Consume all headers up to the blank line that terminates them.
        while True:
            hdr = self.rfile.readline().decode().strip()
            if len(hdr) == 0:
                break
            logger.info("HTTP header: " + hdr)
            if "CALLBACK:" in hdr:
                # SUBSCRIBE delivers the event delivery URL as <URL>.
                global wps_event_url
                wps_event_url = hdr.split(' ')[1].strip('<>')
        # Dispatch on the request line after all headers have been read.
        if "GET /foo.xml" in data:
            self.handle_upnp_info()
        elif "POST /wps_control" in data:
            self.handle_wps_control()
        elif "SUBSCRIBE /wps_event" in data:
            self.handle_wps_event()
        else:
            self.handle_others(data)
    def handle_upnp_info(self):
        # UPnP device description document (overridden by some tests).
        self.wfile.write(gen_upnp_info())
    def handle_wps_control(self):
        # SOAP GetDeviceInfoResponse.
        self.wfile.write(gen_wps_control())
    def handle_wps_event(self):
        # Event subscription response (overridden by some tests).
        self.wfile.write(gen_wps_event())
    def handle_others(self, data):
        logger.info("Ignore HTTP request: " + data)
class MyTCPServer(TCPServer):
    """TCPServer that sets SO_REUSEADDR so consecutive test cases can
    rebind the same port without waiting for TIME_WAIT to expire."""
    # Class-level attribute has the same effect as setting it on the
    # instance before TCPServer.__init__() binds the socket.
    allow_reuse_address = True
    def __init__(self, addr, handler):
        TCPServer.__init__(self, addr, handler)
def wps_er_start(dev, http_server, max_age=1, wait_m_search=False,
                 location_url=None):
    """Start WPS ER on dev and advertise a fake AP to it over SSDP.

    Binds to the SSDP multicast address, waits for the ER's M-SEARCH and
    answers with a location URL served by http_server. Returns the
    (TCP server, SSDP socket) pair; the caller is responsible for closing
    both (normally via wps_er_stop()).
    """
    socket.setdefaulttimeout(1)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(("239.255.255.250", 1900))
    dev.request("WPS_ER_START ifname=lo")
    for i in range(100):
        (msg, addr) = sock.recvfrom(1000)
        msg = msg.decode()
        logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
        if "M-SEARCH" in msg:
            break
        if not wait_m_search:
            # Strict mode: the very first datagram must be an M-SEARCH.
            raise Exception("Not an M-SEARCH")
        if i == 99:
            raise Exception("No M-SEARCH seen")
    # Add an AP with a valid URL and server listing to it
    server = MyTCPServer(("127.0.0.1", 12345), http_server)
    if not location_url:
        location_url = 'http://127.0.0.1:12345/foo.xml'
    sock.sendto(("HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:%s\r\ncache-control:max-age=%d\r\n\r\n" % (location_url, max_age)).encode(), addr)
    server.timeout = 1
    return server, sock
def wps_er_stop(dev, sock, server, on_alloc_fail=False):
    """Tear down the fake AP servers and stop WPS ER on dev.

    With on_alloc_fail=True, wait for the injected allocation failure to be
    consumed instead of expecting the normal WPS-ER-AP-REMOVE event.
    """
    sock.close()
    server.server_close()
    if on_alloc_fail:
        done = False
        # Poll until GET_ALLOC_FAIL reports a remaining trigger count of 0,
        # i.e. the forced allocation failure was actually hit.
        for i in range(50):
            res = dev.request("GET_ALLOC_FAIL")
            if res.startswith("0:"):
                done = True
                break
            time.sleep(0.1)
        if not done:
            raise Exception("No allocation failure reported")
    else:
        # The advertised max-age is short, so the AP entry should expire.
        ev = dev.wait_event(["WPS-ER-AP-REMOVE"], timeout=5)
        if ev is None:
            raise Exception("No WPS-ER-AP-REMOVE event on max-age timeout")
    dev.request("WPS_ER_STOP")
def run_wps_er_proto_test(dev, handler, no_event_url=False, location_url=None,
                          max_age=1):
    """Run WPS ER against a fake AP HTTP server and capture the event URL.

    Starts the ER on the loopback interface, serves three HTTP requests
    with the given handler (device description, control, event
    subscription) and verifies whether an event subscription URL was
    (or, with no_event_url=True, was not) delivered via the CALLBACK
    header. Always stops the ER and closes the SSDP socket on exit.
    """
    # Fixes: removed an unused local ("uuid") and close the SSDP socket
    # returned by wps_er_start(), which used to leak here.
    sock = None
    try:
        server, sock = wps_er_start(dev, handler, location_url=location_url,
                                    max_age=max_age)
        global wps_event_url
        wps_event_url = None
        # Serve the three expected requests: GET /foo.xml,
        # POST /wps_control and SUBSCRIBE /wps_event.
        server.handle_request()
        server.handle_request()
        server.handle_request()
        server.server_close()
        if no_event_url:
            if wps_event_url:
                raise Exception("Received event URL unexpectedly")
            return
        if wps_event_url is None:
            raise Exception("Did not get event URL")
        logger.info("Event URL: " + wps_event_url)
    finally:
        if sock:
            sock.close()
        dev.request("WPS_ER_STOP")
def send_wlanevent(url, uuid, data, no_response=False):
    """Deliver a UPnP WLANEvent NOTIFY to the ER's event URL.

    url is a parsed event URL (urlparse result), uuid the subscription
    UUID and data the raw WLANEvent payload (base64 encoded on the wire).
    With no_response=True any error while reading the HTTP response is
    ignored; used when the ER is expected to drop the connection. Raises
    on a non-200 response otherwise.
    """
    conn = HTTPConnection(url.netloc)
    payload = '''<?xml version="1.0" encoding="utf-8"?>
<e:propertyset xmlns:e="urn:schemas-upnp-org:event-1-0">
<e:property><STAStatus>1</STAStatus></e:property>
<e:property><APStatus>1</APStatus></e:property>
<e:property><WLANEvent>'''
    payload += base64.b64encode(data).decode()
    payload += '</WLANEvent></e:property></e:propertyset>'
    headers = {"Content-type": 'text/xml; charset="utf-8"',
               "Server": "Unspecified, UPnP/1.0, Unspecified",
               "HOST": url.netloc,
               "NT": "upnp:event",
               "SID": "uuid:" + uuid,
               "SEQ": "0",
               "Content-Length": str(len(payload))}
    conn.request("NOTIFY", url.path, payload, headers)
    if no_response:
        # Best effort: the peer may close the connection without replying.
        try:
            conn.getresponse()
        except Exception as e:
            pass
        return
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
def test_ap_wps_er_http_proto(dev, apdev):
    """WPS ER HTTP protocol testing"""
    try:
        _test_ap_wps_er_http_proto(dev, apdev)
    finally:
        # Always stop the ER so a failure here cannot leak state.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_http_proto(dev, apdev):
    """Helper for test_ap_wps_er_http_proto: exercise the ER's HTTP/UPnP
    event handling with valid, malformed and resource-starved input."""
    uuid = '27ea801a-9e5c-4e73-bd82-f89cbcd10d7e'
    server, sock = wps_er_start(dev[0], WPSAPHTTPServer, max_age=15)
    global wps_event_url
    wps_event_url = None
    # Serve device description, control and event subscription requests.
    server.handle_request()
    server.handle_request()
    server.handle_request()
    server.server_close()
    if wps_event_url is None:
        raise Exception("Did not get event URL")
    logger.info("Event URL: " + wps_event_url)
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=10)
    if ev is None:
        raise Exception("No WPS-ER-AP-ADD event")
    if uuid not in ev:
        raise Exception("UUID mismatch")
    sock.close()
    logger.info("Valid Probe Request notification")
    url = urlparse(wps_event_url)
    conn = HTTPConnection(url.netloc)
    payload = '''<?xml version="1.0" encoding="utf-8"?>
<e:propertyset xmlns:e="urn:schemas-upnp-org:event-1-0">
<e:property><STAStatus>1</STAStatus></e:property>
<e:property><APStatus>1</APStatus></e:property>
<e:property><WLANEvent>ATAyOjAwOjAwOjAwOjAwOjAwEEoAARAQOgABAhAIAAIxSBBHABA2LbR7pTpRkYj7VFi5hrLk
EFQACAAAAAAAAAAAEDwAAQMQAgACAAAQCQACAAAQEgACAAAQIQABIBAjAAEgECQAASAQEQAI
RGV2aWNlIEEQSQAGADcqAAEg
</WLANEvent></e:property>
</e:propertyset>
'''
    headers = {"Content-type": 'text/xml; charset="utf-8"',
               "Server": "Unspecified, UPnP/1.0, Unspecified",
               "HOST": url.netloc,
               "NT": "upnp:event",
               "SID": "uuid:" + uuid,
               "SEQ": "0",
               "Content-Length": str(len(payload))}
    conn.request("NOTIFY", url.path, payload, headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=5)
    if ev is None:
        raise Exception("No WPS-ER-ENROLLEE-ADD event")
    if "362db47b-a53a-5191-88fb-5458b986b2e4" not in ev:
        raise Exception("No Enrollee UUID match")
    # NOTIFY requests with broken paths must be rejected appropriately.
    logger.info("Incorrect event URL AP id")
    conn = HTTPConnection(url.netloc)
    conn.request("NOTIFY", url.path + '123', payload, headers)
    resp = conn.getresponse()
    if resp.status != 404:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.info("Missing AP id")
    conn = HTTPConnection(url.netloc)
    conn.request("NOTIFY", '/event/' + url.path.split('/')[2],
                 payload, headers)
    time.sleep(0.1)
    logger.info("Incorrect event URL event id")
    conn = HTTPConnection(url.netloc)
    conn.request("NOTIFY", '/event/123456789/123', payload, headers)
    time.sleep(0.1)
    logger.info("Incorrect event URL prefix")
    conn = HTTPConnection(url.netloc)
    conn.request("NOTIFY", '/foobar/123456789/123', payload, headers)
    resp = conn.getresponse()
    if resp.status != 404:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.info("Unsupported request")
    conn = HTTPConnection(url.netloc)
    conn.request("FOOBAR", '/foobar/123456789/123', payload, headers)
    resp = conn.getresponse()
    if resp.status != 501:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    logger.info("Unsupported request and OOM")
    with alloc_fail(dev[0], 1, "wps_er_http_req"):
        conn = HTTPConnection(url.netloc)
        conn.request("FOOBAR", '/foobar/123456789/123', payload, headers)
        time.sleep(0.5)
    # Malformed WLANEvent payloads; none of these may crash the ER.
    logger.info("Too short WLANEvent")
    data = b'\x00'
    send_wlanevent(url, uuid, data)
    logger.info("Invalid WLANEventMAC")
    data = b'\x00qwertyuiopasdfghjklzxcvbnm'
    send_wlanevent(url, uuid, data)
    logger.info("Unknown WLANEventType")
    data = b'\xff02:00:00:00:00:00'
    send_wlanevent(url, uuid, data)
    logger.info("Probe Request notification without any attributes")
    data = b'\x0102:00:00:00:00:00'
    send_wlanevent(url, uuid, data)
    logger.info("Probe Request notification with invalid attribute")
    data = b'\x0102:00:00:00:00:00\xff'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message without any attributes")
    data = b'\x0202:00:00:00:00:00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message with invalid attribute")
    data = b'\x0202:00:00:00:00:00\xff'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message from new STA and not M1")
    data = b'\x0202:ff:ff:ff:ff:ff' + b'\x10\x22\x00\x01\x05'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1")
    # Build a minimal but complete M1 (all mandatory WSC attributes) and
    # keep a copy in m1 for later reuse.
    data = b'\x0202:00:00:00:00:00'
    data += b'\x10\x22\x00\x01\x04'
    data += b'\x10\x47\x00\x10' + 16 * b'\x00'
    data += b'\x10\x20\x00\x06\x02\x00\x00\x00\x00\x00'
    data += b'\x10\x1a\x00\x10' + 16 * b'\x00'
    data += b'\x10\x32\x00\xc0' + 192 * b'\x00'
    data += b'\x10\x04\x00\x02\x00\x00'
    data += b'\x10\x10\x00\x02\x00\x00'
    data += b'\x10\x0d\x00\x01\x00'
    data += b'\x10\x08\x00\x02\x00\x00'
    data += b'\x10\x44\x00\x01\x00'
    data += b'\x10\x21\x00\x00'
    data += b'\x10\x23\x00\x00'
    data += b'\x10\x24\x00\x00'
    data += b'\x10\x42\x00\x00'
    data += b'\x10\x54\x00\x08' + 8 * b'\x00'
    data += b'\x10\x11\x00\x00'
    data += b'\x10\x3c\x00\x01\x00'
    data += b'\x10\x02\x00\x02\x00\x00'
    data += b'\x10\x12\x00\x02\x00\x00'
    data += b'\x10\x09\x00\x02\x00\x00'
    data += b'\x10\x2d\x00\x04\x00\x00\x00\x00'
    m1 = data
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: WSC_ACK")
    data = b'\x0202:00:00:00:00:00' + b'\x10\x22\x00\x01\x0d'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1")
    send_wlanevent(url, uuid, m1)
    logger.info("EAP message: WSC_NACK")
    data = b'\x0202:00:00:00:00:00' + b'\x10\x22\x00\x01\x0e'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 - Too long attribute values")
    data = b'\x0202:00:00:00:00:00'
    data += b'\x10\x11\x00\x21' + 33 * b'\x00'
    data += b'\x10\x45\x00\x21' + 33 * b'\x00'
    data += b'\x10\x42\x00\x21' + 33 * b'\x00'
    data += b'\x10\x24\x00\x21' + 33 * b'\x00'
    data += b'\x10\x23\x00\x21' + 33 * b'\x00'
    data += b'\x10\x21\x00\x41' + 65 * b'\x00'
    data += b'\x10\x49\x00\x09\x00\x37\x2a\x05\x02\x00\x00\x05\x00'
    send_wlanevent(url, uuid, data)
    # Rebuild M1 attribute by attribute; each send below is missing the
    # next attribute that gets appended for the following iteration.
    logger.info("EAP message: M1 missing UUID-E")
    data = b'\x0202:00:00:00:00:00'
    data += b'\x10\x22\x00\x01\x04'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing MAC Address")
    data += b'\x10\x47\x00\x10' + 16 * b'\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Enrollee Nonce")
    data += b'\x10\x20\x00\x06\x02\x00\x00\x00\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Public Key")
    data += b'\x10\x1a\x00\x10' + 16 * b'\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Authentication Type flags")
    data += b'\x10\x32\x00\xc0' + 192 * b'\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Encryption Type Flags")
    data += b'\x10\x04\x00\x02\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Connection Type flags")
    data += b'\x10\x10\x00\x02\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Config Methods")
    data += b'\x10\x0d\x00\x01\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Wi-Fi Protected Setup State")
    data += b'\x10\x08\x00\x02\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Manufacturer")
    data += b'\x10\x44\x00\x01\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Model Name")
    data += b'\x10\x21\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Model Number")
    data += b'\x10\x23\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Serial Number")
    data += b'\x10\x24\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Primary Device Type")
    data += b'\x10\x42\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Device Name")
    data += b'\x10\x54\x00\x08' + 8 * b'\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing RF Bands")
    data += b'\x10\x11\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Association State")
    data += b'\x10\x3c\x00\x01\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Device Password ID")
    data += b'\x10\x02\x00\x02\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing Configuration Error")
    data += b'\x10\x12\x00\x02\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("EAP message: M1 missing OS Version")
    data += b'\x10\x09\x00\x02\x00\x00'
    send_wlanevent(url, uuid, data)
    logger.info("Check max concurrent requests")
    addr = (url.hostname, url.port)
    socks = {}
    for i in range(20):
        socks[i] = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                                 socket.IPPROTO_TCP)
        socks[i].settimeout(10)
        socks[i].connect(addr)
    for i in range(20):
        socks[i].send(b"GET / HTTP/1.1\r\n\r\n")
    count = 0
    for i in range(20):
        try:
            res = socks[i].recv(100).decode()
            if "HTTP/1" in res:
                count += 1
            else:
                logger.info("recv[%d]: len=%d" % (i, len(res)))
        except:
            pass
        socks[i].close()
    logger.info("%d concurrent HTTP GET operations returned response" % count)
    if count < 8:
        raise Exception("Too few concurrent HTTP connections accepted")
    logger.info("OOM in HTTP server")
    for func in ["http_request_init", "httpread_create",
                 "eloop_register_timeout;httpread_create",
                 "eloop_sock_table_add_sock;?eloop_register_sock;httpread_create",
                 "httpread_hdr_analyze"]:
        with alloc_fail(dev[0], 1, func):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                                 socket.IPPROTO_TCP)
            sock.connect(addr)
            sock.send(b"GET / HTTP/1.1\r\n\r\n")
            try:
                sock.recv(100)
            except:
                pass
            sock.close()
    logger.info("Invalid HTTP header")
    for req in [" GET / HTTP/1.1\r\n\r\n",
                "HTTP/1.1 200 OK\r\n\r\n",
                "HTTP/\r\n\r\n",
                "GET %%a%aa% HTTP/1.1\r\n\r\n",
                "GET / HTTP/1.1\r\n FOO\r\n\r\n",
                "NOTIFY / HTTP/1.1\r\n" + 4097*'a' + '\r\n\r\n',
                "NOTIFY / HTTP/1.1\r\n\r\n" + 8193*'a',
                "POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n foo\r\n",
                "POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n1\r\nfoo\r\n",
                "POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n0\r\n",
                "POST / HTTP/1.1\r\nTransfer-Encoding: CHUNKED\r\n\r\n0\r\naa\ra\r\n\ra"]:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                             socket.IPPROTO_TCP)
        sock.settimeout(0.1)
        sock.connect(addr)
        sock.send(req.encode())
        try:
            sock.recv(100)
        except:
            pass
        sock.close()
    with alloc_fail(dev[0], 2, "httpread_read_handler"):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                             socket.IPPROTO_TCP)
        sock.connect(addr)
        sock.send(b"NOTIFY / HTTP/1.1\r\n\r\n" + 4500 * b'a')
        try:
            sock.recv(100)
        except:
            pass
        sock.close()
    # NOTIFY with invalid XML bodies must still be answered with 200.
    conn = HTTPConnection(url.netloc)
    payload = '<foo'
    headers = {"Content-type": 'text/xml; charset="utf-8"',
               "Server": "Unspecified, UPnP/1.0, Unspecified",
               "HOST": url.netloc,
               "NT": "upnp:event",
               "SID": "uuid:" + uuid,
               "SEQ": "0",
               "Content-Length": str(len(payload))}
    conn.request("NOTIFY", url.path, payload, headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    conn = HTTPConnection(url.netloc)
    payload = '<WLANEvent foo></WLANEvent>'
    headers = {"Content-type": 'text/xml; charset="utf-8"',
               "Server": "Unspecified, UPnP/1.0, Unspecified",
               "HOST": url.netloc,
               "NT": "upnp:event",
               "SID": "uuid:" + uuid,
               "SEQ": "0",
               "Content-Length": str(len(payload))}
    conn.request("NOTIFY", url.path, payload, headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    # OOM during event parsing and M1 attribute processing.
    with alloc_fail(dev[0], 1, "xml_get_first_item"):
        send_wlanevent(url, uuid, b'')
    with alloc_fail(dev[0], 1, "wpabuf_alloc_ext_data;xml_get_base64_item"):
        send_wlanevent(url, uuid, b'foo')
    for func in ["wps_init",
                 "wps_process_manufacturer",
                 "wps_process_model_name",
                 "wps_process_model_number",
                 "wps_process_serial_number",
                 "wps_process_dev_name"]:
        with alloc_fail(dev[0], 1, func):
            send_wlanevent(url, uuid, m1)
    with alloc_fail(dev[0], 1, "wps_er_http_resp_ok"):
        send_wlanevent(url, uuid, m1, no_response=True)
    with alloc_fail(dev[0], 1, "wps_er_http_resp_not_found"):
        url2 = urlparse(wps_event_url.replace('/event/', '/notfound/'))
        send_wlanevent(url2, uuid, m1, no_response=True)
    logger.info("EAP message: M1")
    # Same M1 contents as above but from a different STA MAC address so a
    # new enrollee entry gets created.
    data = b'\x0202:11:22:00:00:00'
    data += b'\x10\x22\x00\x01\x04'
    data += b'\x10\x47\x00\x10' + 16 * b'\x00'
    data += b'\x10\x20\x00\x06\x02\x00\x00\x00\x00\x00'
    data += b'\x10\x1a\x00\x10' + 16 * b'\x00'
    data += b'\x10\x32\x00\xc0' + 192 * b'\x00'
    data += b'\x10\x04\x00\x02\x00\x00'
    data += b'\x10\x10\x00\x02\x00\x00'
    data += b'\x10\x0d\x00\x01\x00'
    data += b'\x10\x08\x00\x02\x00\x00'
    data += b'\x10\x44\x00\x01\x00'
    data += b'\x10\x21\x00\x00'
    data += b'\x10\x23\x00\x00'
    data += b'\x10\x24\x00\x00'
    data += b'\x10\x42\x00\x00'
    data += b'\x10\x54\x00\x08' + 8 * b'\x00'
    data += b'\x10\x11\x00\x00'
    data += b'\x10\x3c\x00\x01\x00'
    data += b'\x10\x02\x00\x02\x00\x00'
    data += b'\x10\x12\x00\x02\x00\x00'
    data += b'\x10\x09\x00\x02\x00\x00'
    data += b'\x10\x2d\x00\x04\x00\x00\x00\x00'
    dev[0].dump_monitor()
    with alloc_fail(dev[0], 1, "wps_er_add_sta_data"):
        send_wlanevent(url, uuid, data)
        ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=0.1)
        if ev is not None:
            raise Exception("Unexpected enrollee add event")
    send_wlanevent(url, uuid, data)
    ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=2)
    if ev is None:
        raise Exception("Enrollee add event not seen")
    with alloc_fail(dev[0], 1,
                    "base64_gen_encode;?base64_encode;wps_er_soap_hdr"):
        send_wlanevent(url, uuid, data)
    with alloc_fail(dev[0], 1, "wpabuf_alloc;wps_er_soap_hdr"):
        send_wlanevent(url, uuid, data)
    with alloc_fail(dev[0], 1, "http_client_url_parse;wps_er_sta_send_msg"):
        send_wlanevent(url, uuid, data)
    with alloc_fail(dev[0], 1, "http_client_addr;wps_er_sta_send_msg"):
        send_wlanevent(url, uuid, data)
def test_ap_wps_er_http_proto_no_event_sub_url(dev, apdev):
    """WPS ER HTTP protocol testing - no eventSubURL"""
    # AP stub whose UPnP device description omits the eventSubURL element.
    class NoEventSubURLServer(WPSAPHTTPServer):
        def handle_upnp_info(self):
            info = gen_upnp_info(eventSubURL=None)
            self.wfile.write(info)
    run_wps_er_proto_test(dev[0], NoEventSubURLServer, no_event_url=True)
def test_ap_wps_er_http_proto_event_sub_url_dns(dev, apdev):
    """WPS ER HTTP protocol testing - DNS name in eventSubURL"""
    # eventSubURL uses a DNS hostname instead of an IP address; the test is
    # run with no_event_url=True, i.e. no event subscription is expected.
    class DnsEventSubURLServer(WPSAPHTTPServer):
        def handle_upnp_info(self):
            info = gen_upnp_info(eventSubURL='http://example.com/wps_event')
            self.wfile.write(info)
    run_wps_er_proto_test(dev[0], DnsEventSubURLServer, no_event_url=True)
def test_ap_wps_er_http_proto_subscribe_oom(dev, apdev):
    """WPS ER HTTP protocol testing - subscribe OOM"""
    try:
        _test_ap_wps_er_http_proto_subscribe_oom(dev, apdev)
    finally:
        # Always stop the ER so a failure here does not leak into later tests.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_http_proto_subscribe_oom(dev, apdev):
    # (count, failing allocation point) pairs covering the allocation paths
    # exercised while the ER subscribes to the AP's UPnP event URL.
    tests = [(1, "http_client_url_parse"),
             (1, "wpabuf_alloc;wps_er_subscribe"),
             (1, "http_client_addr"),
             (1, "eloop_sock_table_add_sock;?eloop_register_sock;http_client_addr"),
             (1, "eloop_register_timeout;http_client_addr")]
    for count, func in tests:
        with alloc_fail(dev[0], count, func):
            server, sock = wps_er_start(dev[0], WPSAPHTTPServer)
            # Serve the UPnP info request and the (failing) SUBSCRIBE attempt.
            server.handle_request()
            server.handle_request()
            wps_er_stop(dev[0], sock, server, on_alloc_fail=True)
def test_ap_wps_er_http_proto_no_sid(dev, apdev):
    """WPS ER HTTP protocol testing - no SID"""
    # SUBSCRIBE response without any SID header.
    class NoSidServer(WPSAPHTTPServer):
        def handle_wps_event(self):
            resp = gen_wps_event(sid=None)
            self.wfile.write(resp)
    run_wps_er_proto_test(dev[0], NoSidServer)
def test_ap_wps_er_http_proto_invalid_sid_no_uuid(dev, apdev):
    """WPS ER HTTP protocol testing - invalid SID - no UUID"""
    # SID header value without the "uuid:" prefix.
    class SidWithoutUuidServer(WPSAPHTTPServer):
        def handle_wps_event(self):
            resp = gen_wps_event(sid='FOO')
            self.wfile.write(resp)
    run_wps_er_proto_test(dev[0], SidWithoutUuidServer)
def test_ap_wps_er_http_proto_invalid_sid_uuid(dev, apdev):
    """WPS ER HTTP protocol testing - invalid SID UUID"""
    # SID header with a "uuid:" prefix but a malformed UUID value.
    class MalformedSidUuidServer(WPSAPHTTPServer):
        def handle_wps_event(self):
            resp = gen_wps_event(sid='uuid:FOO')
            self.wfile.write(resp)
    run_wps_er_proto_test(dev[0], MalformedSidUuidServer)
def test_ap_wps_er_http_proto_subscribe_failing(dev, apdev):
    """WPS ER HTTP protocol testing - SUBSCRIBE failing"""
    class WPSAPHTTPServer_fail_subscribe(WPSAPHTTPServer):
        def handle_wps_event(self):
            # Reject the ER's SUBSCRIBE request with a 404 error response.
            payload = ""
            hdr = 'HTTP/1.1 404 Not Found\r\n' + \
                  'Content-Type: text/xml; charset="utf-8"\r\n' + \
                  'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
                  'Connection: close\r\n' + \
                  'Content-Length: ' + str(len(payload)) + '\r\n' + \
                  'Timeout: Second-1801\r\n' + \
                  'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
            self.wfile.write((hdr + payload).encode())
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_fail_subscribe)
def test_ap_wps_er_http_proto_subscribe_invalid_response(dev, apdev):
    """WPS ER HTTP protocol testing - SUBSCRIBE and invalid response"""
    class WPSAPHTTPServer_subscribe_invalid_response(WPSAPHTTPServer):
        def handle_wps_event(self):
            # Malformed HTTP status line ("HTTP/1.1 FOO") in the SUBSCRIBE
            # response.
            payload = ""
            hdr = 'HTTP/1.1 FOO\r\n' + \
                  'Content-Type: text/xml; charset="utf-8"\r\n' + \
                  'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
                  'Connection: close\r\n' + \
                  'Content-Length: ' + str(len(payload)) + '\r\n' + \
                  'Timeout: Second-1801\r\n' + \
                  'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
            self.wfile.write((hdr + payload).encode())
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_subscribe_invalid_response)
def test_ap_wps_er_http_proto_invalid_m1(dev, apdev):
    """WPS ER HTTP protocol testing - invalid M1 in GetDeviceInfoResponse"""
    # NOTE(review): this function was previously a duplicate definition of
    # test_ap_wps_er_http_proto_subscribe_invalid_response, which silently
    # shadowed the preceding test of that name so it never ran; renamed (and
    # the copy-pasted docstring fixed) so both tests are discovered.
    class WPSAPHTTPServer_invalid_m1(WPSAPHTTPServer):
        def handle_wps_control(self):
            # GetDeviceInfoResponse whose NewDeviceInfo ("Rk9P" = base64 of
            # "FOO") is not a valid WPS M1 message.
            payload = '''<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:GetDeviceInfoResponse xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">
<NewDeviceInfo>Rk9P</NewDeviceInfo>
</u:GetDeviceInfoResponse>
</s:Body>
</s:Envelope>
'''
            self.wfile.write(gen_wps_control(payload_override=payload))
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_invalid_m1, no_event_url=True)
def test_ap_wps_er_http_proto_upnp_info_no_device(dev, apdev):
    """WPS ER HTTP protocol testing - No device in UPnP info"""
    class WPSAPHTTPServer_no_device(WPSAPHTTPServer):
        def handle_upnp_info(self):
            # UPnP device description without any <device> element.
            payload = '''<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
</root>
'''
            hdr = 'HTTP/1.1 200 OK\r\n' + \
                  'Content-Type: text/xml; charset="utf-8"\r\n' + \
                  'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
                  'Connection: close\r\n' + \
                  'Content-Length: ' + str(len(payload)) + '\r\n' + \
                  'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
            self.wfile.write((hdr + payload).encode())
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_device, no_event_url=True)
def test_ap_wps_er_http_proto_upnp_info_no_device_type(dev, apdev):
    """WPS ER HTTP protocol testing - No deviceType in UPnP info"""
    # Inner class renamed from WPSAPHTTPServer_no_device (copy-paste from the
    # preceding "No device" test) to reflect what this test exercises.
    class WPSAPHTTPServer_no_device_type(WPSAPHTTPServer):
        def handle_upnp_info(self):
            # Device description with a <device> element but no <deviceType>.
            payload = '''<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<device>
</device>
</root>
'''
            hdr = 'HTTP/1.1 200 OK\r\n' + \
                  'Content-Type: text/xml; charset="utf-8"\r\n' + \
                  'Server: Unspecified, UPnP/1.0, Unspecified\r\n' + \
                  'Connection: close\r\n' + \
                  'Content-Length: ' + str(len(payload)) + '\r\n' + \
                  'Date: Sat, 15 Aug 2015 18:55:08 GMT\r\n\r\n'
            self.wfile.write((hdr + payload).encode())
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_no_device_type,
                          no_event_url=True)
def test_ap_wps_er_http_proto_upnp_info_invalid_udn_uuid(dev, apdev):
    """WPS ER HTTP protocol testing - Invalid UDN UUID"""
    # UPnP description whose UDN carries a malformed UUID value.
    class InvalidUdnUuidServer(WPSAPHTTPServer):
        def handle_upnp_info(self):
            info = gen_upnp_info(udn='uuid:foo')
            self.wfile.write(info)
    run_wps_er_proto_test(dev[0], InvalidUdnUuidServer)
def test_ap_wps_er_http_proto_no_control_url(dev, apdev):
    """WPS ER HTTP protocol testing - no controlURL"""
    # UPnP description that omits the controlURL element.
    class NoControlUrlServer(WPSAPHTTPServer):
        def handle_upnp_info(self):
            info = gen_upnp_info(controlURL=None)
            self.wfile.write(info)
    run_wps_er_proto_test(dev[0], NoControlUrlServer, no_event_url=True)
def test_ap_wps_er_http_proto_control_url_dns(dev, apdev):
    """WPS ER HTTP protocol testing - DNS name in controlURL"""
    # controlURL uses a DNS hostname instead of an IP address.
    class DnsControlUrlServer(WPSAPHTTPServer):
        def handle_upnp_info(self):
            info = gen_upnp_info(controlURL='http://example.com/wps_control')
            self.wfile.write(info)
    run_wps_er_proto_test(dev[0], DnsControlUrlServer, no_event_url=True)
def test_ap_wps_http_timeout(dev, apdev):
    """WPS AP/ER and HTTP timeout"""
    try:
        _test_ap_wps_http_timeout(dev, apdev)
    finally:
        # Always stop the ER so a failure here does not leak into later tests.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_http_timeout(dev, apdev):
    """Helper: exercise HTTP timeout handling in hostapd and the WPS ER."""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    url = urlparse(location)
    addr = (url.hostname, url.port)
    logger.debug("Open HTTP connection to hostapd, but do not complete request")
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                         socket.IPPROTO_TCP)
    sock.connect(addr)
    sock.send(b"G")

    class StubServer(StreamRequestHandler):
        def handle(self):
            logger.debug("StubServer - start 31 sec wait")
            time.sleep(31)
            logger.debug("StubServer - wait done")

    logger.debug("Start WPS ER")
    server, sock2 = wps_er_start(dev[0], StubServer, max_age=40,
                                 wait_m_search=True)

    logger.debug("Start server to accept, but not complete, HTTP connection from WPS ER")
    # This will wait for 31 seconds..
    server.handle_request()

    logger.debug("Complete HTTP connection with hostapd (that should have already closed the connection)")
    try:
        # Fix: the request tail must be bytes (like the earlier b"G"); a str
        # argument would raise TypeError on a Python 3 socket and be silently
        # swallowed by the except below, so the completion was never sent.
        sock.send(b"ET / HTTP/1.1\r\n\r\n")
        res = sock.recv(100)
        sock.close()
    except:
        # hostapd is expected to have closed the connection already; ignore
        # the resulting socket errors (best effort).
        pass
def test_ap_wps_er_url_parse(dev, apdev):
    """WPS ER and URL parsing special cases"""
    try:
        _test_ap_wps_er_url_parse(dev, apdev)
    finally:
        # Always stop the ER so a failure here does not leak into later tests.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_url_parse(dev, apdev):
    # Act as the SSDP responder on the multicast group and answer the ER's
    # M-SEARCH with crafted Location headers to hit URL parser corner cases.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.settimeout(1)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(("239.255.255.250", 1900))
    dev[0].request("WPS_ER_START ifname=lo")
    (msg, addr) = sock.recvfrom(1000)
    msg = msg.decode()
    logger.debug("Received SSDP message from %s: %s" % (str(addr), msg))
    if "M-SEARCH" not in msg:
        raise Exception("Not an M-SEARCH")
    # Location URL with no path component
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1\r\ncache-control:max-age=1\r\n\r\n", addr)
    ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=2)
    # Location URL with ':' in the path
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://127.0.0.1/:foo\r\ncache-control:max-age=1\r\n\r\n", addr)
    ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=2)
    # Location URL with broadcast address and port 0
    sock.sendto(b"HTTP/1.1 200 OK\r\nST: urn:schemas-wifialliance-org:device:WFADevice:1\r\nlocation:http://255.255.255.255:0/foo.xml\r\ncache-control:max-age=1\r\n\r\n", addr)
    ev = dev[0].wait_event(["WPS-ER-AP-REMOVE"], timeout=2)

    sock.close()
def test_ap_wps_er_link_update(dev, apdev):
    """WPS ER and link update special cases"""
    class WPSAPHTTPServer_link_update(WPSAPHTTPServer):
        def handle_upnp_info(self):
            # Relative controlURL that has to be combined with the base URL.
            self.wfile.write(gen_upnp_info(controlURL='/wps_control'))
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_link_update)

    class WPSAPHTTPServer_link_update2(WPSAPHTTPServer):
        def handle_others(self, data):
            # Serve the device description from the root path instead of the
            # default location.
            if "GET / " in data:
                self.wfile.write(gen_upnp_info(controlURL='/wps_control'))
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_link_update2,
                          location_url='http://127.0.0.1:12345')
def test_ap_wps_er_http_client(dev, apdev):
    """WPS ER and HTTP client special cases"""
    # OOM at various points in the ER's HTTP client code paths.
    with alloc_fail(dev[0], 1, "http_link_update"):
        run_wps_er_proto_test(dev[0], WPSAPHTTPServer)

    with alloc_fail(dev[0], 1, "wpabuf_alloc;http_client_url"):
        run_wps_er_proto_test(dev[0], WPSAPHTTPServer, no_event_url=True)

    with alloc_fail(dev[0], 1, "httpread_create;http_client_tx_ready"):
        run_wps_er_proto_test(dev[0], WPSAPHTTPServer, no_event_url=True)

    # Server that sends an HTTP request line where a response is expected.
    class WPSAPHTTPServer_req_as_resp(WPSAPHTTPServer):
        def handle_upnp_info(self):
            self.wfile.write(b"GET / HTTP/1.1\r\n\r\n")
    run_wps_er_proto_test(dev[0], WPSAPHTTPServer_req_as_resp,
                          no_event_url=True)
def test_ap_wps_er_http_client_timeout(dev, apdev):
    """WPS ER and HTTP client timeout"""
    # Stall the UPnP info response past the ER's HTTP client timeout before
    # writing anything.
    class SlowUpnpInfoServer(WPSAPHTTPServer):
        def handle_upnp_info(self):
            time.sleep(31)
            self.wfile.write(b"GET / HTTP/1.1\r\n\r\n")
    run_wps_er_proto_test(dev[0], SlowUpnpInfoServer, no_event_url=True,
                          max_age=60)
def test_ap_wps_init_oom(dev, apdev):
    """wps_init OOM cases"""
    ssid = "test-wps"
    appin = "12345670"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "ap_pin": appin}
    hapd = hostapd.add_ap(apdev[0], params)
    pin = dev[0].wps_read_pin()
    # OOM in wps_init() on the AP side
    with alloc_fail(hapd, 1, "wps_init"):
        hapd.request("WPS_PIN any " + pin)
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
        if ev is None:
            raise Exception("No EAP failure reported")
        dev[0].request("WPS_CANCEL")

    # OOM in wps_init() on the station side (PIN case)
    with alloc_fail(dev[0], 2, "wps_init"):
        hapd.request("WPS_PIN any " + pin)
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
        if ev is None:
            raise Exception("No EAP failure reported")
        dev[0].request("WPS_CANCEL")

    # OOM in wps_init() on the station side (PBC case)
    with alloc_fail(dev[0], 2, "wps_init"):
        hapd.request("WPS_PBC")
        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        dev[0].request("WPS_PBC %s" % (apdev[0]['bssid']))
        ev = hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
        if ev is None:
            raise Exception("No EAP failure reported")
        dev[0].request("WPS_CANCEL")

    dev[0].dump_monitor()
    new_ssid = "wps-new-ssid"
    new_passphrase = "1234567890"
    # OOM in wps_init() during an external registrar (WPS_REG) operation
    with alloc_fail(dev[0], 3, "wps_init"):
        dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
                       new_passphrase, no_wait=True)
        ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
        if ev is None:
            raise Exception("No EAP failure reported")

    dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_invalid_assoc_req_elem(dev, apdev):
    """WPS and invalid IE in Association Request frame"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    pin = "12345670"
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    try:
        # Add a truncated WSC vendor IE (dd 05 00 50 f2 04 10) to the
        # (Re)Association Request frames.
        dev[0].request("VENDOR_ELEM_ADD 13 dd050050f20410")
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        # Wait until the EAP-WSC method (expanded vendor 14122) is proposed.
        for i in range(5):
            ev = hapd.wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=10)
            if ev and "vendor=14122" in ev:
                break
        if ev is None or "vendor=14122" not in ev:
            raise Exception("EAP-WSC not started")
        dev[0].request("WPS_CANCEL")
    finally:
        # Remove the injected vendor element regardless of the outcome.
        dev[0].request("VENDOR_ELEM_REMOVE 13 *")
def test_ap_wps_pbc_pin_mismatch(dev, apdev):
    """WPS PBC/PIN mismatch"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.request("SET wps_version_number 0x10")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # AP in PBC mode while the station attempts a PIN exchange
    hapd.request("WPS_PBC")
    pin = dev[0].wps_read_pin()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
    if ev is None:
        raise Exception("Scan did not complete")
    dev[0].request("WPS_CANCEL")
    hapd.request("WPS_CANCEL")
    dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_ie_invalid(dev, apdev):
    """WPS PIN attempt with AP that has invalid WSC IE"""
    ssid = "test-wps"
    # Truncated WSC vendor IE advertised by both APs
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "vendor_elements": "dd050050f20410"}
    hapd = hostapd.add_ap(apdev[0], params)
    params = {'ssid': "another", "vendor_elements": "dd050050f20410"}
    hostapd.add_ap(apdev[1], params)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    pin = dev[0].wps_read_pin()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
    if ev is None:
        raise Exception("Scan did not complete")
    dev[0].request("WPS_CANCEL")
@remote_compatible
def test_ap_wps_scan_prio_order(dev, apdev):
    """WPS scan priority ordering"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    # Second AP with a truncated WSC vendor IE
    params = {'ssid': "another", "vendor_elements": "dd050050f20410"}
    hostapd.add_ap(apdev[1], params)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
    pin = dev[0].wps_read_pin()
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
    if ev is None:
        raise Exception("Scan did not complete")
    dev[0].request("WPS_CANCEL")
def test_ap_wps_probe_req_ie_oom(dev, apdev):
    """WPS ProbeReq IE OOM"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # OOM while building the WSC IE for Probe Request frames
    with alloc_fail(dev[0], 1, "wps_build_probe_req_ie"):
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
        if ev is None:
            raise Exception("Association not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()

    # OOM while encapsulating the WSC IE
    with alloc_fail(dev[0], 1, "wps_ie_encapsulate"):
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
        if ev is None:
            raise Exception("Association not seen")
    dev[0].request("WPS_CANCEL")

    hapd.disable()
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    time.sleep(0.2)
    dev[0].flush_scan_cache()
def test_ap_wps_assoc_req_ie_oom(dev, apdev):
    """WPS AssocReq IE OOM"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # OOM while building the WSC IE for the Association Request frame
    with alloc_fail(dev[0], 1, "wps_build_assoc_req_ie"):
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
        if ev is None:
            raise Exception("Association not seen")
    dev[0].request("WPS_CANCEL")
def test_ap_wps_assoc_resp_ie_oom(dev, apdev):
    """WPS AssocResp IE OOM"""
    ssid = "test-wps"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2"}
    hapd = hostapd.add_ap(apdev[0], params)
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # OOM on the AP while building the WSC IE for the Association Response
    with alloc_fail(hapd, 1, "wps_build_assoc_resp_ie"):
        dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
        ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=10)
        if ev is None:
            raise Exception("Association not seen")
    dev[0].request("WPS_CANCEL")
@remote_compatible
def test_ap_wps_bss_info_errors(dev, apdev):
    """WPS BSS info errors"""
    # WSC IE with a Device Name attribute (0x1011, len 1) but no WPS State
    params = {"ssid": "1",
              "vendor_elements": "dd0e0050f20410440001ff101100010a"}
    hostapd.add_ap(apdev[0], params)
    # Truncated WSC IE on the second AP
    params = {'ssid': "2", "vendor_elements": "dd050050f20410"}
    hostapd.add_ap(apdev[1], params)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].scan_for_bss(apdev[1]['bssid'], freq="2412")
    bss = dev[0].get_bss(apdev[0]['bssid'])
    logger.info("BSS: " + str(bss))
    if "wps_state" in bss:
        raise Exception("Unexpected wps_state in BSS info")
    if 'wps_device_name' not in bss:
        raise Exception("No wps_device_name in BSS info")
    # Non-printable device name byte is expected to be rendered as '_'
    if bss['wps_device_name'] != '_':
        raise Exception("Unexpected wps_device_name value")
    bss = dev[0].get_bss(apdev[1]['bssid'])
    logger.info("BSS: " + str(bss))

    # OOM while generating the WPS attribute text for BSS info
    with alloc_fail(dev[0], 1, "=wps_attr_text"):
        bss = dev[0].get_bss(apdev[0]['bssid'])
        logger.info("BSS(OOM): " + str(bss))
def wps_run_pbc_fail_ap(apdev, dev, hapd):
    # Run a PBC exchange that is expected to fail (e.g. due to an injected
    # allocation/RNG failure set up by the caller) and clean up afterwards.
    hapd.request("WPS_PBC")
    dev.scan_for_bss(apdev['bssid'], freq="2412")
    dev.request("WPS_PBC " + apdev['bssid'])
    ev = dev.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
    if ev is None:
        raise Exception("No EAP failure reported")
    dev.request("WPS_CANCEL")
    dev.wait_disconnected()
    # flush_scan_cache() may fail transiently right after disconnection;
    # retry a few times before giving up.
    for i in range(5):
        try:
            dev.flush_scan_cache()
            break
        except Exception as e:
            if str(e).startswith("Failed to trigger scan"):
                # Try again
                time.sleep(1)
            else:
                raise
def wps_run_pbc_fail(apdev, dev):
    # Start a WPS-enabled AP and run a PBC exchange expected to fail.
    ap = wps_start_ap(apdev)
    wps_run_pbc_fail_ap(apdev, dev, ap)
@remote_compatible
def test_ap_wps_pk_oom(dev, apdev):
    """WPS and public key OOM"""
    # Fail the first allocation inside wps_build_public_key on dev[0].
    oom = alloc_fail(dev[0], 1, "wps_build_public_key")
    with oom:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_pk_oom_ap(dev, apdev):
    """WPS and public key OOM on AP"""
    hapd = wps_start_ap(apdev[0])
    # Fail the first allocation inside wps_build_public_key in hostapd.
    oom = alloc_fail(hapd, 1, "wps_build_public_key")
    with oom:
        wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
@remote_compatible
def test_ap_wps_encr_oom_ap(dev, apdev):
    """WPS and encrypted settings decryption OOM on AP"""
    hapd = wps_start_ap(apdev[0])
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # OOM in the AP's Encrypted Settings decryption path
    with alloc_fail(hapd, 1, "wps_decrypt_encr_settings"):
        dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " " + pin)
        ev = hapd.wait_event(["WPS-FAIL"], timeout=10)
        if ev is None:
            raise Exception("No WPS-FAIL reported")
        dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
@remote_compatible
def test_ap_wps_encr_no_random_ap(dev, apdev):
    """WPS and no random data available for encryption on AP"""
    hapd = wps_start_ap(apdev[0])
    # Make os_get_random() fail inside wps_build_encr_settings on the AP.
    rng_failure = fail_test(hapd, 1, "os_get_random;wps_build_encr_settings")
    with rng_failure:
        wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
@remote_compatible
def test_ap_wps_e_hash_no_random_sta(dev, apdev):
    """WPS and no random data available for e-hash on STA"""
    # Make os_get_random() fail inside wps_build_e_hash on dev[0].
    rng_failure = fail_test(dev[0], 1, "os_get_random;wps_build_e_hash")
    with rng_failure:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m1_no_random(dev, apdev):
    """WPS and no random for M1 on STA"""
    # Make os_get_random() fail while dev[0] builds M1.
    rng_failure = fail_test(dev[0], 1, "os_get_random;wps_build_m1")
    with rng_failure:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m1_oom(dev, apdev):
    """WPS and OOM for M1 on STA"""
    # Fail the first allocation while dev[0] builds M1.
    oom = alloc_fail(dev[0], 1, "wps_build_m1")
    with oom:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m3_oom(dev, apdev):
    """WPS and OOM for M3 on STA"""
    # Fail the first allocation while dev[0] builds M3.
    oom = alloc_fail(dev[0], 1, "wps_build_m3")
    with oom:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m5_oom(dev, apdev):
    """WPS and OOM for M5 on STA"""
    hapd = wps_start_ap(apdev[0])
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # Cover both allocation points within wps_build_m5()
    for i in range(1, 3):
        with alloc_fail(dev[0], i, "wps_build_m5"):
            dev[0].request("WPS_PBC " + apdev[0]['bssid'])
            ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
            if ev is None:
                raise Exception("No EAP failure reported")
            dev[0].request("WPS_CANCEL")
            dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_m5_no_random(dev, apdev):
    """WPS and no random for M5 on STA"""
    # Make os_get_random() fail in the Encrypted Settings path of M5.
    rng_failure = fail_test(dev[0], 1,
                            "os_get_random;wps_build_encr_settings;wps_build_m5")
    with rng_failure:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_m7_oom(dev, apdev):
    """WPS and OOM for M7 on STA"""
    hapd = wps_start_ap(apdev[0])
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    # Cover both allocation points within wps_build_m7()
    for i in range(1, 3):
        with alloc_fail(dev[0], i, "wps_build_m7"):
            dev[0].request("WPS_PBC " + apdev[0]['bssid'])
            ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
            if ev is None:
                raise Exception("No EAP failure reported")
            dev[0].request("WPS_CANCEL")
            dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
@remote_compatible
def test_ap_wps_m7_no_random(dev, apdev):
    """WPS and no random for M7 on STA"""
    # Make os_get_random() fail in the Encrypted Settings path of M7.
    rng_failure = fail_test(dev[0], 1,
                            "os_get_random;wps_build_encr_settings;wps_build_m7")
    with rng_failure:
        wps_run_pbc_fail(apdev[0], dev[0])
@remote_compatible
def test_ap_wps_wsc_done_oom(dev, apdev):
    """WPS and OOM for WSC_Done on STA"""
    # Fail the first allocation while dev[0] builds WSC_Done.
    oom = alloc_fail(dev[0], 1, "wps_build_wsc_done")
    with oom:
        wps_run_pbc_fail(apdev[0], dev[0])
def test_ap_wps_random_psk_fail(dev, apdev):
    """WPS and no random for PSK on AP"""
    ssid = "test-wps"
    pskfile = "/tmp/ap_wps_per_enrollee_psk.psk_file"
    appin = "12345670"
    # Remove any stale PSK file from a previous run (best effort).
    try:
        os.remove(pskfile)
    except:
        pass

    try:
        with open(pskfile, "w") as f:
            f.write("# WPA PSKs\n")

        params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                  "wpa": "2", "wpa_key_mgmt": "WPA-PSK",
                  "rsn_pairwise": "CCMP", "ap_pin": appin,
                  "wpa_psk_file": pskfile}
        hapd = hostapd.add_ap(apdev[0], params)

        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
        # RNG failure while the AP generates the per-enrollee network key
        with fail_test(hapd, 1, "os_get_random;wps_build_cred_network_key"):
            dev[0].request("WPS_REG " + apdev[0]['bssid'] + " " + appin)
            ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=10)
            if ev is None:
                raise Exception("No EAP failure reported")
            dev[0].request("WPS_CANCEL")
        dev[0].wait_disconnected()

        # RNG failure and OOM cases in the AP's Credential building path
        with fail_test(hapd, 1, "os_get_random;wps_build_cred"):
            wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
        with alloc_fail(hapd, 1, "wps_build_cred"):
            wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
        with alloc_fail(hapd, 2, "wps_build_cred"):
            wps_run_pbc_fail_ap(apdev[0], dev[0], hapd)
    finally:
        os.remove(pskfile)
def wps_ext_eap_identity_req(dev, hapd, bssid):
    """Relay the EAP-Identity/Request frame from hostapd to the station."""
    logger.debug("EAP-Identity/Request")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX from hostapd")
    frame = ev.split(' ')[2]
    res = dev.request("EAPOL_RX " + bssid + " " + frame)
    if "OK" not in res:
        raise Exception("EAPOL_RX to wpa_supplicant failed")
def wps_ext_eap_identity_resp(hapd, dev, addr):
    """Relay the EAP-Identity/Response frame from the station to hostapd."""
    ev = dev.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX from wpa_supplicant")
    frame = ev.split(' ')[2]
    res = hapd.request("EAPOL_RX " + addr + " " + frame)
    if "OK" not in res:
        raise Exception("EAPOL_RX to hostapd failed")
def wps_ext_eap_wsc(dst, src, src_addr, msg):
    """Relay one externally handled EAP-WSC frame from src to dst.

    msg is only a debug label for the frame being relayed (e.g. "M1").
    """
    logger.debug(msg)
    ev = src.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    frame = ev.split(' ')[2]
    if "OK" not in dst.request("EAPOL_RX " + src_addr + " " + frame):
        raise Exception("EAPOL_RX failed")
def wps_start_ext(apdev, dev, pbc=False, pin=None):
    # Start an AP and a station for a WPS exchange with external EAPOL frame
    # I/O enabled on both ends so the caller can relay and modify each frame.
    # Returns (station address, AP bssid, hostapd instance).
    addr = dev.own_addr()
    bssid = apdev['bssid']
    ssid = "test-wps-conf"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev, params)

    if pbc:
        hapd.request("WPS_PBC")
    else:
        # Generate a PIN on the station if the caller did not provide one.
        if pin is None:
            pin = dev.wps_read_pin()
        hapd.request("WPS_PIN any " + pin)
    dev.scan_for_bss(bssid, freq="2412")
    hapd.request("SET ext_eapol_frame_io 1")
    dev.request("SET ext_eapol_frame_io 1")

    if pbc:
        dev.request("WPS_PBC " + bssid)
    else:
        dev.request("WPS_PIN " + bssid + " " + pin)
    return addr, bssid, hapd
def wps_auth_corrupt(dst, src, addr):
    # Capture the next externally relayed EAPOL frame from src, flip the last
    # nibble of its Authenticator attribute value, and inject it into dst.
    ev = src.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    src.request("SET ext_eapol_frame_io 0")
    dst.request("SET ext_eapol_frame_io 0")
    msg = ev.split(' ')[2]
    # Authenticator attribute: type 0x1005, length 0x0008, 8-octet value,
    # i.e. the last 24 hex digits of the frame.
    if msg[-24:-16] != '10050008':
        raise Exception("Could not find Authenticator attribute")
    # Corrupt Authenticator value
    msg = msg[:-1] + '%x' % ((int(msg[-1], 16) + 1) % 16)
    res = dst.request("EAPOL_RX " + addr + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
def wps_fail_finish(hapd, dev, fail_str):
    # Wait for hostapd to report WPS-FAIL with the expected reason substring,
    # then cancel WPS on the station and wait for it to disconnect.
    ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
    if ev is None:
        raise Exception("WPS-FAIL not indicated")
    if fail_str not in ev:
        raise Exception("Unexpected WPS-FAIL value: " + ev)
    dev.request("WPS_CANCEL")
    dev.wait_disconnected()
def wps_auth_corrupt_from_ap(dev, hapd, bssid, fail_str):
    # Corrupt the Authenticator in the next AP-to-station message and verify
    # the expected WPS-FAIL reason.
    wps_auth_corrupt(dev, hapd, bssid)
    wps_fail_finish(hapd, dev, fail_str)
def wps_auth_corrupt_to_ap(dev, hapd, addr, fail_str):
    # Corrupt the Authenticator in the next station-to-AP message and verify
    # the expected WPS-FAIL reason.
    wps_auth_corrupt(hapd, dev, addr)
    wps_fail_finish(hapd, dev, fail_str)
def test_ap_wps_authenticator_mismatch_m2(dev, apdev):
    """WPS and Authenticator attribute mismatch in M2"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    # Corrupt the Authenticator in M2 (AP -> STA) and expect msg=5 failure
    wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=5")
def test_ap_wps_authenticator_mismatch_m3(dev, apdev):
    """WPS and Authenticator attribute mismatch in M3"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
    logger.debug("M3")
    # Corrupt the Authenticator in M3 (STA -> AP) and expect msg=7 failure
    wps_auth_corrupt_to_ap(dev[0], hapd, addr, "msg=7")
def test_ap_wps_authenticator_mismatch_m4(dev, apdev):
    """WPS and Authenticator attribute mismatch in M4"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
    logger.debug("M4")
    # Corrupt the Authenticator in M4 (AP -> STA) and expect msg=8 failure
    wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=8")
def test_ap_wps_authenticator_mismatch_m5(dev, apdev):
    """WPS and Authenticator attribute mismatch in M5"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
    logger.debug("M5")
    # Corrupt the Authenticator in M5 (STA -> AP) and expect msg=9 failure
    wps_auth_corrupt_to_ap(dev[0], hapd, addr, "msg=9")
def test_ap_wps_authenticator_mismatch_m6(dev, apdev):
    """WPS and Authenticator attribute mismatch in M6"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M5")
    logger.debug("M6")
    # Corrupt the Authenticator in M6 (AP -> STA) and expect msg=10 failure
    wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=10")
def test_ap_wps_authenticator_mismatch_m7(dev, apdev):
    """WPS and Authenticator attribute mismatch in M7"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M5")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M6")
    logger.debug("M7")
    # Corrupt the Authenticator in M7 (STA -> AP) and expect msg=11 failure
    wps_auth_corrupt_to_ap(dev[0], hapd, addr, "msg=11")
def test_ap_wps_authenticator_mismatch_m8(dev, apdev):
    """WPS and Authenticator attribute mismatch in M8"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M2")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M3")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M4")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M5")
    wps_ext_eap_wsc(dev[0], hapd, bssid, "M6")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M7")
    logger.debug("M8")
    # Corrupt the Authenticator in M8 (AP -> STA) and expect msg=12 failure
    wps_auth_corrupt_from_ap(dev[0], hapd, bssid, "msg=12")
def test_ap_wps_authenticator_missing_m2(dev, apdev):
    """WPS and Authenticator attribute missing from M2"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    msg = ev.split(' ')[2]
    # Authenticator attribute (type 0x1005, len 8) occupies the last 24 hex
    # digits of the frame.
    if msg[-24:-16] != '10050008':
        raise Exception("Could not find Authenticator attribute")
    # Remove Authenticator value
    msg = msg[:-24]
    # Fix up the EAPOL and EAP length fields for the 12 removed octets.
    mlen = "%04x" % (int(msg[4:8], 16) - 12)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_dev_passwd_id_p2p(dev, apdev):
    """WPS and M2 with different Device Password ID (P2P)"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    msg = ev.split(' ')[2]
    # Device Password ID attribute (type 0x1012, len 2) at fixed offset in M2
    if msg[722:730] != '10120002':
        raise Exception("Could not find Device Password ID attribute")
    # Replace Device Password ID value. This will fail Authenticator check, but
    # allows the code path in wps_process_dev_pw_id() to be checked from debug
    # log.
    msg = msg[0:730] + "0005" + msg[734:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_dev_passwd_id_change_pin_to_pbc(dev, apdev):
    """WPS and M2 with different Device Password ID (PIN to PBC)"""
    # Session is started in PIN mode (no pbc=True)
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1012 = Device Password ID attribute, length 0x0002
    if msg[722:730] != '10120002':
        raise Exception("Could not find Device Password ID attribute")
    # Replace Device Password ID value (PIN --> PBC). This will be rejected.
    # 0x0004 = PushButton Device Password ID
    msg = msg[0:730] + "0004" + msg[734:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_dev_passwd_id_change_pbc_to_pin(dev, apdev):
    """WPS and M2 with different Device Password ID (PBC to PIN)"""
    # Session is started in PBC mode
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1012 = Device Password ID attribute, length 0x0002
    if msg[722:730] != '10120002':
        raise Exception("Could not find Device Password ID attribute")
    # Replace Device Password ID value. This will fail Authenticator check, but
    # allows the code path in wps_process_dev_pw_id() to be checked from debug
    # log.
    # 0x0000 = Default (PIN) Device Password ID
    msg = msg[0:730] + "0000" + msg[734:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    wps_fail_finish(hapd, dev[0], "msg=5")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_dev_passwd_id(dev, apdev):
    """WPS and M2 without Device Password ID"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1012 = Device Password ID attribute, length 0x0002
    if msg[722:730] != '10120002':
        raise Exception("Could not find Device Password ID attribute")
    # Remove Device Password ID value. This will fail Authenticator check, but
    # allows the code path in wps_process_dev_pw_id() to be checked from debug
    # log.
    # Shrink EAPOL/EAP lengths (hex offsets [4:8] and [12:16]) by the
    # 6-octet TLV (2 type + 2 length + 2 value)
    mlen = "%04x" % (int(msg[4:8], 16) - 6)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:722] + msg[734:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    wps_fail_finish(hapd, dev[0], "msg=5")
def test_ap_wps_m2_missing_registrar_nonce(dev, apdev):
    """WPS and M2 without Registrar Nonce"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1039 = Registrar Nonce attribute, length 0x0010
    if msg[96:104] != '10390010':
        raise Exception("Could not find Registrar Nonce attribute")
    # Remove Registrar Nonce. This will fail Authenticator check, but
    # allows the code path in wps_process_registrar_nonce() to be checked from
    # the debug log.
    # Shrink EAPOL/EAP lengths by the 20-octet TLV (4 header + 16 value)
    mlen = "%04x" % (int(msg[4:8], 16) - 20)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:96] + msg[136:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
    if ev is None:
        raise Exception("Disconnect event not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_enrollee_nonce(dev, apdev):
    """WPS and M2 without Enrollee Nonce"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x101a = Enrollee Nonce attribute, length 0x0010
    if msg[56:64] != '101a0010':
        raise Exception("Could not find enrollee Nonce attribute")
    # Remove Enrollee Nonce. This will fail Authenticator check, but
    # allows the code path in wps_process_enrollee_nonce() to be checked from
    # the debug log.
    # Shrink EAPOL/EAP lengths by the 20-octet TLV (4 header + 16 value)
    mlen = "%04x" % (int(msg[4:8], 16) - 20)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:56] + msg[96:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
    if ev is None:
        raise Exception("Disconnect event not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_uuid_r(dev, apdev):
    """WPS and M2 without UUID-R"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1048 = UUID-R attribute, length 0x0010
    if msg[136:144] != '10480010':
        raise Exception("Could not find UUID-R attribute")
    # Remove UUID-R. This will fail Authenticator check, but allows the code
    # path in wps_process_uuid_r() to be checked from the debug log.
    # Shrink EAPOL/EAP lengths (hex offsets [4:8] and [12:16]) by the
    # 20-octet TLV (4 header + 16 value)
    mlen = "%04x" % (int(msg[4:8], 16) - 20)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:136] + msg[176:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
    if ev is None:
        raise Exception("Disconnect event not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_invalid(dev, apdev):
    """WPS and M2 parsing failure"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # Sanity check that this looks like M2 (0x1048 = UUID-R attribute)
    if msg[136:144] != '10480010':
        raise Exception("Could not find UUID-R attribute")
    # Truncate the message by one octet (drop the last value octet while
    # claiming one octet less in the EAPOL/EAP lengths) so that the final
    # attribute no longer fits its declared length and M2 parsing fails.
    mlen = "%04x" % (int(msg[4:8], 16) - 1)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:-2]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
    if ev is None:
        raise Exception("Disconnect event not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_missing_msg_type(dev, apdev):
    """WPS and M2 without Message Type"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1022 = Message Type attribute, length 0x0001
    if msg[46:54] != '10220001':
        raise Exception("Could not find Message Type attribute")
    # Remove Message Type. This will fail Authenticator check, but allows the
    # code path in wps_process_wsc_msg() to be checked from the debug log.
    # Shrink EAPOL/EAP lengths by the 5-octet TLV (4 header + 1 value)
    mlen = "%04x" % (int(msg[4:8], 16) - 5)
    msg = msg[0:4] + mlen + msg[8:12] + mlen + msg[16:46] + msg[56:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
    if ev is None:
        raise Exception("Disconnect event not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_msg_type(dev, apdev):
    """WPS and M2 but unknown Message Type"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1022 = Message Type attribute, length 0x0001
    if msg[46:54] != '10220001':
        raise Exception("Could not find Message Type attribute")
    # Replace Message Type value. This will be rejected.
    # 0x00 is not a defined WSC Message Type value
    msg = msg[0:54] + "00" + msg[56:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECT"], timeout=5)
    if ev is None:
        raise Exception("Disconnect event not seen")
    dev[0].request("WPS_CANCEL")
    dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_opcode(dev, apdev):
    """WPS and M2 but unknown opcode"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # Replace opcode. This will be discarded in EAP-WSC processing.
    # Hex offset [32:34] = EAP-WSC Op-Code octet; 0x00 is undefined
    msg = msg[0:32] + "00" + msg[34:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_opcode2(dev, apdev):
    """WPS and M2 but unknown opcode (WSC_Start)"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # Replace opcode. This will be discarded in EAP-WSC processing.
    # Hex offset [32:34] = EAP-WSC Op-Code octet; 0x01 = WSC_Start, which is
    # not valid for a station-received message at this point
    msg = msg[0:32] + "01" + msg[34:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_ap_wps_m2_unknown_opcode3(dev, apdev):
    """WPS and M2 but unknown opcode (WSC_Done)"""
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev[0], addr, "M1")
    logger.debug("M2")
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # Replace opcode. This will be discarded in WPS Enrollee processing.
    # Hex offset [32:34] = EAP-WSC Op-Code octet; 0x05 = WSC_Done
    msg = msg[0:32] + "05" + msg[34:]
    res = dev[0].request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def wps_m2_but_other(dev, apdev, title, msgtype):
    """Helper: deliver M2 whose Message Type value is replaced with msgtype.

    msgtype is a two-character hex string. The modified message is expected
    to be rejected by the station; a WPS-FAIL event must be seen.
    """
    addr, bssid, hapd = wps_start_ext(apdev, dev)
    wps_ext_eap_identity_req(dev, hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev, addr)
    wps_ext_eap_wsc(dev, hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev, addr, "M1")
    logger.debug(title)
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev.request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1022 = Message Type attribute, length 0x0001
    if msg[46:54] != '10220001':
        raise Exception("Could not find Message Type attribute")
    # Replace Message Type value. This will be rejected.
    msg = msg[0:54] + msgtype + msg[56:]
    res = dev.request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = dev.wait_event(["WPS-FAIL"], timeout=5)
    if ev is None:
        raise Exception("WPS-FAIL event not seen")
    dev.request("WPS_CANCEL")
    dev.wait_disconnected()
def wps_m4_but_other(dev, apdev, title, msgtype):
    """Helper: deliver M4 whose Message Type value is replaced with msgtype.

    Like wps_m2_but_other(), but the exchange is advanced through M2/M3
    first and the WPS-FAIL event is expected on the AP (Registrar) side.
    """
    addr, bssid, hapd = wps_start_ext(apdev, dev)
    wps_ext_eap_identity_req(dev, hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev, addr)
    wps_ext_eap_wsc(dev, hapd, bssid, "EAP-WSC/Start")
    wps_ext_eap_wsc(hapd, dev, addr, "M1")
    wps_ext_eap_wsc(dev, hapd, bssid, "M2")
    wps_ext_eap_wsc(hapd, dev, addr, "M3")
    logger.debug(title)
    ev = hapd.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    hapd.request("SET ext_eapol_frame_io 0")
    dev.request("SET ext_eapol_frame_io 0")
    # Third token of the EAPOL-TX event is the frame as a hex string
    msg = ev.split(' ')[2]
    # 0x1022 = Message Type attribute, length 0x0001
    if msg[46:54] != '10220001':
        raise Exception("Could not find Message Type attribute")
    # Replace Message Type value. This will be rejected.
    msg = msg[0:54] + msgtype + msg[56:]
    res = dev.request("EAPOL_RX " + bssid + " " + msg)
    if "OK" not in res:
        raise Exception("EAPOL_RX failed")
    ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
    if ev is None:
        raise Exception("WPS-FAIL event not seen")
    dev.request("WPS_CANCEL")
    dev.wait_disconnected()
def test_ap_wps_m2_msg_type_m4(dev, apdev):
    """WPS and M2 but Message Type M4"""
    # "08" = WPS_M4 as the Message Type attribute octet
    wps_m2_but_other(dev[0], apdev[0], "M2/M4", "08")
def test_ap_wps_m2_msg_type_m6(dev, apdev):
    """WPS and M2 but Message Type M6"""
    # "0a" = WPS_M6
    wps_m2_but_other(dev[0], apdev[0], "M2/M6", "0a")
def test_ap_wps_m2_msg_type_m8(dev, apdev):
    """WPS and M2 but Message Type M8"""
    # "0c" = WPS_M8
    wps_m2_but_other(dev[0], apdev[0], "M2/M8", "0c")
def test_ap_wps_m4_msg_type_m2(dev, apdev):
    """WPS and M4 but Message Type M2"""
    # "05" = WPS_M2
    wps_m4_but_other(dev[0], apdev[0], "M4/M2", "05")
def test_ap_wps_m4_msg_type_m2d(dev, apdev):
    """WPS and M4 but Message Type M2D"""
    # "06" = WPS_M2D
    wps_m4_but_other(dev[0], apdev[0], "M4/M2D", "06")
@remote_compatible
def test_ap_wps_config_methods(dev, apdev):
    """WPS configuration method parsing"""
    ssid = "test-wps-conf"
    # First AP: long space-separated config_methods list
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "config_methods": "ethernet display ext_nfc_token int_nfc_token physical_display physical_push_button"}
    hapd = hostapd.add_ap(apdev[0], params)
    # Second AP: minimal config_methods list; the test only verifies that
    # both variants are accepted when starting the AP
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "config_methods": "display push_button"}
    hapd2 = hostapd.add_ap(apdev[1], params)
def test_ap_wps_set_selected_registrar_proto(dev, apdev):
    """WPS UPnP SetSelectedRegistrar protocol testing"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hapd = add_ssdp_ap(apdev[0], ap_uuid)
    location = ssdp_get_location(ap_uuid)
    urls = upnp_get_urls(location)
    eventurl = urlparse(urls['event_sub_url'])
    ctrlurl = urlparse(urls['control_url'])
    url = urlparse(location)
    conn = HTTPConnection(url.netloc)
    # Minimal event subscriber: log the request line and reply with a canned
    # WPS event response
    class WPSERHTTPServer(StreamRequestHandler):
        def handle(self):
            data = self.rfile.readline().strip()
            logger.debug(data)
            self.wfile.write(gen_wps_event())
    server = MyTCPServer(("127.0.0.1", 12345), WPSERHTTPServer)
    server.timeout = 1
    headers = {"callback": '<http://127.0.0.1:12345/event>',
               "NT": "upnp:event",
               "timeout": "Second-1234"}
    conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)
    sid = resp.getheader("sid")
    logger.debug("Subscription SID " + sid)
    server.handle_request()
    # (expected HTTP status, WSC TLV hex blob for NewMessage); the first
    # entry is a truncated attribute that must be rejected with 500
    tests = [(500, "10"),
             (200, "104a000110" + "1041000101" + "101200020000" +
              "105300023148" +
              "1049002c00372a0001200124111111111111222222222222333333333333444444444444555555555555666666666666" +
              "10480010362db47ba53a519188fb5458b986b2e4"),
             (200, "104a000110" + "1041000100" + "101200020000" +
              "105300020000"),
             (200, "104a000110" + "1041000100"),
             (200, "104a000110")]
    for status, test in tests:
        tlvs = binascii.unhexlify(test)
        newmsg = base64.b64encode(tlvs).decode()
        msg = '<?xml version="1.0"?>\n'
        msg += '<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">'
        msg += '<s:Body>'
        msg += '<u:SetSelectedRegistrar xmlns:u="urn:schemas-wifialliance-org:service:WFAWLANConfig:1">'
        msg += '<NewMessage>'
        msg += newmsg
        msg += "</NewMessage></u:SetSelectedRegistrar></s:Body></s:Envelope>"
        headers = {"Content-type": 'text/xml; charset="utf-8"'}
        headers["SOAPAction"] = '"urn:schemas-wifialliance-org:service:WFAWLANConfig:1#%s"' % "SetSelectedRegistrar"
        conn.request("POST", ctrlurl.path, msg, headers)
        resp = conn.getresponse()
        if resp.status != status:
            raise Exception("Unexpected HTTP response: %d (expected %d)" % (resp.status, status))
def test_ap_wps_adv_oom(dev, apdev):
    """WPS AP and advertisement OOM"""
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hapd = add_ssdp_ap(apdev[0], ap_uuid)
    # Allocation failure when starting the M-SEARCH reply state machine;
    # no response is expected in these cases (no_recv=True)
    with alloc_fail(hapd, 1, "=msearchreply_state_machine_start"):
        ssdp_send_msearch("urn:schemas-wifialliance-org:service:WFAWLANConfig:1",
                          no_recv=True)
        time.sleep(0.2)
    with alloc_fail(hapd, 1, "eloop_register_timeout;msearchreply_state_machine_start"):
        ssdp_send_msearch("urn:schemas-wifialliance-org:service:WFAWLANConfig:1",
                          no_recv=True)
        time.sleep(0.2)
    # OOM while stopping advertisements during disable
    with alloc_fail(hapd, 1,
                    "next_advertisement;advertisement_state_machine_stop"):
        hapd.disable()
    # OOM while restarting the SSDP listener must make ENABLE fail
    with alloc_fail(hapd, 1, "ssdp_listener_start"):
        if "FAIL" not in hapd.request("ENABLE"):
            raise Exception("ENABLE succeeded during OOM")
def test_wps_config_methods(dev):
    """WPS config method update"""
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5")
    # Set a non-empty value and then clear it; after each SET the GET value
    # must reflect exactly what was configured.
    steps = [("display label",
              "Failed to set config_methods",
              "config_methods were not updated"),
             ("",
              "Failed to clear config_methods",
              "config_methods were not cleared")]
    for value, set_err, verify_err in steps:
        if "OK" not in wpas.request("SET config_methods " + value):
            raise Exception(set_err)
        if wpas.request("GET config_methods").strip() != value:
            raise Exception(verify_err)
# EAP expanded type header values for EAP-WSC (Vendor-Id / Vendor-Type)
WPS_VENDOR_ID_WFA = 14122
WPS_VENDOR_TYPE = 1
# EAP-WSC Op-Code values
WSC_Start = 0x01
WSC_ACK = 0x02
WSC_NACK = 0x03
WSC_MSG = 0x04
WSC_Done = 0x05
WSC_FRAG_ACK = 0x06
# WSC attribute type identifiers (16-bit TLV type values)
ATTR_AP_CHANNEL = 0x1001
ATTR_ASSOC_STATE = 0x1002
ATTR_AUTH_TYPE = 0x1003
ATTR_AUTH_TYPE_FLAGS = 0x1004
ATTR_AUTHENTICATOR = 0x1005
ATTR_CONFIG_METHODS = 0x1008
ATTR_CONFIG_ERROR = 0x1009
ATTR_CONFIRM_URL4 = 0x100a
ATTR_CONFIRM_URL6 = 0x100b
ATTR_CONN_TYPE = 0x100c
ATTR_CONN_TYPE_FLAGS = 0x100d
ATTR_CRED = 0x100e
ATTR_ENCR_TYPE = 0x100f
ATTR_ENCR_TYPE_FLAGS = 0x1010
ATTR_DEV_NAME = 0x1011
ATTR_DEV_PASSWORD_ID = 0x1012
ATTR_E_HASH1 = 0x1014
ATTR_E_HASH2 = 0x1015
ATTR_E_SNONCE1 = 0x1016
ATTR_E_SNONCE2 = 0x1017
ATTR_ENCR_SETTINGS = 0x1018
ATTR_ENROLLEE_NONCE = 0x101a
ATTR_FEATURE_ID = 0x101b
ATTR_IDENTITY = 0x101c
ATTR_IDENTITY_PROOF = 0x101d
ATTR_KEY_WRAP_AUTH = 0x101e
ATTR_KEY_ID = 0x101f
ATTR_MAC_ADDR = 0x1020
ATTR_MANUFACTURER = 0x1021
ATTR_MSG_TYPE = 0x1022
ATTR_MODEL_NAME = 0x1023
ATTR_MODEL_NUMBER = 0x1024
ATTR_NETWORK_INDEX = 0x1026
ATTR_NETWORK_KEY = 0x1027
ATTR_NETWORK_KEY_INDEX = 0x1028
ATTR_NEW_DEVICE_NAME = 0x1029
ATTR_NEW_PASSWORD = 0x102a
ATTR_OOB_DEVICE_PASSWORD = 0x102c
ATTR_OS_VERSION = 0x102d
ATTR_POWER_LEVEL = 0x102f
ATTR_PSK_CURRENT = 0x1030
ATTR_PSK_MAX = 0x1031
ATTR_PUBLIC_KEY = 0x1032
ATTR_RADIO_ENABLE = 0x1033
ATTR_REBOOT = 0x1034
ATTR_REGISTRAR_CURRENT = 0x1035
ATTR_REGISTRAR_ESTABLISHED = 0x1036
ATTR_REGISTRAR_LIST = 0x1037
ATTR_REGISTRAR_MAX = 0x1038
ATTR_REGISTRAR_NONCE = 0x1039
ATTR_REQUEST_TYPE = 0x103a
ATTR_RESPONSE_TYPE = 0x103b
ATTR_RF_BANDS = 0x103c
ATTR_R_HASH1 = 0x103d
ATTR_R_HASH2 = 0x103e
ATTR_R_SNONCE1 = 0x103f
ATTR_R_SNONCE2 = 0x1040
ATTR_SELECTED_REGISTRAR = 0x1041
ATTR_SERIAL_NUMBER = 0x1042
ATTR_WPS_STATE = 0x1044
ATTR_SSID = 0x1045
ATTR_TOTAL_NETWORKS = 0x1046
ATTR_UUID_E = 0x1047
ATTR_UUID_R = 0x1048
ATTR_VENDOR_EXT = 0x1049
ATTR_VERSION = 0x104a
ATTR_X509_CERT_REQ = 0x104b
ATTR_X509_CERT = 0x104c
ATTR_EAP_IDENTITY = 0x104d
ATTR_MSG_COUNTER = 0x104e
ATTR_PUBKEY_HASH = 0x104f
ATTR_REKEY_KEY = 0x1050
ATTR_KEY_LIFETIME = 0x1051
ATTR_PERMITTED_CFG_METHODS = 0x1052
ATTR_SELECTED_REGISTRAR_CONFIG_METHODS = 0x1053
ATTR_PRIMARY_DEV_TYPE = 0x1054
ATTR_SECONDARY_DEV_TYPE_LIST = 0x1055
ATTR_PORTABLE_DEV = 0x1056
ATTR_AP_SETUP_LOCKED = 0x1057
ATTR_APPLICATION_EXT = 0x1058
ATTR_EAP_TYPE = 0x1059
ATTR_IV = 0x1060
ATTR_KEY_PROVIDED_AUTO = 0x1061
ATTR_802_1X_ENABLED = 0x1062
ATTR_APPSESSIONKEY = 0x1063
ATTR_WEPTRANSMITKEY = 0x1064
ATTR_REQUESTED_DEV_TYPE = 0x106a
# Message Type attribute values
WPS_Beacon = 0x01
WPS_ProbeRequest = 0x02
WPS_ProbeResponse = 0x03
WPS_M1 = 0x04
WPS_M2 = 0x05
WPS_M2D = 0x06
WPS_M3 = 0x07
WPS_M4 = 0x08
WPS_M5 = 0x09
WPS_M6 = 0x0a
WPS_M7 = 0x0b
WPS_M8 = 0x0c
WPS_WSC_ACK = 0x0d
WPS_WSC_NACK = 0x0e
WPS_WSC_DONE = 0x0f
def get_wsc_msg(dev):
    """Capture one EAPOL-TX frame from dev and parse it as an EAP-WSC message.

    Returns a dict with the parsed EAPOL/EAP/EAP-WSC header fields, the raw
    attribute blob ('raw_attrs') and a type->value map of the WSC attributes
    ('wsc_attrs'; 'wsc_msg_type' is set when a Message Type attribute is
    present). Raises on timeout or any framing inconsistency.
    """
    ev = dev.wait_event(["EAPOL-TX"], timeout=10)
    if ev is None:
        raise Exception("Timeout on EAPOL-TX")
    # Third token of the EAPOL-TX event is the frame as a hex string
    data = binascii.unhexlify(ev.split(' ')[2])
    msg = {}

    # Parse EAPOL header
    if len(data) < 4:
        raise Exception("No room for EAPOL header")
    version, type, length = struct.unpack('>BBH', data[0:4])
    msg['eapol_version'] = version
    msg['eapol_type'] = type
    msg['eapol_length'] = length
    data = data[4:]
    if length != len(data):
        raise Exception("EAPOL header length mismatch (%d != %d)" % (length, len(data)))
    if type != 0:
        raise Exception("Unexpected EAPOL header type: %d" % type)

    # Parse EAP header
    if len(data) < 4:
        raise Exception("No room for EAP header")
    code, identifier, length = struct.unpack('>BBH', data[0:4])
    msg['eap_code'] = code
    msg['eap_identifier'] = identifier
    msg['eap_length'] = length
    data = data[4:]
    if msg['eapol_length'] != msg['eap_length']:
        raise Exception("EAP header length mismatch (%d != %d)" % (msg['eapol_length'], length))

    # Parse EAP expanded header
    if len(data) < 1:
        raise Exception("No EAP type included")
    msg['eap_type'], = struct.unpack('B', data[0:1])
    data = data[1:]

    if msg['eap_type'] == 254:
        if len(data) < 3 + 4:
            raise Exception("Truncated EAP expanded header")
        # 3-octet Vendor-Id + 4-octet Vendor-Type; prepend a zero byte so
        # both can be unpacked as 32-bit values
        msg['eap_vendor_id'], msg['eap_vendor_type'] = struct.unpack('>LL', b'\x00' + data[0:7])
        data = data[7:]
    else:
        raise Exception("Unexpected EAP type")

    if msg['eap_vendor_id'] != WPS_VENDOR_ID_WFA:
        raise Exception("Unexpected Vendor-Id")
    if msg['eap_vendor_type'] != WPS_VENDOR_TYPE:
        raise Exception("Unexpected Vendor-Type")

    # Parse EAP-WSC header
    if len(data) < 2:
        raise Exception("Truncated EAP-WSC header")
    msg['wsc_opcode'], msg['wsc_flags'] = struct.unpack('BB', data[0:2])
    data = data[2:]

    # Parse WSC attributes
    msg['raw_attrs'] = data
    attrs = {}
    while len(data) > 0:
        if len(data) < 4:
            raise Exception("Truncated attribute header")
        attr, length = struct.unpack('>HH', data[0:4])
        data = data[4:]
        if length > len(data):
            raise Exception("Truncated attribute 0x%04x" % attr)
        attrs[attr] = data[0:length]
        data = data[length:]
    msg['wsc_attrs'] = attrs

    if ATTR_MSG_TYPE in attrs:
        msg['wsc_msg_type'], = struct.unpack('B', attrs[ATTR_MSG_TYPE])

    return msg
def recv_wsc_msg(dev, opcode, msg_type):
    """Receive one EAP-WSC message and require the given Op-Code and
    Message Type; returns (msg, attrs, raw_attrs)."""
    msg = get_wsc_msg(dev)
    matches = msg['wsc_opcode'] == opcode and msg['wsc_msg_type'] == msg_type
    if not matches:
        raise Exception("Unexpected Op-Code/MsgType")
    return msg, msg['wsc_attrs'], msg['raw_attrs']
def build_wsc_attr(attr, payload):
    """Serialize one WSC TLV: 16-bit attribute id, 16-bit length, value.

    payload may be bytes or str; a str is UTF-8 encoded first.
    """
    if not isinstance(payload, bytes):
        payload = payload.encode()
    header = struct.pack('>HH', attr, len(payload))
    return header + payload
def build_attr_msg_type(msg_type):
    """Build the Message Type attribute carrying a single octet value."""
    value = struct.pack('B', msg_type)
    return build_wsc_attr(ATTR_MSG_TYPE, value)
def build_eap_wsc(eap_code, eap_id, payload, opcode=WSC_MSG):
    """Wrap a WSC attribute blob into EAPOL/EAP/EAP-WSC framing."""
    # EAP header (4) + expanded type (1 + 3 + 4) + EAP-WSC header (2)
    body_len = 4 + 8 + 2 + len(payload)
    parts = [
        # EAPOL header: version 2, type 0 (EAP-Packet)
        struct.pack('>BBH', 2, 0, body_len),
        # EAP header
        struct.pack('>BBH', eap_code, eap_id, body_len),
        # EAP expanded type 254 with 3-octet Vendor-Id + 4-octet Vendor-Type
        struct.pack('B', 254),
        struct.pack('>L', WPS_VENDOR_ID_WFA)[1:4],
        struct.pack('>L', WPS_VENDOR_TYPE),
        # EAP-WSC header: Op-Code, Flags=0
        struct.pack('BB', opcode, 0),
        payload,
    ]
    return b''.join(parts)
def build_eap_success(eap_id):
    """Build an EAPOL-encapsulated EAP-Success frame (EAP code 3)."""
    eap_hdr = struct.pack('>BBH', 3, eap_id, 4)
    eapol_hdr = struct.pack('>BBH', 2, 0, len(eap_hdr))
    return eapol_hdr + eap_hdr
def build_eap_failure(eap_id):
    """Build an EAPOL-encapsulated EAP-Failure frame (EAP code 4)."""
    eap_hdr = struct.pack('>BBH', 4, eap_id, 4)
    eapol_hdr = struct.pack('>BBH', 2, 0, len(eap_hdr))
    return eapol_hdr + eap_hdr
def send_wsc_msg(dev, src, msg):
    """Inject a raw EAPOL frame into dev as if it was received from src."""
    hexmsg = binascii.hexlify(msg).decode()
    if "OK" not in dev.request("EAPOL_RX " + src + " " + hexmsg):
        raise Exception("EAPOL_RX failed")
# Diffie-Hellman "group 5" parameters used for the WPS public key exchange:
# 1536-bit MODP prime with generator 2 (presumably RFC 3526 group 5 — the
# value is copied verbatim and used as-is by wsc_dh_init()/wsc_dh_kdf())
group_5_prime = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF
group_5_generator = 2
def wsc_kdf(key, label, bits):
    """WPS key derivation function.

    Concatenates HMAC-SHA256(key, counter | label | bits) blocks with an
    incrementing 32-bit counter until at least `bits` bits are available,
    then returns the first bits/8 octets.
    """
    out = bytearray()
    iteration = 1
    while len(out) * 8 < bits:
        data = struct.pack('>L', iteration) + label.encode() + struct.pack('>L', bits)
        out += hmac.new(key, data, hashlib.sha256).digest()
        iteration += 1
    return bytes(out[0:bits // 8])
def wsc_keys(kdk):
    """Derive (AuthKey, KeyWrapKey, EMSK) by splitting 640 KDF bits."""
    derived = wsc_kdf(kdk, "Wi-Fi Easy and Secure Key Derivation", 640)
    authkey, keywrapkey, emsk = derived[0:32], derived[32:48], derived[48:80]
    return authkey, keywrapkey, emsk
def wsc_dev_pw_half_psk(authkey, dev_pw):
    """First 128 bits of HMAC-SHA256(authkey, dev_pw half)."""
    digest = hmac.new(authkey, dev_pw.encode(), hashlib.sha256).digest()
    return digest[:16]
def wsc_dev_pw_psk(authkey, dev_pw):
    """Split the device password in half and derive (PSK1, PSK2)."""
    half = len(dev_pw) // 2
    first_half = dev_pw[:half]
    second_half = dev_pw[half:]
    return (wsc_dev_pw_half_psk(authkey, first_half),
            wsc_dev_pw_half_psk(authkey, second_half))
def build_attr_authenticator(authkey, prev_msg, curr_msg):
    """Authenticator attribute: first 64 bits of HMAC-SHA256 over the
    previous message attributes concatenated with the current ones."""
    digest = hmac.new(authkey, prev_msg + curr_msg, hashlib.sha256).digest()
    return build_wsc_attr(ATTR_AUTHENTICATOR, digest[:8])
def build_attr_encr_settings(authkey, keywrapkey, data):
    """Build the Encrypted Settings attribute.

    Appends a Key Wrap Authenticator (first 64 bits of HMAC-SHA256 over the
    plaintext), pads to the AES block size with PS octets equal to the pad
    length, AES-CBC encrypts with a fixed IV and prepends the IV.
    """
    kwa = hmac.new(authkey, data, hashlib.sha256).digest()[0:8]
    plaintext = data + build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
    # Note: a full 16-octet pad block is added when already block-aligned
    pad_len = 16 - len(plaintext) % 16
    plaintext += struct.pack('B', pad_len) * pad_len
    iv = b'\x99' * 16
    cipher = AES.new(keywrapkey, AES.MODE_CBC, iv)
    return build_wsc_attr(ATTR_ENCR_SETTINGS, iv + cipher.encrypt(plaintext))
def decrypt_attr_encr_settings(authkey, keywrapkey, data):
    """Decrypt and validate an Encrypted Settings attribute value.

    data = 16-octet IV followed by AES-CBC ciphertext. Verifies the PS
    padding and the embedded Key Wrap Authenticator attribute; returns the
    plaintext with padding and KWA removed. Raises on any validation error.
    """
    if len(data) < 32 or len(data) % 16 != 0:
        raise Exception("Unexpected Encrypted Settings length: %d" % len(data))
    iv = data[0:16]
    encr = data[16:]
    aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
    decrypted = aes.decrypt(encr)
    # Last octet gives the pad length; all PS octets must carry that value
    pad_len, = struct.unpack('B', decrypted[-1:])
    if pad_len > len(decrypted):
        raise Exception("Invalid padding in Encrypted Settings")
    for i in range(-pad_len, -1):
        if decrypted[i] != decrypted[-1]:
            raise Exception("Invalid PS value in Encrypted Settings")

    decrypted = decrypted[0:len(decrypted) - pad_len]
    # The plaintext must end with a Key Wrap Authenticator attribute:
    # 4-octet TLV header + 8-octet HMAC truncation = 12 octets
    if len(decrypted) < 12:
        raise Exception("Truncated Encrypted Settings plaintext")
    kwa = decrypted[-12:]
    attr, length = struct.unpack(">HH", kwa[0:4])
    if attr != ATTR_KEY_WRAP_AUTH or length != 8:
        raise Exception("Invalid KWA header")
    kwa = kwa[4:]
    decrypted = decrypted[0:len(decrypted) - 12]

    m = hmac.new(authkey, decrypted, hashlib.sha256)
    calc_kwa = m.digest()[0:8]
    if kwa != calc_kwa:
        raise Exception("KWA mismatch")

    return decrypted
def zeropad_str(val, pad_len):
    """Left-pad a hex string with '0' characters to pad_len octets
    (i.e. 2 * pad_len hex digits); longer strings are returned as-is."""
    return val.rjust(pad_len * 2, '0')
def wsc_dh_init():
    """Generate the local DH keypair.

    Returns (private value, 192-octet big-endian public key blob).
    The private value is hardcoded for reproducible test runs; in theory,
    this is supposed to be randomly selected.
    """
    own_private = 0x123456789
    own_public = pow(group_5_generator, own_private, group_5_prime)
    pub_hex = zeropad_str(format(own_public, '02x'), 192)
    return own_private, binascii.unhexlify(pub_hex)
def wsc_dh_kdf(peer_pk, own_private, mac_addr, e_nonce, r_nonce):
    """Complete the DH exchange and derive the WPS session keys.

    peer_pk is the raw public key blob from the peer. DHKey =
    SHA256(shared secret); KDK = HMAC-SHA256(DHKey, E-Nonce | MAC | R-Nonce).
    Returns (AuthKey, KeyWrapKey). Raises on an invalid peer public key.
    """
    peer_public = int(binascii.hexlify(peer_pk), 16)
    if peer_public < 2 or peer_public >= group_5_prime:
        raise Exception("Invalid peer public key")
    # Reject values whose Legendre symbol is not 1 (not a quadratic
    # residue mod the group prime)
    if pow(peer_public, (group_5_prime - 1) // 2, group_5_prime) != 1:
        raise Exception("Unexpected Legendre symbol for peer public key")

    shared_secret = pow(peer_public, own_private, group_5_prime)
    ss = zeropad_str(format(shared_secret, "02x"), 192)
    logger.debug("DH shared secret: " + ss)

    dhkey = hashlib.sha256(binascii.unhexlify(ss)).digest()
    logger.debug("DHKey: " + binascii.hexlify(dhkey).decode())

    m = hmac.new(dhkey, e_nonce + mac_addr + r_nonce, hashlib.sha256)
    kdk = m.digest()
    logger.debug("KDK: " + binascii.hexlify(kdk).decode())
    authkey, keywrapkey, emsk = wsc_keys(kdk)
    logger.debug("AuthKey: " + binascii.hexlify(authkey).decode())
    logger.debug("KeyWrapKey: " + binascii.hexlify(keywrapkey).decode())
    logger.debug("EMSK: " + binascii.hexlify(emsk).decode())
    return authkey, keywrapkey
def wsc_dev_pw_hash(authkey, dev_pw, e_pk, r_pk):
    """Derive the device password commitment values.

    Returns (s1, s2, hash1, hash2) where hashN =
    HMAC-SHA256(authkey, sN | PSKN | E-PK | R-PK).
    """
    psk1, psk2 = wsc_dev_pw_psk(authkey, dev_pw)
    logger.debug("PSK1: " + binascii.hexlify(psk1).decode())
    logger.debug("PSK2: " + binascii.hexlify(psk2).decode())

    # Note: Secret values are supposed to be random, but hardcoded values are
    # fine for testing.
    s1 = 16*b'\x77'
    m = hmac.new(authkey, s1 + psk1 + e_pk + r_pk, hashlib.sha256)
    hash1 = m.digest()
    logger.debug("Hash1: " + binascii.hexlify(hash1).decode())

    s2 = 16*b'\x88'
    m = hmac.new(authkey, s2 + psk2 + e_pk + r_pk, hashlib.sha256)
    hash2 = m.digest()
    logger.debug("Hash2: " + binascii.hexlify(hash2).decode())
    return s1, s2, hash1, hash2
def build_m1(eap_id, uuid_e, mac_addr, e_nonce, e_pk,
             manufacturer='', model_name='', config_methods='\x00\x00'):
    """Build an M1 message.

    Returns (eap_frame, attrs); attrs is the plain WSC attribute blob so
    callers can later include it in Authenticator HMAC calculations.
    """
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M1)
    attrs += build_wsc_attr(ATTR_UUID_E, uuid_e)
    attrs += build_wsc_attr(ATTR_MAC_ADDR, mac_addr)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_PUBLIC_KEY, e_pk)
    # Remaining mandatory attributes are filled with zero/empty placeholder
    # values; the tests only care about the fields above
    attrs += build_wsc_attr(ATTR_AUTH_TYPE_FLAGS, '\x00\x00')
    attrs += build_wsc_attr(ATTR_ENCR_TYPE_FLAGS, '\x00\x00')
    attrs += build_wsc_attr(ATTR_CONN_TYPE_FLAGS, '\x00')
    attrs += build_wsc_attr(ATTR_CONFIG_METHODS, config_methods)
    attrs += build_wsc_attr(ATTR_WPS_STATE, '\x00')
    attrs += build_wsc_attr(ATTR_MANUFACTURER, manufacturer)
    attrs += build_wsc_attr(ATTR_MODEL_NAME, model_name)
    attrs += build_wsc_attr(ATTR_MODEL_NUMBER, '')
    attrs += build_wsc_attr(ATTR_SERIAL_NUMBER, '')
    attrs += build_wsc_attr(ATTR_PRIMARY_DEV_TYPE, 8*'\x00')
    attrs += build_wsc_attr(ATTR_DEV_NAME, '')
    attrs += build_wsc_attr(ATTR_RF_BANDS, '\x00')
    attrs += build_wsc_attr(ATTR_ASSOC_STATE, '\x00\x00')
    attrs += build_wsc_attr(ATTR_DEV_PASSWORD_ID, '\x00\x00')
    attrs += build_wsc_attr(ATTR_CONFIG_ERROR, '\x00\x00')
    attrs += build_wsc_attr(ATTR_OS_VERSION, '\x00\x00\x00\x00')
    m1 = build_eap_wsc(2, eap_id, attrs)
    return m1, attrs
def build_m2(authkey, m1, eap_id, e_nonce, r_nonce, uuid_r, r_pk,
             dev_pw_id='\x00\x00', eap_code=1):
    """Construct a WSC M2 message (Registrar side).

    e_nonce, r_nonce and r_pk may be falsy to leave the corresponding
    attribute out for protocol error testing. The Authenticator attribute
    is computed over the raw M1 attributes (m1) plus this message.
    Returns (m2, attrs).
    """
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M2)
    if e_nonce:
        attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    if r_nonce:
        attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_UUID_R, uuid_r)
    if r_pk:
        attrs += build_wsc_attr(ATTR_PUBLIC_KEY, r_pk)
    # Remaining mandatory M2 attributes with minimal/empty test values.
    for attr_id, value in [(ATTR_AUTH_TYPE_FLAGS, '\x00\x00'),
                           (ATTR_ENCR_TYPE_FLAGS, '\x00\x00'),
                           (ATTR_CONN_TYPE_FLAGS, '\x00'),
                           (ATTR_CONFIG_METHODS, '\x00\x00'),
                           (ATTR_MANUFACTURER, ''),
                           (ATTR_MODEL_NAME, ''),
                           (ATTR_MODEL_NUMBER, ''),
                           (ATTR_SERIAL_NUMBER, ''),
                           (ATTR_PRIMARY_DEV_TYPE, 8*'\x00'),
                           (ATTR_DEV_NAME, ''),
                           (ATTR_RF_BANDS, '\x00'),
                           (ATTR_ASSOC_STATE, '\x00\x00'),
                           (ATTR_CONFIG_ERROR, '\x00\x00'),
                           (ATTR_DEV_PASSWORD_ID, dev_pw_id),
                           (ATTR_OS_VERSION, '\x00\x00\x00\x00')]:
        attrs += build_wsc_attr(attr_id, value)
    attrs += build_attr_authenticator(authkey, m1, attrs)
    m2 = build_eap_wsc(eap_code, eap_id, attrs)
    return m2, attrs
def build_m2d(m1, eap_id, e_nonce, r_nonce, uuid_r, dev_pw_id=None, eap_code=1):
    """Construct a WSC M2D message.

    Like M2 but without a Public Key/Authenticator; an optional Device
    Password ID attribute is appended when dev_pw_id is given.
    Returns (m2d, attrs).
    """
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M2D)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_UUID_R, uuid_r)
    # Model Number is intentionally left out of this M2D.
    for attr_id, value in [(ATTR_AUTH_TYPE_FLAGS, '\x00\x00'),
                           (ATTR_ENCR_TYPE_FLAGS, '\x00\x00'),
                           (ATTR_CONN_TYPE_FLAGS, '\x00'),
                           (ATTR_CONFIG_METHODS, '\x00\x00'),
                           (ATTR_MANUFACTURER, ''),
                           (ATTR_MODEL_NAME, ''),
                           (ATTR_SERIAL_NUMBER, ''),
                           (ATTR_PRIMARY_DEV_TYPE, 8*'\x00'),
                           (ATTR_DEV_NAME, ''),
                           (ATTR_RF_BANDS, '\x00'),
                           (ATTR_ASSOC_STATE, '\x00\x00'),
                           (ATTR_CONFIG_ERROR, '\x00\x00'),
                           (ATTR_OS_VERSION, '\x00\x00\x00\x00')]:
        attrs += build_wsc_attr(attr_id, value)
    if dev_pw_id:
        attrs += build_wsc_attr(ATTR_DEV_PASSWORD_ID, dev_pw_id)
    m2d = build_eap_wsc(eap_code, eap_id, attrs)
    return m2d, attrs
def build_ack(eap_id, e_nonce, r_nonce, msg_type=WPS_WSC_ACK, eap_code=1):
    """Construct a WSC_ACK message.

    msg_type/e_nonce/r_nonce may be None/empty to omit the corresponding
    attribute for protocol error testing. Returns (msg, attrs).
    """
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    if msg_type is not None:
        attrs += build_attr_msg_type(msg_type)
    for attr_id, nonce in ((ATTR_ENROLLEE_NONCE, e_nonce),
                           (ATTR_REGISTRAR_NONCE, r_nonce)):
        if nonce:
            attrs += build_wsc_attr(attr_id, nonce)
    msg = build_eap_wsc(eap_code, eap_id, attrs, opcode=WSC_ACK)
    return msg, attrs
def build_nack(eap_id, e_nonce, r_nonce, config_error='\x00\x00',
               msg_type=WPS_WSC_NACK, eap_code=1):
    """Construct a WSC_NACK message.

    Any of msg_type/e_nonce/r_nonce/config_error may be None/empty to omit
    the corresponding attribute for protocol error testing.
    Returns (msg, attrs).
    """
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    if msg_type is not None:
        attrs += build_attr_msg_type(msg_type)
    for attr_id, nonce in ((ATTR_ENROLLEE_NONCE, e_nonce),
                           (ATTR_REGISTRAR_NONCE, r_nonce)):
        if nonce:
            attrs += build_wsc_attr(attr_id, nonce)
    if config_error:
        attrs += build_wsc_attr(ATTR_CONFIG_ERROR, config_error)
    msg = build_eap_wsc(eap_code, eap_id, attrs, opcode=WSC_NACK)
    return msg, attrs
def test_wps_ext(dev, apdev):
    """WPS against external implementation"""
    # This test terminates both ends of a WPS PIN exchange itself over
    # external EAPOL frame I/O: it acts as an Enrollee toward the AP and as
    # a Registrar toward the STA, building/parsing M1..M8 and WSC_Done
    # manually with the helper functions in this file.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    wsc_start_id = msg['eap_identifier']
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    # Fixed test values for UUID-E and E-Nonce; randomness is not needed here.
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    authkey, keywrapkey = wsc_dh_kdf(m2_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, e_nonce,
                                     m2_attrs[ATTR_REGISTRAR_NONCE])
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk,
                                                   m2_attrs[ATTR_PUBLIC_KEY])
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
                            m2_attrs[ATTR_REGISTRAR_NONCE])
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
                            m2_attrs[ATTR_REGISTRAR_NONCE])
    data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    logger.debug("Receive M6 from AP")
    msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
    logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M7)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
                            m2_attrs[ATTR_REGISTRAR_NONCE])
    data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
    m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    raw_m7_attrs = attrs
    send_wsc_msg(hapd, addr, m7)
    logger.debug("Receive M8 from AP")
    msg, m8_attrs, raw_m8_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M8)
    # The credential received from the AP is re-used below when acting as
    # Registrar toward the STA.
    m8_cred = decrypt_attr_encr_settings(authkey, keywrapkey,
                                         m8_attrs[ATTR_ENCR_SETTINGS])
    logger.debug("M8 Credential: " + binascii.hexlify(m8_cred).decode())
    logger.debug("Prepare WSC_Done")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_WSC_DONE)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE,
                            m2_attrs[ATTR_REGISTRAR_NONCE])
    wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    # Do not send WSC_Done yet to allow the exchange with the STA to
    # complete before the AP disconnects.
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    eap_id = wsc_start_id
    logger.debug("Send WSC/Start to STA")
    wsc_start = build_eap_wsc(1, eap_id, b'', opcode=WSC_Start)
    send_wsc_msg(dev[0], bssid, wsc_start)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
    raw_m4_attrs = attrs
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 from STA")
    msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
    logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M6)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
                            m1_attrs[ATTR_ENROLLEE_NONCE])
    data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m5_attrs, attrs)
    raw_m6_attrs = attrs
    m6 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m6)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M7 from STA")
    msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M7)
    logger.debug("Send M8 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M8)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
                            m1_attrs[ATTR_ENROLLEE_NONCE])
    attrs += build_attr_encr_settings(authkey, keywrapkey, m8_cred)
    attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
    raw_m8_attrs = attrs
    m8 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m8)
    eap_id = (eap_id + 1) % 256
    ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=5)
    if ev is None:
        raise Exception("wpa_supplicant did not report credential")
    logger.debug("Receive WSC_Done from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_Done or msg['wsc_msg_type'] != WPS_WSC_DONE:
        raise Exception("Unexpected Op-Code/MsgType for WSC_Done")
    logger.debug("Send WSC_Done to AP")
    # Return EAPOL frame handling to the normal path before finishing.
    hapd.request("SET ext_eapol_frame_io 0")
    dev[0].request("SET ext_eapol_frame_io 0")
    send_wsc_msg(hapd, addr, wsc_done)
    ev = hapd.wait_event(["WPS-REG-SUCCESS"], timeout=5)
    if ev is None:
        raise Exception("hostapd did not report WPS success")
    dev[0].wait_connected()
def wps_start_kwa(dev, apdev):
    """Run a WPS exchange up to M4 construction for KWA error tests.

    Acts as an external Registrar toward the STA: performs the identity
    exchange, receives M1, derives authkey/keywrapkey, sends M2 and
    receives M3, then builds the start of M4 (up to R-Hash2) so the caller
    can append a malformed Encrypted Settings attribute.
    Returns (r_s1, keywrapkey, authkey, raw_m3_attrs, eap_id, bssid, attrs).
    """
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    # Fixed test values for UUID-R and R-Nonce.
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    return r_s1, keywrapkey, authkey, raw_m3_attrs, eap_id, bssid, attrs
def wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id):
    """Finish a KWA error-case exchange started by wps_start_kwa().

    Completes M4 with its Authenticator attribute, sends it to the STA and
    verifies the STA rejects the malformed Encrypted Settings with WSC_NACK
    before tearing down the connection.
    """
    attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
    send_wsc_msg(dev[0], bssid, build_eap_wsc(1, eap_id, attrs))
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 from STA")
    resp = get_wsc_msg(dev[0])
    if resp['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_kwa_proto_no_kwa(dev, apdev):
    """WPS and KWA error: No KWA attribute"""
    r_s1, keywrapkey, authkey, raw_m3_attrs, eap_id, bssid, attrs = wps_start_kwa(dev, apdev)
    # Build Encrypted Settings that lack the Key Wrap Authenticator.
    plaintext = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    pad_len = 16 - len(plaintext) % 16
    plaintext += pad_len * struct.pack('B', pad_len)
    iv = 16*b'\x99'
    cipher = AES.new(keywrapkey, AES.MODE_CBC, iv)
    attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + cipher.encrypt(plaintext))
    wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id)
def test_wps_ext_kwa_proto_data_after_kwa(dev, apdev):
    """WPS and KWA error: Data after KWA"""
    r_s1, keywrapkey, authkey, raw_m3_attrs, eap_id, bssid, attrs = wps_start_kwa(dev, apdev)
    plaintext = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    # Compute a valid KWA over the payload, then append an extra attribute
    # after it; the KWA is required to be the last attribute.
    kwa = hmac.new(authkey, plaintext, hashlib.sha256).digest()[0:8]
    plaintext += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
    plaintext += build_wsc_attr(ATTR_VENDOR_EXT, "1234567890")
    pad_len = 16 - len(plaintext) % 16
    plaintext += pad_len * struct.pack('B', pad_len)
    iv = 16*b'\x99'
    cipher = AES.new(keywrapkey, AES.MODE_CBC, iv)
    attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + cipher.encrypt(plaintext))
    wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id)
def test_wps_ext_kwa_proto_kwa_mismatch(dev, apdev):
    """WPS and KWA error: KWA mismatch"""
    r_s1, keywrapkey, authkey, raw_m3_attrs, eap_id, bssid, attrs = wps_start_kwa(dev, apdev)
    # Encrypted Settings with an all-zero (incorrect) KWA value.
    plaintext = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    plaintext += build_wsc_attr(ATTR_KEY_WRAP_AUTH, 8*'\x00')
    pad_len = 16 - len(plaintext) % 16
    plaintext += pad_len * struct.pack('B', pad_len)
    iv = 16*b'\x99'
    cipher = AES.new(keywrapkey, AES.MODE_CBC, iv)
    attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + cipher.encrypt(plaintext))
    wps_stop_kwa(dev, bssid, attrs, authkey, raw_m3_attrs, eap_id)
def wps_run_cred_proto(dev, apdev, m8_cred, connect=False, no_connect=False):
    """Run a full Registrar-side WPS exchange delivering m8_cred in M8.

    m8_cred is the raw (unencrypted) Credential payload to wrap into the M8
    Encrypted Settings attribute. Expected outcome:
    - connect=True: STA accepts the credential and connects
    - no_connect=True: STA sends WSC_Done but is not expected to connect
    - neither: STA is expected to reject the credential with WSC_NACK
    """
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    # Fixed test values for UUID-R and R-Nonce.
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
    raw_m4_attrs = attrs
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 from STA")
    msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
    logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M6)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
                            m1_attrs[ATTR_ENROLLEE_NONCE])
    data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m5_attrs, attrs)
    raw_m6_attrs = attrs
    m6 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m6)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M7 from STA")
    msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M7)
    logger.debug("Send M8 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M8)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE,
                            m1_attrs[ATTR_ENROLLEE_NONCE])
    # The caller-supplied credential (possibly malformed) goes out here.
    attrs += build_attr_encr_settings(authkey, keywrapkey, m8_cred)
    attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
    raw_m8_attrs = attrs
    m8 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m8)
    eap_id = (eap_id + 1) % 256
    if no_connect:
        logger.debug("Receive WSC_Done from STA")
        msg = get_wsc_msg(dev[0])
        if msg['wsc_opcode'] != WSC_Done or msg['wsc_msg_type'] != WPS_WSC_DONE:
            raise Exception("Unexpected Op-Code/MsgType for WSC_Done")
        hapd.request("SET ext_eapol_frame_io 0")
        dev[0].request("SET ext_eapol_frame_io 0")
        send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
        dev[0].wait_disconnected()
        dev[0].request("REMOVE_NETWORK all")
    elif connect:
        logger.debug("Receive WSC_Done from STA")
        msg = get_wsc_msg(dev[0])
        if msg['wsc_opcode'] != WSC_Done or msg['wsc_msg_type'] != WPS_WSC_DONE:
            raise Exception("Unexpected Op-Code/MsgType for WSC_Done")
        hapd.request("SET ext_eapol_frame_io 0")
        dev[0].request("SET ext_eapol_frame_io 0")
        send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
        dev[0].wait_connected()
    else:
        # Verify STA NACK's the credential
        msg = get_wsc_msg(dev[0])
        if msg['wsc_opcode'] != WSC_NACK:
            raise Exception("Unexpected message - expected WSC_Nack")
        dev[0].request("WPS_CANCEL")
        send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
        dev[0].wait_disconnected()
def build_cred(nw_idx='\x01', ssid='test-wps-conf', auth_type='\x00\x20',
               encr_type='\x00\x08', nw_key="12345678",
               mac_addr='\x00\x00\x00\x00\x00\x00'):
    """Build a WPS Credential attribute.

    Any parameter may be set to None to omit the corresponding sub-attribute
    (used by the Credential protocol error tests).
    """
    fields = [(ATTR_NETWORK_INDEX, nw_idx),
              (ATTR_SSID, ssid),
              (ATTR_AUTH_TYPE, auth_type),
              (ATTR_ENCR_TYPE, encr_type),
              (ATTR_NETWORK_KEY, nw_key),
              (ATTR_MAC_ADDR, mac_addr)]
    attrs = b''
    for attr_id, value in fields:
        if value is not None:
            attrs += build_wsc_attr(attr_id, value)
    return build_wsc_attr(ATTR_CRED, attrs)
def test_wps_ext_cred_proto_success(dev, apdev):
    """WPS and Credential: success"""
    # Credential carries the STA's own MAC address; STA should connect.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac)
    wps_run_cred_proto(dev, apdev, cred, connect=True)
def test_wps_ext_cred_proto_mac_addr_mismatch(dev, apdev):
    """WPS and Credential: MAC Address mismatch"""
    # Default all-zero MAC Address does not match the Enrollee; the STA is
    # still expected to accept the credential and connect.
    cred = build_cred()
    wps_run_cred_proto(dev, apdev, cred, connect=True)
def test_wps_ext_cred_proto_zero_padding(dev, apdev):
    """WPS and Credential: zeropadded attributes"""
    # SSID and Network Key include a trailing NUL octet; STA should still
    # accept the credential and connect.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, ssid='test-wps-conf\x00',
                      nw_key="12345678\x00")
    wps_run_cred_proto(dev, apdev, cred, connect=True)
def test_wps_ext_cred_proto_ssid_missing(dev, apdev):
    """WPS and Credential: SSID missing"""
    # Without an SSID attribute the STA is expected to NACK the credential.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, ssid=None)
    wps_run_cred_proto(dev, apdev, cred)
def test_wps_ext_cred_proto_ssid_zero_len(dev, apdev):
    """WPS and Credential: Zero-length SSID"""
    # Zero-length SSID: exchange completes (WSC_Done) but no connection.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, ssid="")
    wps_run_cred_proto(dev, apdev, cred, no_connect=True)
def test_wps_ext_cred_proto_auth_type_missing(dev, apdev):
    """WPS and Credential: Auth Type missing"""
    # Missing Authentication Type attribute; STA expected to NACK.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, auth_type=None)
    wps_run_cred_proto(dev, apdev, cred)
def test_wps_ext_cred_proto_encr_type_missing(dev, apdev):
    """WPS and Credential: Encr Type missing"""
    # Missing Encryption Type attribute; STA expected to NACK.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, encr_type=None)
    wps_run_cred_proto(dev, apdev, cred)
def test_wps_ext_cred_proto_network_key_missing(dev, apdev):
    """WPS and Credential: Network Key missing"""
    # Missing Network Key with a non-open Auth Type; STA expected to NACK.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, nw_key=None)
    wps_run_cred_proto(dev, apdev, cred)
def test_wps_ext_cred_proto_network_key_missing_open(dev, apdev):
    """WPS and Credential: Network Key missing (open)"""
    # Open network (Auth Type 0x0001 / Encr Type 0x0001 = none), so a
    # missing Network Key is valid: WSC_Done is sent but no connection.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, auth_type='\x00\x01',
                      encr_type='\x00\x01', nw_key=None, ssid="foo")
    wps_run_cred_proto(dev, apdev, cred, no_connect=True)
def test_wps_ext_cred_proto_mac_addr_missing(dev, apdev):
    """WPS and Credential: MAC Address missing"""
    # Credential without a MAC Address attribute; STA expected to NACK.
    cred = build_cred(mac_addr=None)
    wps_run_cred_proto(dev, apdev, cred)
def test_wps_ext_cred_proto_invalid_encr_type(dev, apdev):
    """WPS and Credential: Invalid Encr Type"""
    # Encr Type 0x0000 is not a valid value; STA expected to NACK.
    enrollee_mac = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    cred = build_cred(mac_addr=enrollee_mac, encr_type='\x00\x00')
    wps_run_cred_proto(dev, apdev, cred)
def test_wps_ext_cred_proto_missing_cred(dev, apdev):
    """WPS and Credential: Missing Credential"""
    # Empty Encrypted Settings payload (no Credential at all); STA NACKs.
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    wps_run_cred_proto(dev, apdev, b'')
def test_wps_ext_proto_m2_no_public_key(dev, apdev):
    """WPS and no Public Key in M2"""
    # Acting as external Registrar, send an M2 whose Public Key attribute is
    # omitted (r_pk=None to build_m2) and verify the STA rejects it.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, None)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    # Verify STA NACK's the credential
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m2_invalid_public_key(dev, apdev):
    """WPS and invalid Public Key in M2"""
    # Acting as external Registrar, send an M2 whose Public Key attribute is
    # an invalid all-0xff value and verify the STA rejects it.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, 192*b'\xff')
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    # Verify STA NACK's the credential
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m2_public_key_oom(dev, apdev):
    """WPS and Public Key OOM in M2"""
    # Send a valid M2 while forcing an allocation failure in the STA's
    # wps_process_pubkey() and verify the STA rejects the exchange cleanly.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    with alloc_fail(dev[0], 1, "wpabuf_alloc_copy;wps_process_pubkey"):
        send_wsc_msg(dev[0], bssid, m2)
        eap_id = (eap_id + 1) % 256
        # Verify STA NACK's the credential
        msg = get_wsc_msg(dev[0])
        if msg['wsc_opcode'] != WSC_NACK:
            raise Exception("Unexpected message - expected WSC_Nack")
        dev[0].request("WPS_CANCEL")
        send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
        dev[0].wait_disconnected()
def test_wps_ext_proto_nack_m3(dev, apdev):
    """WPS and NACK M3"""
    # Acting as external Registrar, answer the STA's M3 with a WSC_NACK and
    # verify the STA reports WPS-FAIL with the supplied Config Error.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    logger.debug("Send NACK to STA")
    # Config Error 0x0123 = 291, matched against the WPS-FAIL event below.
    msg, attrs = build_nack(eap_id, m1_attrs[ATTR_ENROLLEE_NONCE],
                            r_nonce, config_error='\x01\x23')
    send_wsc_msg(dev[0], bssid, msg)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=5)
    if ev is None:
        raise Exception("Failure not reported")
    if "msg=7 config_error=291" not in ev:
        raise Exception("Unexpected failure reason: " + ev)
def test_wps_ext_proto_nack_m5(dev, apdev):
    """WPS and NACK M5"""
    # Acting as external Registrar, complete the exchange through M4, then
    # answer the STA's M5 with a WSC_NACK and verify the WPS-FAIL report.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
    raw_m4_attrs = attrs
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 from STA")
    msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
    logger.debug("Send NACK to STA")
    # Config Error 0x0124 = 292, matched against the WPS-FAIL event below.
    msg, attrs = build_nack(eap_id, m1_attrs[ATTR_ENROLLEE_NONCE],
                            r_nonce, config_error='\x01\x24')
    send_wsc_msg(dev[0], bssid, msg)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=5)
    if ev is None:
        raise Exception("Failure not reported")
    if "msg=9 config_error=292" not in ev:
        raise Exception("Unexpected failure reason: " + ev)
def wps_nack_m3(dev, apdev):
    """Run a WPS PBC exchange up to receiving M3, for NACK/ACK error tests.

    Acts as an external Registrar toward the STA (PBC, Device Password ID
    0x0004), completes the exchange through M2/M3 and returns
    (eap_id, e_nonce, r_nonce, bssid) so the caller can send a crafted
    WSC_NACK/WSC_ACK in place of M4.
    """
    pin = "00000000"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pbc=True)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    # Device Password ID 0x0004 = PBC
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk, dev_pw_id='\x00\x04')
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    return eap_id, m1_attrs[ATTR_ENROLLEE_NONCE], r_nonce, bssid
def test_wps_ext_proto_nack_m3_no_config_error(dev, apdev):
    """WPS and NACK M3 missing Config Error"""
    # NACK the STA's M3 with the Config Error attribute left out.
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    nack, _ = build_nack(eap_id, e_nonce, r_nonce, config_error=None)
    send_wsc_msg(dev[0], bssid, nack)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_no_e_nonce(dev, apdev):
    """WPS and NACK M3 missing E-Nonce"""
    # NACK the STA's M3 with the Enrollee Nonce attribute left out.
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    nack, _ = build_nack(eap_id, None, r_nonce)
    send_wsc_msg(dev[0], bssid, nack)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_e_nonce_mismatch(dev, apdev):
    """WPS and NACK M3 E-Nonce mismatch"""
    # NACK the STA's M3 with an all-zero (wrong) Enrollee Nonce.
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    nack, _ = build_nack(eap_id, 16*'\x00', r_nonce)
    send_wsc_msg(dev[0], bssid, nack)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_no_r_nonce(dev, apdev):
    """WPS and NACK M3 missing R-Nonce"""
    # NACK the STA's M3 with the Registrar Nonce attribute left out.
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    nack, _ = build_nack(eap_id, e_nonce, None)
    send_wsc_msg(dev[0], bssid, nack)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_r_nonce_mismatch(dev, apdev):
    """WPS and NACK M3 R-Nonce mismatch"""
    # NACK the STA's M3 with an all-zero (wrong) Registrar Nonce.
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    nack, _ = build_nack(eap_id, e_nonce, 16*'\x00')
    send_wsc_msg(dev[0], bssid, nack)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_no_msg_type(dev, apdev):
    """WPS and NACK M3 no Message Type"""
    # NACK the STA's M3 with the Message Type attribute left out.
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    nack, _ = build_nack(eap_id, e_nonce, r_nonce, msg_type=None)
    send_wsc_msg(dev[0], bssid, nack)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_invalid_msg_type(dev, apdev):
    """WPS and NACK M3 invalid Message Type"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    # 123 is not a valid WSC Message Type value for a NACK.
    msg, attrs = build_nack(eap_id, e_nonce, r_nonce, msg_type=123)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_nack_m3_invalid_attr(dev, apdev):
    """WPS and NACK M3 invalid attribute"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send NACK to STA")
    # Truncated attribute (type + partial length, no payload) to exercise
    # the STA's WSC attribute parser error path.
    attrs = b'\x10\x10\x00'
    msg = build_eap_wsc(1, eap_id, attrs, opcode=WSC_NACK)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_no_e_nonce(dev, apdev):
    """WPS and ACK M3 missing E-Nonce"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    # Fixed copy-paste log message: this test sends an ACK, not a NACK.
    logger.debug("Send ACK to STA")
    # Omit the Enrollee Nonce (None) from the ACK sent to the STA.
    msg, attrs = build_ack(eap_id, None, r_nonce)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_e_nonce_mismatch(dev, apdev):
    """WPS and ACK M3 E-Nonce mismatch"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    # Fixed copy-paste log message: this test sends an ACK, not a NACK.
    logger.debug("Send ACK to STA")
    # All-zeros E-Nonce cannot match the one from M1; bytes literal for
    # consistency with the other binary nonce values in this file.
    msg, attrs = build_ack(eap_id, 16*b'\x00', r_nonce)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_no_r_nonce(dev, apdev):
    """WPS and ACK M3 missing R-Nonce"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    # Fixed copy-paste log message: this test sends an ACK, not a NACK.
    logger.debug("Send ACK to STA")
    # Omit the Registrar Nonce (None) from the ACK sent to the STA.
    msg, attrs = build_ack(eap_id, e_nonce, None)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_r_nonce_mismatch(dev, apdev):
    """WPS and ACK M3 R-Nonce mismatch"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    # Fixed copy-paste log message: this test sends an ACK, not a NACK.
    logger.debug("Send ACK to STA")
    # All-zeros R-Nonce cannot match the registrar nonce used in M2; bytes
    # literal for consistency with the other nonce values in this file.
    msg, attrs = build_ack(eap_id, e_nonce, 16*b'\x00')
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_no_msg_type(dev, apdev):
    """WPS and ACK M3 no Message Type"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    # Fixed copy-paste log message: this test sends an ACK, not a NACK.
    logger.debug("Send ACK to STA")
    # Leave out the Message Type attribute entirely (msg_type=None).
    msg, attrs = build_ack(eap_id, e_nonce, r_nonce, msg_type=None)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_invalid_msg_type(dev, apdev):
    """WPS and ACK M3 invalid Message Type"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    # Fixed copy-paste log message: this test sends an ACK, not a NACK.
    logger.debug("Send ACK to STA")
    # 123 is not a valid WSC Message Type value for an ACK.
    msg, attrs = build_ack(eap_id, e_nonce, r_nonce, msg_type=123)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3_invalid_attr(dev, apdev):
    """WPS and ACK M3 invalid attribute"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
    # Truncated attribute (type + partial length, no payload) to exercise
    # the STA's WSC attribute parser error path.
    attrs = b'\x10\x10\x00'
    msg = build_eap_wsc(1, eap_id, attrs, opcode=WSC_ACK)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def test_wps_ext_proto_ack_m3(dev, apdev):
    """WPS and ACK M3"""
    eap_id, e_nonce, r_nonce, bssid = wps_nack_m3(dev, apdev)
    logger.debug("Send ACK to STA")
    # Well-formed ACK in response to M3; an ACK is not a valid reply here,
    # so the STA should abandon the exchange.
    msg, attrs = build_ack(eap_id, e_nonce, r_nonce)
    send_wsc_msg(dev[0], bssid, msg)
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    dev[0].flush_scan_cache()
def wps_to_m3_helper(dev, apdev):
    """Run the WPS registrar role against dev[0] up to receiving M3.

    Performs the EAP identity exchange and the M1/M2 exchange using fixed
    test nonces/UUID, then waits for M3 from the STA. Returns all state a
    caller needs to continue the exchange (M4 and beyond).
    """
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    wps_ext_eap_wsc(dev[0], hapd, bssid, "EAP-WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    # Fixed registrar-side test values
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Receive M1 from STA")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M1)
    # Next EAP identifier wraps at 8 bits
    eap_id = (msg['eap_identifier'] + 1) % 256
    authkey, keywrapkey = wsc_dh_kdf(m1_attrs[ATTR_PUBLIC_KEY], own_private,
                                     mac_addr, m1_attrs[ATTR_ENROLLEE_NONCE],
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, pin,
                                                   m1_attrs[ATTR_PUBLIC_KEY],
                                                   e_pk)
    logger.debug("Send M2 to STA")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, eap_id,
                                m1_attrs[ATTR_ENROLLEE_NONCE],
                                r_nonce, uuid_r, e_pk)
    send_wsc_msg(dev[0], bssid, m2)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M3 from STA")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M3)
    return eap_id, m1_attrs, r_nonce, bssid, r_hash1, r_hash2, r_s1, r_s2, raw_m3_attrs, authkey, keywrapkey
def wps_to_m3(dev, apdev):
    """Run the WPS exchange up to M3 and return a reduced state tuple."""
    state = wps_to_m3_helper(dev, apdev)
    (eap_id, m1_attrs, r_nonce, bssid, r_hash1, r_hash2,
     r_s1, r_s2, raw_m3_attrs, authkey, keywrapkey) = state
    # Drop r_s2 and replace the full M1 attribute dict with just the
    # Enrollee Nonce, which is all that the M4-level tests need.
    return (eap_id, m1_attrs[ATTR_ENROLLEE_NONCE], r_nonce, bssid,
            r_hash1, r_hash2, r_s1, raw_m3_attrs, authkey, keywrapkey)
def wps_to_m5(dev, apdev):
    """Run the WPS registrar role against dev[0] up to receiving M5.

    Continues from wps_to_m3_helper() by sending a valid M4 (including
    R-Hash1/R-Hash2 and encrypted R-SNonce1) and waiting for M5.
    """
    eap_id, m1_attrs, r_nonce, bssid, r_hash1, r_hash2, r_s1, r_s2, raw_m3_attrs, authkey, keywrapkey = wps_to_m3_helper(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, m1_attrs[ATTR_ENROLLEE_NONCE])
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    # Authenticator covers the previous message (M3) and this one
    attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
    raw_m4_attrs = attrs
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 from STA")
    msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M5)
    return eap_id, m1_attrs[ATTR_ENROLLEE_NONCE], r_nonce, bssid, r_hash1, r_hash2, r_s2, raw_m5_attrs, authkey, keywrapkey
def test_wps_ext_proto_m4_missing_r_hash1(dev, apdev):
    """WPS and no R-Hash1 in M4"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    # R-Hash1 intentionally omitted; the STA must reject M4 with a NACK.
    #attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, m3, attrs)
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m4_missing_r_hash2(dev, apdev):
    """WPS and no R-Hash2 in M4"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    # R-Hash2 intentionally omitted; the STA must reject M4 with a NACK.
    #attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, m3, attrs)
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m4_missing_r_snonce1(dev, apdev):
    """WPS and no R-SNonce1 in M4"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    # Encrypted settings carry no R-SNonce1 (empty plaintext); the STA
    # must reject M4 with a NACK.
    #data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    data = b''
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, m3, attrs)
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m4_invalid_pad_string(dev, apdev):
    """WPS and invalid pad string in M4"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    # Build the Encrypted Settings attribute by hand (instead of using
    # build_attr_encr_settings()) so the CBC padding can be corrupted:
    # here the final pad byte is pad_len - 1 instead of pad_len.
    m = hmac.new(authkey, data, hashlib.sha256)
    kwa = m.digest()[0:8]
    data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
    iv = 16*b'\x99'
    aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
    pad_len = 16 - len(data) % 16
    ps = (pad_len - 1) * struct.pack('B', pad_len) + struct.pack('B', pad_len - 1)
    data += ps
    wrapped = aes.encrypt(data)
    attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
    attrs += build_attr_authenticator(authkey, m3, attrs)
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m4_invalid_pad_value(dev, apdev):
    """WPS and invalid pad value in M4"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    # Build the Encrypted Settings attribute by hand so the CBC padding
    # can be corrupted: here the final pad byte is 255 (> block size).
    m = hmac.new(authkey, data, hashlib.sha256)
    kwa = m.digest()[0:8]
    data += build_wsc_attr(ATTR_KEY_WRAP_AUTH, kwa)
    iv = 16*b'\x99'
    aes = AES.new(keywrapkey, AES.MODE_CBC, iv)
    pad_len = 16 - len(data) % 16
    ps = (pad_len - 1) * struct.pack('B', pad_len) + struct.pack('B', 255)
    data += ps
    wrapped = aes.encrypt(data)
    attrs += build_wsc_attr(ATTR_ENCR_SETTINGS, iv + wrapped)
    attrs += build_attr_authenticator(authkey, m3, attrs)
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m4_no_encr_settings(dev, apdev):
    """WPS and no Encr Settings in M4"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s1, m3, authkey, keywrapkey = wps_to_m3(dev, apdev)
    logger.debug("Send M4 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    # Encrypted Settings attribute (carrying R-SNonce1) intentionally
    # omitted; the STA must reject M4 with a NACK.
    attrs += build_attr_authenticator(authkey, m3, attrs)
    m4 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m4)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M5 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m6_missing_r_snonce2(dev, apdev):
    """WPS and no R-SNonce2 in M6"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s2, m5, authkey, keywrapkey = wps_to_m5(dev, apdev)
    logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M6)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    # Encrypted settings carry no R-SNonce2 (empty plaintext); the STA
    # must reject M6 with a NACK.
    #data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
    data = b''
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, m5, attrs)
    m6 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m6)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M7 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m6_no_encr_settings(dev, apdev):
    """WPS and no Encr Settings in M6"""
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s2, m5, authkey, keywrapkey = wps_to_m5(dev, apdev)
    logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M6)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
    # Encrypted Settings attribute intentionally omitted; the STA must
    # reject M6 with a NACK.
    #attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, m5, attrs)
    m6 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m6)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M7 (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def test_wps_ext_proto_m8_no_encr_settings(dev, apdev):
    """WPS and no Encr Settings in M8"""
    # Docstring fixed: the Encr Settings attribute is omitted from M8
    # below (M6 here is sent fully formed).
    eap_id, e_nonce, r_nonce, bssid, r_hash1, r_hash2, r_s2, m5, authkey, keywrapkey = wps_to_m5(dev, apdev)
    logger.debug("Send M6 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M6)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, m5, attrs)
    raw_m6_attrs = attrs
    m6 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m6)
    eap_id = (eap_id + 1) % 256
    logger.debug("Receive M7 from STA")
    msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(dev[0], WSC_MSG, WPS_M7)
    logger.debug("Send M8 to STA")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M8)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    # Encrypted Settings (Credential) intentionally omitted from M8; the
    # STA must reject M8 with a NACK.
    #attrs += build_attr_encr_settings(authkey, keywrapkey, m8_cred)
    attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
    raw_m8_attrs = attrs
    m8 = build_eap_wsc(1, eap_id, attrs)
    send_wsc_msg(dev[0], bssid, m8)
    logger.debug("Receive WSC_Done (NACK) from STA")
    msg = get_wsc_msg(dev[0])
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_Nack")
    dev[0].request("WPS_CANCEL")
    send_wsc_msg(dev[0], bssid, build_eap_failure(eap_id))
    dev[0].wait_disconnected()
def wps_start_ext_reg(apdev, dev):
    """Start an AP with a fixed AP PIN and begin external registrar role.

    Enables external EAPOL frame I/O on both ends so the test can act as
    the WPS messages' man-in-the-middle; returns (STA address, BSSID,
    hostapd instance).
    """
    addr = dev.own_addr()
    bssid = apdev['bssid']
    ssid = "test-wps-conf"
    appin = "12345670"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "ap_pin": appin}
    hapd = hostapd.add_ap(apdev, params)
    dev.scan_for_bss(bssid, freq="2412")
    hapd.request("SET ext_eapol_frame_io 1")
    dev.request("SET ext_eapol_frame_io 1")
    dev.request("WPS_REG " + bssid + " " + appin)
    return addr, bssid, hapd
def wps_run_ap_settings_proto(dev, apdev, ap_settings, success):
    """Run the external registrar protocol against the AP through M8.

    ap_settings: encoded AP Settings attributes carried in the M8
    Encr Settings attribute, or None to omit Encr Settings entirely.
    success: whether the AP is expected to accept the new settings
    (WPS-NEW-AP-SETTINGS + WSC_Done) or fail (WPS-FAIL + WSC_NACK).
    """
    addr, bssid, hapd = wps_start_ext_reg(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)

    logger.debug("Receive M1 from AP")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M1)
    mac_addr = m1_attrs[ATTR_MAC_ADDR]
    e_nonce = m1_attrs[ATTR_ENROLLEE_NONCE]
    e_pk = m1_attrs[ATTR_PUBLIC_KEY]

    appin = '12345670'
    uuid_r = 16*b'\x33'
    r_nonce = 16*b'\x44'
    own_private, r_pk = wsc_dh_init()
    authkey, keywrapkey = wsc_dh_kdf(e_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    r_s1, r_s2, r_hash1, r_hash2 = wsc_dev_pw_hash(authkey, appin, e_pk, r_pk)

    logger.debug("Send M2 to AP")
    m2, raw_m2_attrs = build_m2(authkey, raw_m1_attrs, msg['eap_identifier'],
                                e_nonce, r_nonce, uuid_r, r_pk, eap_code=2)
    send_wsc_msg(hapd, addr, m2)

    logger.debug("Receive M3 from AP")
    msg, m3_attrs, raw_m3_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M3)

    logger.debug("Send M4 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M4)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_R_HASH1, r_hash1)
    attrs += build_wsc_attr(ATTR_R_HASH2, r_hash2)
    data = build_wsc_attr(ATTR_R_SNONCE1, r_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m3_attrs, attrs)
    raw_m4_attrs = attrs
    m4 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m4)

    logger.debug("Receive M5 from AP")
    msg, m5_attrs, raw_m5_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M5)

    # Fixed copy-paste log messages below: in this exchange the peer is
    # the AP (acting as enrollee), not a STA.
    logger.debug("Send M6 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M6)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    data = build_wsc_attr(ATTR_R_SNONCE2, r_s2)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m5_attrs, attrs)
    raw_m6_attrs = attrs
    m6 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m6)

    logger.debug("Receive M7 from AP")
    msg, m7_attrs, raw_m7_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M7)

    logger.debug("Send M8 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M8)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    if ap_settings:
        attrs += build_attr_encr_settings(authkey, keywrapkey, ap_settings)
    attrs += build_attr_authenticator(authkey, raw_m7_attrs, attrs)
    raw_m8_attrs = attrs
    m8 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m8)

    if success:
        ev = hapd.wait_event(["WPS-NEW-AP-SETTINGS"], timeout=5)
        if ev is None:
            raise Exception("New AP settings not reported")
        logger.debug("Receive WSC_Done from AP")
        msg = get_wsc_msg(hapd)
        if msg['wsc_opcode'] != WSC_Done:
            raise Exception("Unexpected message - expected WSC_Done")
        logger.debug("Send WSC_ACK to AP")
        ack, attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce,
                               eap_code=2)
        send_wsc_msg(hapd, addr, ack)
        dev[0].wait_disconnected()
    else:
        ev = hapd.wait_event(["WPS-FAIL"], timeout=5)
        if ev is None:
            raise Exception("WPS failure not reported")
        logger.debug("Receive WSC_NACK from AP")
        msg = get_wsc_msg(hapd)
        if msg['wsc_opcode'] != WSC_NACK:
            raise Exception("Unexpected message - expected WSC_NACK")
        logger.debug("Send WSC_NACK to AP")
        nack, attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
                                 eap_code=2)
        send_wsc_msg(hapd, addr, nack)
        dev[0].wait_disconnected()
def test_wps_ext_ap_settings_success(dev, apdev):
    """WPS and AP Settings: success"""
    # Complete, valid AP Settings set (including the AP's real MAC
    # address); the AP is expected to accept it.
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, '\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, "test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, '\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, '\x00\x01')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, '')
    ap_settings += build_wsc_attr(ATTR_MAC_ADDR, binascii.unhexlify(apdev[0]['bssid'].replace(':', '')))
    wps_run_ap_settings_proto(dev, apdev, ap_settings, True)
@remote_compatible
def test_wps_ext_ap_settings_missing(dev, apdev):
    """WPS and AP Settings: missing"""
    # No Encr Settings attribute at all in M8; the AP must fail the exchange.
    wps_run_ap_settings_proto(dev, apdev, None, False)
@remote_compatible
def test_wps_ext_ap_settings_mac_addr_mismatch(dev, apdev):
    """WPS and AP Settings: MAC Address mismatch"""
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, '\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, "test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, '\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, '\x00\x01')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, '')
    # All-zeros MAC address that cannot match the AP's BSSID; use a bytes
    # literal for consistency with other binary attribute values here.
    ap_settings += build_wsc_attr(ATTR_MAC_ADDR, 6*b'\x00')
    # A mismatching MAC Address attribute is currently accepted (success).
    wps_run_ap_settings_proto(dev, apdev, ap_settings, True)
@remote_compatible
def test_wps_ext_ap_settings_mac_addr_missing(dev, apdev):
    """WPS and AP Settings: missing MAC Address"""
    # MAC Address attribute left out entirely; the AP must reject the settings.
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, '\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, "test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, '\x00\x01')
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, '\x00\x01')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, '')
    wps_run_ap_settings_proto(dev, apdev, ap_settings, False)
@remote_compatible
def test_wps_ext_ap_settings_reject_encr_type(dev, apdev):
    """WPS and AP Settings: reject Encr Type"""
    ap_settings = build_wsc_attr(ATTR_NETWORK_INDEX, '\x01')
    ap_settings += build_wsc_attr(ATTR_SSID, "test")
    ap_settings += build_wsc_attr(ATTR_AUTH_TYPE, '\x00\x01')
    # Encr Type 0x0000 is not an acceptable value; the AP must reject it.
    ap_settings += build_wsc_attr(ATTR_ENCR_TYPE, '\x00\x00')
    ap_settings += build_wsc_attr(ATTR_NETWORK_KEY, '')
    ap_settings += build_wsc_attr(ATTR_MAC_ADDR, binascii.unhexlify(apdev[0]['bssid'].replace(':', '')))
    wps_run_ap_settings_proto(dev, apdev, ap_settings, False)
@remote_compatible
def test_wps_ext_ap_settings_m2d(dev, apdev):
    """WPS and AP Settings: M2D"""
    addr, bssid, hapd = wps_start_ext_reg(apdev[0], dev[0])
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive M1 from AP")
    msg, m1_attrs, raw_m1_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M1)
    e_nonce = m1_attrs[ATTR_ENROLLEE_NONCE]
    # Fixed test values as bytes literals, matching the same values used
    # elsewhere in this file (e.g. wps_run_ap_settings_proto()).
    r_nonce = 16*b'\x44'
    uuid_r = 16*b'\x33'
    logger.debug("Send M2D to AP")
    m2d, raw_m2d_attrs = build_m2d(raw_m1_attrs, msg['eap_identifier'],
                                   e_nonce, r_nonce, uuid_r,
                                   dev_pw_id=b'\x00\x00', eap_code=2)
    send_wsc_msg(hapd, addr, m2d)
    ev = hapd.wait_event(["WPS-M2D"], timeout=5)
    if ev is None:
        raise Exception("M2D not reported")
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
def wps_wait_ap_nack(hapd, dev, e_nonce, r_nonce):
    """Expect WSC_NACK from the AP, reply with our own NACK and wait for
    the STA to disconnect."""
    logger.debug("Receive WSC_NACK from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_NACK:
        raise Exception("Unexpected message - expected WSC_NACK")
    logger.debug("Send WSC_NACK to AP")
    nack, attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
                             eap_code=2)
    send_wsc_msg(hapd, dev.own_addr(), nack)
    dev.wait_disconnected()
@remote_compatible
def test_wps_ext_m3_missing_e_hash1(dev, apdev):
    """WPS proto: M3 missing E-Hash1"""
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    # Act as the enrollee with fixed test UUID/nonce values
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    # E-Hash1 intentionally omitted; the AP must NACK the exchange.
    #attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_missing_e_hash2(dev, apdev):
    """WPS proto: M3 missing E-Hash2"""
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    # Act as the enrollee with fixed test UUID/nonce values
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    # E-Hash2 intentionally omitted; the AP must NACK the exchange.
    #attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m5_missing_e_snonce1(dev, apdev):
    """WPS proto: M5 missing E-SNonce1"""
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    # Act as the enrollee with fixed test UUID/nonce values
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    # Encrypted settings carry no E-SNonce1 (empty plaintext); the AP
    # must NACK the exchange.
    #data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    data = b''
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m5_e_snonce1_mismatch(dev, apdev):
    """WPS proto: M5 E-SNonce1 mismatch"""
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    # Act as the enrollee with fixed test UUID/nonce values
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    # All-zeros E-SNonce1 cannot match the E-Hash1 commitment; use a bytes
    # literal for consistency with the other nonce values in this file.
    data = build_wsc_attr(ATTR_E_SNONCE1, 16*b'\x00')
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
def test_wps_ext_m7_missing_e_snonce2(dev, apdev):
    """WPS proto: M7 missing E-SNonce2"""
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    # Act as the enrollee with fixed test UUID/nonce values
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    logger.debug("Receive M6 from AP")
    msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
    logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M7)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    # Encrypted settings carry no E-SNonce2 (empty plaintext); the AP
    # must NACK the exchange.
    #data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
    data = b''
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
    m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    raw_m7_attrs = attrs
    send_wsc_msg(hapd, addr, m7)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m7_e_snonce2_mismatch(dev, apdev):
    """WPS proto: M7 E-SNonce2 mismatch"""
    # Drive the enrollee side of the WPS exchange externally through M6 and
    # then send an M7 whose E-SNonce2 does not match the E-Hash2 commitment
    # made in M3; the AP must detect this and reply with WSC_NACK.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    logger.debug("Receive M6 from AP")
    msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
    logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M7)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    # Deliberately wrong E-SNonce2 (all zeros instead of e_s2).  Use a bytes
    # literal for consistency with the other nonce payloads in this file
    # (cf. the 16*b'\x00' values used for build_nack/build_ack mismatch tests).
    data = build_wsc_attr(ATTR_E_SNONCE2, 16*b'\x00')
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
    m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    raw_m7_attrs = attrs
    send_wsc_msg(hapd, addr, m7)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m1_pubkey_oom(dev, apdev):
    """WPS proto: M1 PubKey OOM"""
    # Force an allocation failure in the AP's Public Key processing while it
    # parses M1 and verify that the exchange ends with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    # bytes literals for UUID-E/E-Nonce for consistency with every other test
    # in this file (str values here were an inconsistency left over from the
    # Python 2 version and would break if concatenated with bytes).
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    with alloc_fail(hapd, 1, "wpabuf_alloc_copy;wps_process_pubkey"):
        m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                    e_nonce, e_pk)
        send_wsc_msg(hapd, addr, m1)
        wps_wait_eap_failure(hapd, dev[0])
def wps_wait_eap_failure(hapd, dev):
    """Expect the AP to report EAP-Failure, then wait for *dev* to disconnect."""
    if hapd.wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5) is None:
        raise Exception("EAP-Failure not reported")
    dev.wait_disconnected()
@remote_compatible
def test_wps_ext_m3_m1(dev, apdev):
    """WPS proto: M3 replaced with M1"""
    # Complete the M1/M2 exchange, then send a frame in the M3 slot whose
    # Msg Type claims to be M1; the AP must abort with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3(M1) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type: M1 instead of M3
    attrs += build_attr_msg_type(WPS_M1)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m5_m3(dev, apdev):
    """WPS proto: M5 replaced with M3"""
    # Run the exchange normally through M4, then replay an M3-typed message
    # in the M5 slot; the AP must reject it with WSC_NACK.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5(M3) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type: M3 instead of M5
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_m2(dev, apdev):
    """WPS proto: M3 replaced with M2"""
    # Complete M1/M2, then send an M2-typed message in the M3 slot; the AP
    # must terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3(M2) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type: M2 instead of M3 (and no E-Hash attributes)
    attrs += build_attr_msg_type(WPS_M2)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m3_m5(dev, apdev):
    """WPS proto: M3 replaced with M5"""
    # Complete M1/M2, then send an M5-typed message in the M3 slot; the AP
    # must reject it with WSC_NACK.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3(M5) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type: M5 instead of M3
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_m7(dev, apdev):
    """WPS proto: M3 replaced with M7"""
    # Complete M1/M2, then send an M7-typed message in the M3 slot; the AP
    # must reject it with WSC_NACK.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3(M7) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type: M7 instead of M3
    attrs += build_attr_msg_type(WPS_M7)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m3_done(dev, apdev):
    """WPS proto: M3 replaced with WSC_Done"""
    # Complete M1/M2, then send a WSC_Done message where M3 is expected;
    # the AP must terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3(WSC_Done) to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type and Op-Code: WSC_Done instead of M3
    attrs += build_attr_msg_type(WPS_WSC_DONE)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    send_wsc_msg(hapd, addr, m3)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_invalid(dev, apdev):
    """WPS proto: M2 followed by invalid NACK"""
    # Complete M1/M2, then send a WSC_NACK whose attribute data cannot be
    # parsed; the AP must terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_NACK to AP")
    # Truncated attribute header - not parseable as a WSC attribute list
    attrs = b'\x10\x00\x00'
    nack = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_NACK)
    send_wsc_msg(hapd, addr, nack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_no_msg_type(dev, apdev):
    """WPS proto: M2 followed by NACK without Msg Type"""
    # Complete M1/M2, then send a WSC_NACK that omits the mandatory
    # Msg Type attribute; the AP must terminate with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_NACK to AP")
    # msg_type=None makes build_nack leave out the Msg Type attribute
    nack, attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
                             msg_type=None, eap_code=2)
    send_wsc_msg(hapd, addr, nack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_invalid_msg_type(dev, apdev):
    """WPS proto: M2 followed by NACK with invalid Msg Type"""
    # Complete M1/M2, then send a WSC_NACK whose Msg Type attribute carries
    # WSC_ACK instead of WSC_NACK; the AP must terminate with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_NACK to AP")
    nack, attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
                             msg_type=WPS_WSC_ACK, eap_code=2)
    send_wsc_msg(hapd, addr, nack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_e_nonce_mismatch(dev, apdev):
    """WPS proto: M2 followed by NACK with e-nonce mismatch"""
    # Complete M1/M2, then send a WSC_NACK carrying an Enrollee Nonce that
    # does not match the one from M1; the AP must terminate with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_NACK to AP")
    # 16*b'\x00' instead of e_nonce creates the mismatch
    nack, attrs = build_nack(msg['eap_identifier'], 16*b'\x00', r_nonce,
                             eap_code=2)
    send_wsc_msg(hapd, addr, nack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_nack_no_config_error(dev, apdev):
    """WPS proto: M2 followed by NACK without Config Error"""
    # Complete M1/M2, then send a WSC_NACK that omits the mandatory
    # Configuration Error attribute; the AP must terminate with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_NACK to AP")
    nack, attrs = build_nack(msg['eap_identifier'], e_nonce, r_nonce,
                             config_error=None, eap_code=2)
    send_wsc_msg(hapd, addr, nack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_invalid(dev, apdev):
    """WPS proto: M2 followed by invalid ACK"""
    # Complete M1/M2, then send a WSC_ACK whose attribute data cannot be
    # parsed; the AP must terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_ACK to AP")
    # Truncated attribute header - not parseable as a WSC attribute list
    attrs = b'\x10\x00\x00'
    ack = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_ACK)
    send_wsc_msg(hapd, addr, ack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack(dev, apdev):
    """WPS proto: M2 followed by ACK"""
    # Complete M1/M2, then send a well-formed WSC_ACK where M3 is expected;
    # the AP must terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_ACK to AP")
    ack, attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce, eap_code=2)
    send_wsc_msg(hapd, addr, ack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_no_msg_type(dev, apdev):
    """WPS proto: M2 followed by ACK missing Msg Type"""
    # Complete M1/M2, then send a WSC_ACK that omits the mandatory Msg Type
    # attribute; the AP must terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_ACK to AP")
    ack, attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce,
                           msg_type=None, eap_code=2)
    send_wsc_msg(hapd, addr, ack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_invalid_msg_type(dev, apdev):
    """WPS proto: M2 followed by ACK with invalid Msg Type"""
    # Complete M1/M2, then send a WSC_ACK whose Msg Type attribute carries
    # WSC_NACK instead of WSC_ACK; the AP must terminate with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_ACK to AP")
    ack, attrs = build_ack(msg['eap_identifier'], e_nonce, r_nonce,
                           msg_type=WPS_WSC_NACK, eap_code=2)
    send_wsc_msg(hapd, addr, ack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m2_ack_e_nonce_mismatch(dev, apdev):
    """WPS proto: M2 followed by ACK with e-nonce mismatch"""
    # Complete M1/M2, then send a WSC_ACK carrying an Enrollee Nonce that
    # does not match the one from M1; the AP must terminate with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send WSC_ACK to AP")
    # 16*b'\x00' instead of e_nonce creates the mismatch
    ack, attrs = build_ack(msg['eap_identifier'], 16*b'\x00', r_nonce,
                           eap_code=2)
    send_wsc_msg(hapd, addr, ack)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m1_invalid(dev, apdev):
    """WPS proto: M1 failing parsing"""
    # Send an M1 whose attribute data cannot be parsed; the AP must
    # terminate the exchange with EAP-Failure.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    logger.debug("Send M1 to AP")
    # Truncated attribute header - not parseable as a WSC attribute list
    attrs = b'\x10\x00\x00'
    m1 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m1)
    wps_wait_eap_failure(hapd, dev[0])
def test_wps_ext_m1_missing_msg_type(dev, apdev):
    """WPS proto: M1 missing Msg Type"""
    # Send an M1 that contains only the Version attribute (no Msg Type);
    # the AP must respond with WSC_NACK (all-zero nonces, since no valid
    # nonces were ever exchanged).
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    logger.debug("Send M1 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    m1 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m1)
    wps_wait_ap_nack(hapd, dev[0], 16*b'\x00', 16*b'\x00')
def wps_ext_wsc_done(dev, apdev):
    """Run an externally driven WPS exchange through M8.

    Completes the M1..M8 message sequence against the AP and returns
    (hapd, last received message, e_nonce, r_nonce) so that the caller can
    construct and send its own WSC_Done variant.
    """
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    logger.debug("Receive M6 from AP")
    msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
    logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M7)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
    m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    raw_m7_attrs = attrs
    send_wsc_msg(hapd, addr, m7)
    logger.debug("Receive M8 from AP")
    msg, m8_attrs, raw_m8_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M8)
    return hapd, msg, e_nonce, r_nonce
@remote_compatible
def test_wps_ext_wsc_done_invalid(dev, apdev):
    """WPS proto: invalid WSC_Done"""
    # Run the exchange through M8, then send a WSC_Done whose attribute
    # data cannot be parsed; the AP must terminate with EAP-Failure.
    hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
    logger.debug("Send WSC_Done to AP")
    # Truncated attribute header - not parseable as a WSC attribute list
    attrs = b'\x10\x00\x00'
    wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_wsc_done_no_msg_type(dev, apdev):
    """WPS proto: WSC_Done without Msg Type"""
    # Run the exchange through M8, then send a WSC_Done that omits the
    # mandatory Msg Type attribute; the AP must terminate with EAP-Failure.
    hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
    logger.debug("Send WSC_Done to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Deliberately omitted: Msg Type attribute
    #attrs += build_attr_msg_type(WPS_WSC_DONE)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_wsc_done_wrong_msg_type(dev, apdev):
    """WPS proto: WSC_Done with wrong Msg Type"""
    # Run the exchange through M8, then send a WSC_Done whose Msg Type
    # carries WSC_ACK; the AP must terminate with EAP-Failure.
    hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
    logger.debug("Send WSC_Done to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    # Wrong Msg Type: WSC_ACK instead of WSC_Done
    attrs += build_attr_msg_type(WPS_WSC_ACK)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_wsc_done_no_e_nonce(dev, apdev):
    """WPS proto: WSC_Done without e_nonce"""
    # Run the exchange through M8, then send a WSC_Done that omits the
    # Enrollee Nonce attribute; the AP must terminate with EAP-Failure.
    hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
    logger.debug("Send WSC_Done to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_WSC_DONE)
    # Deliberately omitted: Enrollee Nonce attribute
    #attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
    wps_wait_eap_failure(hapd, dev[0])
def test_wps_ext_wsc_done_no_r_nonce(dev, apdev):
    """WPS proto: WSC_Done without r_nonce"""
    # Run the exchange through M8, then send a WSC_Done that omits the
    # Registrar Nonce attribute; the AP must terminate with EAP-Failure.
    hapd, msg, e_nonce, r_nonce = wps_ext_wsc_done(dev, apdev)
    logger.debug("Send WSC_Done to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_WSC_DONE)
    attrs += build_wsc_attr(ATTR_ENROLLEE_NONCE, e_nonce)
    # Deliberately omitted: Registrar Nonce attribute
    #attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    wsc_done = build_eap_wsc(2, msg['eap_identifier'], attrs, opcode=WSC_Done)
    send_wsc_msg(hapd, dev[0].own_addr(), wsc_done)
    wps_wait_eap_failure(hapd, dev[0])
@remote_compatible
def test_wps_ext_m7_no_encr_settings(dev, apdev):
    """WPS proto: M7 without Encr Settings"""
    # Drive the full WPS registration protocol (M1..M6) over an externally
    # controlled EAP-WSC exchange, then send an M7 that omits the mandatory
    # Encrypted Settings attribute and expect the AP to respond with NACK.
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk)
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
    r_nonce = m2_attrs[ATTR_REGISTRAR_NONCE]
    r_pk = m2_attrs[ATTR_PUBLIC_KEY]
    # Derive the session keys from the DH exchange before building M3.
    authkey, keywrapkey = wsc_dh_kdf(r_pk, own_private, mac_addr, e_nonce,
                                     r_nonce)
    e_s1, e_s2, e_hash1, e_hash2 = wsc_dev_pw_hash(authkey, pin, e_pk, r_pk)
    logger.debug("Send M3 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M3)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    attrs += build_wsc_attr(ATTR_E_HASH1, e_hash1)
    attrs += build_wsc_attr(ATTR_E_HASH2, e_hash2)
    attrs += build_attr_authenticator(authkey, raw_m2_attrs, attrs)
    raw_m3_attrs = attrs
    m3 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m3)
    logger.debug("Receive M4 from AP")
    msg, m4_attrs, raw_m4_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M4)
    logger.debug("Send M5 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M5)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    data = build_wsc_attr(ATTR_E_SNONCE1, e_s1)
    attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m4_attrs, attrs)
    raw_m5_attrs = attrs
    m5 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    send_wsc_msg(hapd, addr, m5)
    logger.debug("Receive M6 from AP")
    msg, m6_attrs, raw_m6_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M6)
    logger.debug("Send M7 to AP")
    attrs = build_wsc_attr(ATTR_VERSION, '\x10')
    attrs += build_attr_msg_type(WPS_M7)
    attrs += build_wsc_attr(ATTR_REGISTRAR_NONCE, r_nonce)
    # Encrypted Settings (with E-S2) intentionally omitted from M7 to
    # trigger the protocol-violation path in the AP.
    #data = build_wsc_attr(ATTR_E_SNONCE2, e_s2)
    #attrs += build_attr_encr_settings(authkey, keywrapkey, data)
    attrs += build_attr_authenticator(authkey, raw_m6_attrs, attrs)
    m7 = build_eap_wsc(2, msg['eap_identifier'], attrs)
    raw_m7_attrs = attrs
    send_wsc_msg(hapd, addr, m7)
    wps_wait_ap_nack(hapd, dev[0], e_nonce, r_nonce)
@remote_compatible
def test_wps_ext_m1_workaround(dev, apdev):
    """WPS proto: M1 Manufacturer/Model workaround"""
    # Send an M1 with Manufacturer/Model strings matching a known
    # interoperability workaround case and verify the AP still answers
    # with M2 (i.e. the workaround does not break the exchange).
    pin = "12345670"
    addr, bssid, hapd = wps_start_ext(apdev[0], dev[0], pin=pin)
    wps_ext_eap_identity_req(dev[0], hapd, bssid)
    wps_ext_eap_identity_resp(hapd, dev[0], addr)
    logger.debug("Receive WSC/Start from AP")
    msg = get_wsc_msg(hapd)
    if msg['wsc_opcode'] != WSC_Start:
        raise Exception("Unexpected Op-Code for WSC/Start")
    mac_addr = binascii.unhexlify(dev[0].own_addr().replace(':', ''))
    uuid_e = 16*b'\x11'
    e_nonce = 16*b'\x22'
    own_private, e_pk = wsc_dh_init()
    logger.debug("Send M1 to AP")
    m1, raw_m1_attrs = build_m1(msg['eap_identifier'], uuid_e, mac_addr,
                                e_nonce, e_pk, manufacturer='Apple TEST',
                                model_name='AirPort', config_methods=b'\xff\xff')
    send_wsc_msg(hapd, addr, m1)
    logger.debug("Receive M2 from AP")
    msg, m2_attrs, raw_m2_attrs = recv_wsc_msg(hapd, WSC_MSG, WPS_M2)
@remote_compatible
def test_ap_wps_disable_enable(dev, apdev):
    """WPS and DISABLE/ENABLE AP"""
    # Cycle a WPS-enabled AP through DISABLE/ENABLE and confirm it is
    # still visible in scan results afterwards.
    ap = wps_start_ap(apdev[0])
    ap.disable()
    ap.enable()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
def test_ap_wps_upnp_web_oom(dev, apdev, params):
    """hostapd WPS UPnP web OOM"""
    # Exercise the hostapd WPS UPnP web server error paths by injecting
    # allocation failures around GET/SOAP/SUBSCRIBE handling and by sending
    # malformed PutWLANResponse arguments. Each step verifies that the
    # server either recovers cleanly or reports the expected HTTP status.
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    hapd = add_ssdp_ap(apdev[0], ap_uuid)

    location = ssdp_get_location(ap_uuid)
    url = urlparse(location)
    urls = upnp_get_urls(location)
    eventurl = urlparse(urls['event_sub_url'])
    ctrlurl = urlparse(urls['control_url'])

    conn = HTTPConnection(url.netloc)
    with alloc_fail(hapd, 1, "web_connection_parse_get"):
        conn.request("GET", "/wps_device.xml")
        try:
            resp = conn.getresponse()
        except:
            # Connection may be dropped due to the injected OOM; ignore.
            pass

    conn = HTTPConnection(url.netloc)
    conn.request("GET", "/unknown")
    resp = conn.getresponse()
    if resp.status != 404:
        # BUGFIX: the original used '+' between the format string and the
        # integer status, which raised TypeError instead of the message.
        raise Exception("Unexpected HTTP result for unknown URL: %d" % resp.status)

    with alloc_fail(hapd, 1, "web_connection_parse_get"):
        conn.request("GET", "/unknown")
        try:
            resp = conn.getresponse()
            print(resp.status)
        except:
            pass

    conn = HTTPConnection(url.netloc)
    conn.request("GET", "/wps_device.xml")
    resp = conn.getresponse()
    if resp.status != 200:
        raise Exception("GET /wps_device.xml failed")

    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
    if resp.status != 200:
        raise Exception("GetDeviceInfo failed")

    with alloc_fail(hapd, 1, "web_process_get_device_info"):
        conn = HTTPConnection(url.netloc)
        resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
        if resp.status != 500:
            raise Exception("Internal error not reported from GetDeviceInfo OOM")

    with alloc_fail(hapd, 1, "wps_build_m1;web_process_get_device_info"):
        conn = HTTPConnection(url.netloc)
        resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
        if resp.status != 500:
            raise Exception("Internal error not reported from GetDeviceInfo OOM")

    with alloc_fail(hapd, 1, "wpabuf_alloc;web_connection_send_reply"):
        conn = HTTPConnection(url.netloc)
        try:
            resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
        except:
            pass

    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "GetDeviceInfo")
    if resp.status != 200:
        raise Exception("GetDeviceInfo failed")

    # No NewWLANEventType in PutWLANResponse NewMessage
    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse", newmsg="foo")
    if resp.status != 600:
        raise Exception("Unexpected HTTP response: %d" % resp.status)

    # No NewWLANEventMAC in PutWLANResponse NewMessage
    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
                            newmsg="foo", neweventtype="1")
    if resp.status != 600:
        raise Exception("Unexpected HTTP response: %d" % resp.status)

    # Invalid NewWLANEventMAC in PutWLANResponse NewMessage
    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
                            newmsg="foo", neweventtype="1",
                            neweventmac="foo")
    if resp.status != 600:
        raise Exception("Unexpected HTTP response: %d" % resp.status)

    # Workaround for NewWLANEventMAC in PutWLANResponse NewMessage
    # Ignored unexpected PutWLANResponse WLANEventType 1
    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
                            newmsg="foo", neweventtype="1",
                            neweventmac="00.11.22.33.44.55")
    if resp.status != 500:
        raise Exception("Unexpected HTTP response: %d" % resp.status)

    # PutWLANResponse NewMessage with invalid EAP message
    conn = HTTPConnection(url.netloc)
    resp = upnp_soap_action(conn, ctrlurl.path, "PutWLANResponse",
                            newmsg="foo", neweventtype="2",
                            neweventmac="00:11:22:33:44:55")
    if resp.status != 200:
        raise Exception("Unexpected HTTP response: %d" % resp.status)

    with alloc_fail(hapd, 1, "web_connection_parse_subscribe"):
        conn = HTTPConnection(url.netloc)
        headers = {"callback": '<http://127.0.0.1:12345/event>',
                   "NT": "upnp:event",
                   "timeout": "Second-1234"}
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        try:
            resp = conn.getresponse()
        except:
            pass

    with alloc_fail(hapd, 1, "dup_binstr;web_connection_parse_subscribe"):
        conn = HTTPConnection(url.netloc)
        headers = {"callback": '<http://127.0.0.1:12345/event>',
                   "NT": "upnp:event",
                   "timeout": "Second-1234"}
        conn.request("SUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        resp = conn.getresponse()
        if resp.status != 500:
            raise Exception("Unexpected HTTP response: %d" % resp.status)

    with alloc_fail(hapd, 1, "wpabuf_alloc;web_connection_parse_unsubscribe"):
        conn = HTTPConnection(url.netloc)
        headers = {"callback": '<http://127.0.0.1:12345/event>',
                   "NT": "upnp:event",
                   "timeout": "Second-1234"}
        conn.request("UNSUBSCRIBE", eventurl.path, "\r\n\r\n", headers)
        try:
            resp = conn.getresponse()
        except:
            pass

    with alloc_fail(hapd, 1, "web_connection_unimplemented"):
        conn = HTTPConnection(url.netloc)
        conn.request("HEAD", "/wps_device.xml")
        try:
            resp = conn.getresponse()
        except:
            pass
def test_ap_wps_frag_ack_oom(dev, apdev):
    """WPS and fragment ack OOM"""
    # Force tiny EAP-WSC fragments so the AP must send fragment acks, then
    # inject an allocation failure into the ack builder to hit the OOM path.
    dev[0].request("SET wps_fragment_size 50")
    ap = wps_start_ap(apdev[0])
    with alloc_fail(ap, 1, "eap_wsc_build_frag_ack"):
        wps_run_pbc_fail_ap(apdev[0], dev[0], ap)
def wait_scan_stopped(dev):
    # Abort any ongoing scan and poll the driver (up to ~5 seconds) until
    # it reports that no scan is running or pending.
    dev.request("ABORT_SCAN")
    for _ in range(50):
        state = dev.get_driver_status_field("scan_state")
        if "SCAN_STARTED" not in state and "SCAN_REQUESTED" not in state:
            break
        logger.debug("Waiting for scan to complete")
        time.sleep(0.1)
@remote_compatible
def test_ap_wps_eap_wsc_errors(dev, apdev):
    """WPS and EAP-WSC error cases"""
    # Run a series of deliberately malformed WPS registrar operations
    # (bad new_ssid/new_key parameters, invalid auth/encr types) and
    # memory-allocation failure injections, verifying that each attempt
    # fails cleanly with WPS-FAIL and the station can recover.
    ssid = "test-wps-conf-pin"
    appin = "12345670"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "fragment_size": "300", "ap_pin": appin}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']

    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()

    # Invalid new_ssid parameter
    dev[0].wps_reg(bssid, appin + " new_ssid=a", "new ssid", "WPA2PSK", "CCMP",
                   "new passphrase", no_wait=True)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
    if ev is None:
        raise Exception("WPS-FAIL not reported")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    wait_scan_stopped(dev[0])
    dev[0].dump_monitor()

    # Invalid auth type
    dev[0].wps_reg(bssid, appin, "new ssid", "FOO", "CCMP",
                   "new passphrase", no_wait=True)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
    if ev is None:
        raise Exception("WPS-FAIL not reported")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    wait_scan_stopped(dev[0])
    dev[0].dump_monitor()

    # Invalid encr type
    dev[0].wps_reg(bssid, appin, "new ssid", "WPA2PSK", "FOO",
                   "new passphrase", no_wait=True)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
    if ev is None:
        raise Exception("WPS-FAIL not reported")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    wait_scan_stopped(dev[0])
    dev[0].dump_monitor()

    # Invalid new_key parameter
    dev[0].wps_reg(bssid, appin + "new_key=a", "new ssid", "WPA2PSK", "CCMP",
                   "new passphrase", no_wait=True)
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=10)
    if ev is None:
        raise Exception("WPS-FAIL not reported")
    dev[0].request("WPS_CANCEL")
    dev[0].wait_disconnected()
    wait_scan_stopped(dev[0])
    dev[0].dump_monitor()

    # Allocation failures in the EAP-WSC method implementation
    tests = ["eap_wsc_init",
             "eap_msg_alloc;eap_wsc_build_msg",
             "wpabuf_alloc;eap_wsc_process_fragment"]
    for func in tests:
        with alloc_fail(dev[0], 1, func):
            dev[0].request("WPS_PIN %s %s" % (bssid, pin))
            wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
            dev[0].request("WPS_CANCEL")
            dev[0].wait_disconnected()
            wait_scan_stopped(dev[0])
            dev[0].dump_monitor()

    # Failure injection into crypto operations
    tests = [(1, "wps_decrypt_encr_settings"),
             (2, "hmac_sha256;wps_derive_psk")]
    for count, func in tests:
        hapd.request("WPS_PIN any " + pin)
        with fail_test(dev[0], count, func):
            dev[0].request("WPS_PIN %s %s" % (bssid, pin))
            wait_fail_trigger(dev[0], "GET_FAIL")
            dev[0].request("WPS_CANCEL")
            dev[0].wait_disconnected()
            wait_scan_stopped(dev[0])
            dev[0].dump_monitor()

    with alloc_fail(dev[0], 1, "eap_msg_alloc;eap_sm_build_expanded_nak"):
        dev[0].wps_reg(bssid, appin + " new_ssid=a", "new ssid", "WPA2PSK",
                       "CCMP", "new passphrase", no_wait=True)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
        dev[0].request("WPS_CANCEL")
        dev[0].wait_disconnected()
        wait_scan_stopped(dev[0])
        dev[0].dump_monitor()
def test_ap_wps_eap_wsc(dev, apdev):
    """WPS and EAP-WSC in network profile"""
    # Configure EAP-WSC directly as the EAP method in a network profile
    # with a series of invalid/incomplete phase1 parameter combinations;
    # each attempt must end in an EAP failure rather than a connection.
    params = int_eap_server_params()
    params["wps_state"] = "2"
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']

    logger.info("Unexpected identity")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-unexpected",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("No phase1 parameter")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("No PIN/PBC in phase1")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   phase1="foo", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("Invalid pkhash in phase1")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   phase1="foo pkhash=q pbc=1", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("Zero fragment_size")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   fragment_size="0", phase1="pin=12345670", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["WPS-M2D"], timeout=5)
    if ev is None:
        raise Exception("No M2D seen")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("Missing new_auth")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   phase1="pin=12345670 new_ssid=aa", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("Missing new_encr")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   phase1="pin=12345670 new_auth=WPA2PSK new_ssid=aa", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()

    logger.info("Missing new_key")
    dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="WSC", identity="WFA-SimpleConfig-Enrollee-1-0",
                   phase1="pin=12345670 new_auth=WPA2PSK new_ssid=aa new_encr=CCMP",
                   wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-PROPOSED-METHOD"], timeout=5)
    if ev is None:
        raise Exception("Timeout on EAP method start")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-FAILURE"], timeout=5)
    if ev is None:
        raise Exception("No EAP-Failure seen")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
def test_ap_wps_and_bss_limit(dev, apdev):
    """WPS and wpa_supplicant BSS entry limit"""
    try:
        _test_ap_wps_and_bss_limit(dev, apdev)
    finally:
        # Restore the default BSS table size modified by the test body.
        # (Removed a redundant trailing "pass" statement.)
        dev[0].request("SET bss_max_count 200")
def _test_ap_wps_and_bss_limit(dev, apdev):
    # Populate the air with multiple WPS-active BSSes (two hostapd APs and
    # two wpa_supplicant AP-mode instances), then limit dev[0] to a single
    # BSS table entry and run WPS_PBC/scan to exercise BSS entry eviction
    # during WPS operations.
    params = {"ssid": "test-wps", "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)

    params = {"ssid": "test-wps-2", "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "1234567890", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
    hapd2 = hostapd.add_ap(apdev[1], params)

    id = dev[1].add_network()
    dev[1].set_network(id, "mode", "2")
    dev[1].set_network_quoted(id, "ssid", "wpas-ap-no-wps")
    dev[1].set_network_quoted(id, "psk", "12345678")
    dev[1].set_network(id, "frequency", "2462")
    dev[1].set_network(id, "scan_freq", "2462")
    dev[1].set_network(id, "wps_disabled", "1")
    dev[1].select_network(id)

    id = dev[2].add_network()
    dev[2].set_network(id, "mode", "2")
    dev[2].set_network_quoted(id, "ssid", "wpas-ap")
    dev[2].set_network_quoted(id, "psk", "12345678")
    dev[2].set_network(id, "frequency", "2437")
    dev[2].set_network(id, "scan_freq", "2437")
    dev[2].select_network(id)

    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5")
    id = wpas.add_network()
    wpas.set_network(id, "mode", "2")
    wpas.set_network_quoted(id, "ssid", "wpas-ap")
    wpas.set_network_quoted(id, "psk", "12345678")
    wpas.set_network(id, "frequency", "2437")
    wpas.set_network(id, "scan_freq", "2437")
    wpas.select_network(id)

    dev[1].wait_connected()
    dev[2].wait_connected()
    wpas.wait_connected()
    wpas.request("WPS_PIN any 12345670")

    hapd.request("WPS_PBC")
    hapd2.request("WPS_PBC")

    # Restrict dev[0] to a single BSS table entry before starting WPS.
    dev[0].request("SET bss_max_count 1")

    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "testing")

    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "testing")
    dev[0].set_network(id, "key_mgmt", "WPS")

    dev[0].request("WPS_PBC")
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"], timeout=10)
    dev[0].request("WPS_CANCEL")

    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "testing")
    dev[0].set_network(id, "key_mgmt", "WPS")

    dev[0].scan(freq="2412")
def test_ap_wps_pbc_2ap(dev, apdev):
    """WPS PBC with two APs advertising same SSID"""
    # Two independent WPS APs share the same SSID but only one has an
    # active PBC session; the station must pick the PBC-active AP.
    params = {"ssid": "wps", "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "wps_independent": "1"}
    hapd = hostapd.add_ap(apdev[0], params)
    params = {"ssid": "wps", "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "123456789", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "wps_independent": "1"}
    hapd2 = hostapd.add_ap(apdev[1], params)
    # Activate PBC only on the first AP.
    hapd.request("WPS_PBC")

    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    wpas.dump_monitor()
    wpas.flush_scan_cache()

    wpas.scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    wpas.scan_for_bss(apdev[1]['bssid'], freq="2412")
    wpas.request("WPS_PBC")
    wpas.wait_connected()
    wpas.request("DISCONNECT")
    hapd.request("DISABLE")
    hapd2.request("DISABLE")
    wpas.flush_scan_cache()
def test_ap_wps_er_enrollee_to_conf_ap(dev, apdev):
    """WPS ER enrolling a new device to a configured AP"""
    try:
        _test_ap_wps_er_enrollee_to_conf_ap(dev, apdev)
    finally:
        # Always stop the External Registrar, even if the test body failed.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_enrollee_to_conf_ap(dev, apdev):
    # dev[0] acts as a WPS External Registrar over UPnP (on the loopback
    # interface) and enrolls dev[2] to the already-configured AP using its
    # locally stored network profile (WPS_ER_SET_CONFIG).
    ssid = "wps-er-enrollee-to-conf-ap"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    id = dev[0].connect(ssid, psk="12345678", scan_freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")

    pin = dev[2].wps_read_pin()
    addr2 = dev[2].own_addr()
    dev[0].dump_monitor()
    dev[2].scan_for_bss(bssid, freq=2412)
    dev[2].dump_monitor()
    dev[2].request("WPS_PIN %s %s" % (bssid, pin))

    # The ER may report other stations first; retry a few times until the
    # expected Enrollee MAC address shows up.
    for i in range(3):
        ev = dev[0].wait_event(["WPS-ER-ENROLLEE-ADD"], timeout=10)
        if ev is None:
            raise Exception("Enrollee not seen")
        if addr2 in ev:
            break
    if addr2 not in ev:
        raise Exception("Unexpected Enrollee MAC address")
    dev[0].dump_monitor()

    dev[0].request("WPS_ER_SET_CONFIG " + ap_uuid + " " + str(id))
    dev[0].request("WPS_ER_PIN " + addr2 + " " + pin + " " + addr2)
    dev[2].wait_connected(timeout=30)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
def test_ap_wps_er_enrollee_to_conf_ap2(dev, apdev):
    """WPS ER enrolling a new device to a configured AP (2)"""
    try:
        _test_ap_wps_er_enrollee_to_conf_ap2(dev, apdev)
    finally:
        # Always stop the External Registrar, even if the test body failed.
        dev[0].request("WPS_ER_STOP")
def _test_ap_wps_er_enrollee_to_conf_ap2(dev, apdev):
    # Variant of the ER enrollment test: instead of using a local network
    # profile, the ER first learns the AP settings via WPS_ER_LEARN with
    # the AP PIN and then enrolls dev[1] with a wildcard WPS_ER_PIN.
    ssid = "wps-er-enrollee-to-conf-ap"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()

    id = dev[0].connect(ssid, psk="12345678", scan_freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_START ifname=lo")
    ev = dev[0].wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not found")

    dev[0].request("WPS_ER_LEARN " + ap_uuid + " " + ap_pin)
    ev = dev[0].wait_event(["WPS-ER-AP-SETTINGS"], timeout=15)
    if ev is None:
        raise Exception("AP learn timed out")
    if ap_uuid not in ev:
        raise Exception("Expected AP UUID not in settings")
    # AP learn ends the protocol run with a WPS-FAIL indication; wait for
    # it before proceeding.
    ev = dev[0].wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS-FAIL after AP learn timed out")
    time.sleep(0.1)

    pin = dev[1].wps_read_pin()
    addr1 = dev[1].own_addr()
    dev[0].dump_monitor()
    dev[0].request("WPS_ER_PIN any " + pin)
    time.sleep(0.1)
    dev[1].scan_for_bss(bssid, freq=2412)
    dev[1].request("WPS_PIN any %s" % pin)
    ev = dev[1].wait_event(["WPS-SUCCESS"], timeout=30)
    if ev is None:
        raise Exception("Enrollee did not report success")
    dev[1].wait_connected(timeout=15)
    ev = dev[0].wait_event(["WPS-SUCCESS"], timeout=15)
    if ev is None:
        raise Exception("WPS ER did not report success")
def test_ap_wps_ignore_broadcast_ssid(dev, apdev):
    """WPS AP trying to ignore broadcast SSID"""
    # Hidden-SSID operation is incompatible with WPS, so activating a WPS
    # push button session on such an AP must be rejected.
    ap_params = {"ssid": "test-wps", "eap_server": "1", "wps_state": "1",
                 "ignore_broadcast_ssid": "1"}
    hapd = hostapd.add_ap(apdev[0], ap_params)
    if "FAIL" not in hapd.request("WPS_PBC"):
        raise Exception("WPS unexpectedly enabled")
def test_ap_wps_wep(dev, apdev):
    """WPS AP trying to enable WEP"""
    check_wep_capa(dev[0])
    # WEP is not allowed for WPS networks; WPS_PBC must be rejected.
    ap_params = {"ssid": "test-wps", "eap_server": "1", "wps_state": "1",
                 "ieee80211n": "0", "wep_key0": '"hello"'}
    hapd = hostapd.add_ap(apdev[0], ap_params)
    if "FAIL" not in hapd.request("WPS_PBC"):
        raise Exception("WPS unexpectedly enabled")
def test_ap_wps_tkip(dev, apdev):
    """WPS AP trying to enable TKIP"""
    # WPA (v1) implies TKIP, which is not allowed for WPS networks;
    # WPS_PBC must be rejected on such an AP.
    ap_params = {"ssid": "test-wps", "eap_server": "1", "wps_state": "1",
                 "ieee80211n": "0", "wpa": '1',
                 "wpa_key_mgmt": "WPA-PSK",
                 "wpa_passphrase": "12345678"}
    hapd = hostapd.add_ap(apdev[0], ap_params)
    if "FAIL" not in hapd.request("WPS_PBC"):
        raise Exception("WPS unexpectedly enabled")
def test_ap_wps_conf_stub_cred(dev, apdev):
    """WPS PIN provisioning with configured AP using stub cred"""
    # With wps_testing_stub_cred enabled on the AP, the Enrollee should
    # receive multiple credentials (stub + real) and still end up
    # connected with the real one.
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    hapd.request("WPS_PIN any 12345670")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].dump_monitor()
    try:
        hapd.set("wps_testing_stub_cred", "1")
        dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " 12345670")
        for i in range(1, 3):
            ev = dev[0].wait_event(["WPS-CRED-RECEIVED"], timeout=15)
            if ev is None:
                raise Exception("WPS credential %d not received" % i)
        dev[0].wait_connected(timeout=30)
    finally:
        # Restore the default (stub credentials disabled).
        hapd.set("wps_testing_stub_cred", "0")
def test_ap_wps_rf_bands(dev, apdev):
    """WPS and wps_rf_bands configuration"""
    # Provision over PBC with wps_rf_bands=ag and verify the RF Bands
    # attribute (id 0x103c, value 0x03 = 2.4+5 GHz) appears in the Beacon
    # IEs; then exercise the remaining wps_rf_bands values via SET.
    ssid = "test-wps-conf"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "wps_rf_bands": "ag"}

    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + bssid)
    dev[0].wait_connected(timeout=30)
    bss = dev[0].get_bss(bssid)
    logger.info("BSS: " + str(bss))
    # 0x103c = RF Bands attribute, length 0x0001, value 0x03 (2.4 + 5 GHz)
    if "103c000103" not in bss['ie']:
        raise Exception("RF Bands attribute with expected values not found")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    # Exercise the config parser with the other accepted band values.
    hapd.set("wps_rf_bands", "ad")
    hapd.set("wps_rf_bands", "a")
    hapd.set("wps_rf_bands", "g")
    hapd.set("wps_rf_bands", "b")
    hapd.set("wps_rf_bands", "ga")
    hapd.disable()
    dev[0].dump_monitor()
    dev[0].flush_scan_cache()
def test_ap_wps_pbc_in_m1(dev, apdev):
    """WPS and pbc_in_m1"""
    # Run a normal PBC provisioning against an AP configured with
    # pbc_in_m1=1 (workaround for Windows 7 virtual push button).
    ssid = "test-wps-conf"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "config_methods": "virtual_push_button virtual_display",
              "pbc_in_m1": "1"}

    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    hapd.request("WPS_PBC")
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].dump_monitor()
    dev[0].request("WPS_PBC " + bssid)
    dev[0].wait_connected(timeout=30)
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    hapd.disable()
    dev[0].dump_monitor()
    dev[0].flush_scan_cache()
def test_ap_wps_pbc_mac_addr_change(dev, apdev, params):
    """WPS M1 with MAC address change"""
    # Change the station MAC address before running WPS PBC and verify
    # (via a tshark capture of the M1 message) that the new address is
    # used in the WPS MAC Address attribute.
    skip_without_tkip(dev[0])
    ssid = "test-wps-mac-addr-change"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1"})
    hapd.request("WPS_PBC")
    if "PBC Status: Active" not in hapd.request("WPS_GET_STATUS"):
        raise Exception("PBC status not shown correctly")
    dev[0].flush_scan_cache()

    test_addr = '02:11:22:33:44:55'
    addr = dev[0].get_status_field("address")
    if addr == test_addr:
        raise Exception("Unexpected initial MAC address")

    try:
        # Interface must be down while changing the MAC address.
        subprocess.call(['ip', 'link', 'set', 'dev', dev[0].ifname, 'down'])
        subprocess.call(['ip', 'link', 'set', 'dev', dev[0].ifname, 'address',
                         test_addr])
        subprocess.call(['ip', 'link', 'set', 'dev', dev[0].ifname, 'up'])
        addr1 = dev[0].get_status_field("address")
        if addr1 != test_addr:
            raise Exception("Failed to change MAC address")

        dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
        dev[0].request("WPS_PBC " + apdev[0]['bssid'])
        dev[0].wait_connected(timeout=30)
        status = dev[0].get_status()
        if status['wpa_state'] != 'COMPLETED' or \
           status['bssid'] != apdev[0]['bssid']:
            raise Exception("Not fully connected")

        # wps.message_type == 0x04 selects M1 frames in the capture.
        out = run_tshark(os.path.join(params['logdir'], "hwsim0.pcapng"),
                         "wps.message_type == 0x04",
                         display=["wps.mac_address"])
        res = out.splitlines()

        if len(res) < 1:
            raise Exception("No M1 message with MAC address found")
        if res[0] != addr1:
            raise Exception("Wrong M1 MAC address")
        dev[0].request("DISCONNECT")
        dev[0].wait_disconnected()
        hapd.disable()
        dev[0].dump_monitor()
        dev[0].flush_scan_cache()
    finally:
        # Restore MAC address
        subprocess.call(['ip', 'link', 'set', 'dev', dev[0].ifname, 'down'])
        subprocess.call(['ip', 'link', 'set', 'dev', dev[0].ifname, 'address',
                         addr])
        subprocess.call(['ip', 'link', 'set', 'dev', dev[0].ifname, 'up'])
def test_ap_wps_pin_start_failure(dev, apdev):
    """WPS_PIN start failure"""
    # Both forms of the WPS_PIN command (explicit PIN and generated PIN)
    # must be rejected cleanly when the start routine hits an allocation
    # failure.
    for cmd in ["WPS_PIN any 12345670", "WPS_PIN any"]:
        with alloc_fail(dev[0], 1, "wpas_wps_start_dev_pw"):
            if "FAIL" not in dev[0].request(cmd):
                raise Exception("WPS_PIN not rejected during OOM")
def test_ap_wps_ap_pin_failure(dev, apdev):
    """WPS_AP_PIN failure"""
    # Run dev[0] in AP mode and verify WPS_AP_PIN handles failures in
    # random-number generation and memory allocation by rejecting the
    # command instead of crashing or succeeding.
    id = dev[0].add_network()
    dev[0].set_network(id, "mode", "2")
    dev[0].set_network_quoted(id, "ssid", "wpas-ap-wps")
    dev[0].set_network_quoted(id, "psk", "1234567890")
    dev[0].set_network(id, "frequency", "2412")
    dev[0].set_network(id, "scan_freq", "2412")
    dev[0].select_network(id)
    dev[0].wait_connected()

    with fail_test(dev[0], 1,
                   "os_get_random;wpa_supplicant_ctrl_iface_wps_ap_pin"):
        if "FAIL" not in dev[0].request("WPS_AP_PIN random"):
            raise Exception("WPS_AP_PIN random accepted")
    with alloc_fail(dev[0], 1, "wpas_wps_ap_pin_set"):
        if "FAIL" not in dev[0].request("WPS_AP_PIN set 12345670"):
            raise Exception("WPS_AP_PIN set accepted")

    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
def test_ap_wps_random_uuid(dev, apdev, params):
    """WPS and random UUID on Enrollee"""
    # With auto_uuid=1, a fresh Enrollee UUID must be generated on each
    # interface start; the UUIDs seen by the Registrar over three runs
    # must all differ.
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})

    config = os.path.join(params['logdir'], 'ap_wps_random_uuid.conf')
    with open(config, "w") as f:
        f.write("auto_uuid=1\n")

    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')

    uuid = []
    for i in range(3):
        wpas.interface_add("wlan5", config=config)

        wpas.scan_for_bss(apdev[0]['bssid'], freq="2412")
        wpas.dump_monitor()
        wpas.request("WPS_PBC " + apdev[0]['bssid'])
        ev = hapd.wait_event(["WPS-ENROLLEE-SEEN"], timeout=10)
        if ev is None:
            raise Exception("Enrollee not seen")
        uuid.append(ev.split(' ')[2])
        wpas.request("WPS_CANCEL")
        wpas.dump_monitor()

        wpas.interface_remove("wlan5")

        hapd.dump_monitor()

    logger.info("Seen UUIDs: " + str(uuid))
    if uuid[0] == uuid[1] or uuid[0] == uuid[2] or uuid[1] == uuid[2]:
        raise Exception("Same UUID used multiple times")
def test_ap_wps_conf_pin_gcmp_128(dev, apdev):
    """WPS PIN provisioning with configured AP using GCMP-128"""
    # Thin wrapper selecting the GCMP (128-bit) pairwise cipher.
    run_ap_wps_conf_pin_cipher(dev, apdev, "GCMP")
def test_ap_wps_conf_pin_gcmp_256(dev, apdev):
    """WPS PIN provisioning with configured AP using GCMP-256"""
    # Thin wrapper selecting the GCMP-256 pairwise cipher.
    run_ap_wps_conf_pin_cipher(dev, apdev, "GCMP-256")
def test_ap_wps_conf_pin_ccmp_256(dev, apdev):
    """WPS PIN provisioning with configured AP using CCMP-256"""
    # Thin wrapper selecting the CCMP-256 pairwise cipher.
    run_ap_wps_conf_pin_cipher(dev, apdev, "CCMP-256")
def run_ap_wps_conf_pin_cipher(dev, apdev, cipher):
    # Helper: WPS PIN provisioning against a configured AP that uses the
    # given RSN pairwise cipher; skipped if the station lacks support.
    if cipher not in dev[0].get_capability("pairwise"):
        raise HwsimSkip("Cipher %s not supported" % cipher)
    ap_params = {"ssid": "test-wps-conf-pin", "eap_server": "1",
                 "wps_state": "2", "wpa_passphrase": "12345678",
                 "wpa": "2", "wpa_key_mgmt": "WPA-PSK",
                 "rsn_pairwise": cipher}
    hapd = hostapd.add_ap(apdev[0], ap_params)
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=15)
def test_ap_wps_and_sae(dev, apdev):
    """Initial AP configuration with first WPS Enrollee and adding SAE"""
    skip_without_tkip(dev[0])
    skip_without_tkip(dev[1])
    try:
        run_ap_wps_and_sae(dev, apdev)
    finally:
        # Restore the default (SAE credential addition disabled).
        dev[0].set("wps_cred_add_sae", "0")
def run_ap_wps_and_sae(dev, apdev):
    check_sae_capab(dev[0])
    ssid = "test-wps-sae"
    # Unconfigured AP (wps_state=1) that adds SAE to the credentials it
    # provisions (wps_cred_add_sae=1).
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "1",
                           "wps_cred_add_sae": "1"})
    logger.info("WPS provisioning step")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].set("wps_cred_add_sae", "1")
    dev[0].request("SET sae_groups ")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " " + pin)
    dev[0].wait_connected(timeout=30)
    # First Enrollee opted in to SAE: it must use SAE with PMF enabled.
    status = dev[0].get_status()
    if status['key_mgmt'] != "SAE":
        raise Exception("SAE not used")
    if 'pmf' not in status or status['pmf'] != "1":
        raise Exception("PMF not enabled")
    pin = dev[1].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[1].scan_for_bss(apdev[0]['bssid'], freq="2412", force_scan=True)
    dev[1].request("WPS_PIN " + apdev[0]['bssid'] + " " + pin)
    dev[1].wait_connected(timeout=30)
    # Second Enrollee did not enable wps_cred_add_sae: it must fall back to
    # WPA2-PSK without PMF.
    status = dev[1].get_status()
    if status['key_mgmt'] != "WPA2-PSK":
        raise Exception("WPA2-PSK not used")
    if 'pmf' in status:
        raise Exception("PMF enabled")
def test_ap_wps_conf_and_sae(dev, apdev):
    """WPS PBC provisioning with configured AP using PSK+SAE"""
    try:
        run_ap_wps_conf_and_sae(dev, apdev)
    finally:
        # Restore the station default for subsequent tests.
        dev[0].set("wps_cred_add_sae", "0")
def run_ap_wps_conf_and_sae(dev, apdev):
    check_sae_capab(dev[0])
    ssid = "test-wps-conf-sae"
    # Configured AP advertising mixed WPA-PSK + SAE key management with PMF.
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "ieee80211w": "1", "sae_require_mfp": "1",
                           "wpa_key_mgmt": "WPA-PSK SAE",
                           "rsn_pairwise": "CCMP"})
    dev[0].set("wps_cred_add_sae", "1")
    dev[0].request("SET sae_groups ")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " " + pin)
    dev[0].wait_connected(timeout=30)
    # The WPS-provisioned station with wps_cred_add_sae=1 must use SAE + PMF.
    status = dev[0].get_status()
    if status['key_mgmt'] != "SAE":
        raise Exception("SAE not used")
    if 'pmf' not in status or status['pmf'] != "1":
        raise Exception("PMF not enabled")
    # A legacy PSK station must still be able to join the same BSS.
    dev[1].connect(ssid, psk="12345678", scan_freq="2412", proto="WPA2",
                   key_mgmt="WPA-PSK", ieee80211w="0")
def test_ap_wps_conf_and_sae_h2e(dev, apdev):
    """WPS PIN provisioning with configured AP using PSK+SAE(H2E)"""
    try:
        run_ap_wps_conf_and_sae_h2e(dev, apdev)
    finally:
        # Restore both station parameters changed by the helper.
        dev[0].set("wps_cred_add_sae", "0")
        dev[0].set("sae_pwe", "0")
def run_ap_wps_conf_and_sae_h2e(dev, apdev):
    check_sae_capab(dev[0])
    ssid = "test-wps-conf-sae"
    # sae_pwe=1 selects hash-to-element (H2E) PWE derivation on both ends.
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "ieee80211w": "1", "sae_require_mfp": "1",
                           "sae_pwe": "1",
                           "wpa_key_mgmt": "WPA-PSK SAE",
                           "rsn_pairwise": "CCMP"})
    dev[0].set("wps_cred_add_sae", "1")
    dev[0].set("sae_pwe", "1")
    dev[0].request("SET sae_groups ")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].request("WPS_PIN " + apdev[0]['bssid'] + " " + pin)
    dev[0].wait_connected(timeout=30)
    # The provisioned connection must end up on SAE with PMF enabled.
    status = dev[0].get_status()
    if status['key_mgmt'] != "SAE":
        raise Exception("SAE not used")
    if 'pmf' not in status or status['pmf'] != "1":
        raise Exception("PMF not enabled")
def test_ap_wps_reg_config_and_sae(dev, apdev):
    """WPS registrar configuring an AP using AP PIN and using PSK+SAE"""
    try:
        run_ap_wps_reg_config_and_sae(dev, apdev)
    finally:
        # Restore the station default for subsequent tests.
        dev[0].set("wps_cred_add_sae", "0")
def run_ap_wps_reg_config_and_sae(dev, apdev):
    check_sae_capab(dev[0])
    ssid = "test-wps-init-ap-pin-sae"
    appin = "12345670"
    hostapd.add_ap(apdev[0],
                   {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                    "ap_pin": appin, "wps_cred_add_sae": "1"})
    logger.info("WPS configuration step")
    dev[0].flush_scan_cache()
    dev[0].set("wps_cred_add_sae", "1")
    dev[0].request("SET sae_groups ")
    dev[0].scan_for_bss(apdev[0]['bssid'], freq=2412)
    dev[0].dump_monitor()
    new_ssid = "wps-new-ssid"
    new_passphrase = "1234567890"
    # Acting as External Registrar, reconfigure the AP using the AP PIN.
    dev[0].wps_reg(apdev[0]['bssid'], appin, new_ssid, "WPA2PSK", "CCMP",
                   new_passphrase)
    # The registrar station itself must connect with SAE + PMF.
    status = dev[0].get_status()
    if status['key_mgmt'] != "SAE":
        raise Exception("SAE not used")
    if 'pmf' not in status or status['pmf'] != "1":
        raise Exception("PMF not enabled")
    # A legacy PSK station must still be able to join the new configuration.
    dev[1].connect(new_ssid, psk=new_passphrase, scan_freq="2412", proto="WPA2",
                   key_mgmt="WPA-PSK", ieee80211w="0")
def test_ap_wps_appl_ext(dev, apdev):
    """WPS Application Extension attribute"""
    ssid = "test-wps-conf"
    # wps_application_ext payload: 16 bytes of 0x11 followed by 5 bytes of
    # 0xee (hex encoded); provisioning must still succeed with it present.
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wps_application_ext": 16*"11" + 5*"ee",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"}
    hapd = hostapd.add_ap(apdev[0], params)
    pin = dev[0].wps_read_pin()
    hapd.request("WPS_PIN any " + pin)
    dev[0].scan_for_bss(apdev[0]['bssid'], freq="2412")
    dev[0].request("WPS_PIN %s %s" % (apdev[0]['bssid'], pin))
    dev[0].wait_connected(timeout=30)
# These wait over two minutes for the AP-side WPS session to time out, so
# they are only included in long-duration test runs.
@long_duration_test
def test_ap_wps_pbc_ap_timeout(dev, apdev):
    """WPS PBC timeout on AP"""
    run_ap_wps_ap_timeout(dev, apdev, "WPS_PBC")
@long_duration_test
def test_ap_wps_pin_ap_timeout(dev, apdev):
    """WPS PIN timeout on AP"""
    run_ap_wps_ap_timeout(dev, apdev, "WPS_PIN any 12345670 10")
def run_ap_wps_ap_timeout(dev, apdev, cmd):
    # cmd is either "WPS_PBC" or a "WPS_PIN ..." variant; PBC-specific
    # checks below are keyed off the command string.
    ssid = "test-wps-conf"
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": ssid, "eap_server": "1", "wps_state": "2",
                           "wpa_passphrase": "12345678", "wpa": "2",
                           "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP"})
    bssid = hapd.own_addr()
    hapd.request(cmd)
    time.sleep(1)
    dev[0].scan_for_bss(bssid, freq="2412")
    bss = dev[0].get_bss(bssid)
    logger.info("BSS during active Registrar: " + str(bss))
    # While the Registrar is active, the WPS IE must end with a wildcard
    # (ff:ff:ff:ff:ff:ff) AuthorizedMACs entry (attribute suffix checked as
    # raw hex of the scan result IEs).
    if not bss['ie'].endswith("0106ffffffffffff"):
        raise Exception("Authorized MAC not included")
    # Wait slightly longer than the expected AP-side WPS timeout.
    ev = hapd.wait_event(["WPS-TIMEOUT"], timeout=130)
    if ev is None and "PBC" in cmd:
        raise Exception("WPS-TIMEOUT not reported")
    if "PBC" in cmd and \
       "PBC Status: Timed-out" not in hapd.request("WPS_GET_STATUS"):
        raise Exception("PBC status not shown correctly")
    time.sleep(5)
    dev[0].flush_scan_cache()
    dev[0].scan_for_bss(bssid, freq="2412", force_scan=True)
    bss = dev[0].get_bss(bssid)
    logger.info("BSS after timeout: " + str(bss))
    # After the timeout, the wildcard AuthorizedMACs entry must be gone.
    if bss['ie'].endswith("0106ffffffffffff"):
        raise Exception("Authorized MAC not removed")
def test_ap_wps_er_unsubscribe_errors(dev, apdev):
    """WPS ER and UNSUBSCRIBE errors"""
    start_wps_ap(apdev[0])
    # Inject allocation/parse failures into the UPnP unsubscribe path that
    # runs when the External Registrar is stopped; each must fail cleanly.
    tests = [(1, "http_client_url_parse;wps_er_ap_unsubscribe"),
             (1, "wpabuf_alloc;wps_er_ap_unsubscribe"),
             (1, "http_client_addr;wps_er_ap_unsubscribe")]
    try:
        for count, func in tests:
            start_wps_er(dev[0])
            with alloc_fail(dev[0], count, func):
                dev[0].request("WPS_ER_STOP")
            dev[0].request("REMOVE_NETWORK all")
            dev[0].wait_disconnected()
            dev[0].dump_monitor()
    finally:
        dev[0].request("WPS_ER_STOP")
def start_wps_ap(apdev):
    # Bring up a WPS-configured AP with UPnP enabled on the loopback
    # interface so an External Registrar on the same host can discover it.
    ssid = "wps-er-ap-config"
    ap_pin = "12345670"
    ap_uuid = "27ea801a-9e5c-4e73-bd82-f89cbcd10d7e"
    params = {"ssid": ssid, "eap_server": "1", "wps_state": "2",
              "wpa_passphrase": "12345678", "wpa": "2",
              "wpa_key_mgmt": "WPA-PSK", "rsn_pairwise": "CCMP",
              "device_name": "Wireless AP", "manufacturer": "Company",
              "model_name": "WAP", "model_number": "123",
              "serial_number": "12345", "device_type": "6-0050F204-1",
              "os_version": "01020300",
              "config_methods": "label push_button",
              "ap_pin": ap_pin, "uuid": ap_uuid, "upnp_iface": "lo"}
    hostapd.add_ap(apdev, params)
def start_wps_er(dev):
    # Associate with the AP from start_wps_ap() and start the External
    # Registrar over the loopback UPnP interface; wait for AP discovery.
    ssid = "wps-er-ap-config"
    dev.connect(ssid, psk="12345678", scan_freq="2412")
    dev.request("WPS_ER_START ifname=lo")
    ev = dev.wait_event(["WPS-ER-AP-ADD"], timeout=15)
    if ev is None:
        raise Exception("AP discovery timed out")
def test_ap_wps_registrar_init_errors(dev, apdev):
    """WPS Registrar init errors"""
    hapd = wps_start_ap(apdev[0], extra_cred="wps-mixed-cred")
    hapd.disable()
    # Each injected allocation failure must make re-enabling the AP fail
    # cleanly instead of crashing.
    tests = [(1, "wps_registrar_init"),
             (1, "wpabuf_alloc_copy;wps_registrar_init"),
             (1, "wps_set_ie;wps_registrar_init")]
    for count, func in tests:
        with alloc_fail(hapd, count, func):
            if "FAIL" not in hapd.request("ENABLE"):
                raise Exception("ENABLE succeeded unexpectedly")
def test_ap_wps_config_without_wps(dev, apdev):
    """AP configuration attempt using wps_config when WPS is disabled"""
    ssid = "test-wps-init-config"
    # AP is started without any wps_state parameter, i.e., WPS disabled.
    hapd = hostapd.add_ap(apdev[0], {"ssid": ssid})
    # WPS_CONFIG takes hex-encoded SSID and passphrase; it must be rejected.
    if "FAIL" not in hapd.request("WPS_CONFIG " + binascii.hexlify(ssid.encode()).decode() + " WPA2PSK CCMP " + binascii.hexlify(b"12345678").decode()):
        raise Exception("WPS_CONFIG command succeeded unexpectedly")
| 40.372762
| 753
| 0.629889
|
4a15610b4ed5cae8ed288a16d09937efb603ffd2
| 9,277
|
py
|
Python
|
oase-root/tests/web_app/views/system/test_sso_info.py
|
Masa-Yasuno/oase
|
90f3cee73c0d9b3153808a4a72bd19984a4873f9
|
[
"Apache-2.0"
] | 9
|
2020-03-25T07:51:47.000Z
|
2022-02-07T00:07:28.000Z
|
oase-root/tests/web_app/views/system/test_sso_info.py
|
Masa-Yasuno/oase
|
90f3cee73c0d9b3153808a4a72bd19984a4873f9
|
[
"Apache-2.0"
] | 1,164
|
2021-01-28T23:16:11.000Z
|
2022-03-28T07:23:10.000Z
|
oase-root/tests/web_app/views/system/test_sso_info.py
|
Masa-Yasuno/oase
|
90f3cee73c0d9b3153808a4a72bd19984a4873f9
|
[
"Apache-2.0"
] | 25
|
2020-03-17T06:48:30.000Z
|
2022-02-15T15:13:44.000Z
|
# Copyright 2019 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
[概要]
sso_info tests
"""
import pytest
import unittest
import json
import datetime
import pytz
from django.urls import reverse
from django.test import Client
from web_app.templatetags.common import get_message
from web_app.models.models import System, Menu, SsoInfo, User, PasswordHistory
from libs.commonlibs import define as defs
from libs.commonlibs.common import Common
from importlib import import_module
def get_adminstrator():
    """
    Return a Django test ``Client`` logged in as the system administrator.

    The admin user (pk=1) is refreshed with a known password and current
    timestamps, a password-history entry is recorded, and the session is
    given a 30-minute cookie expiry before performing the login.
    """
    password = 'OaseTest@1'
    utc = pytz.timezone('UTC')
    # Refresh the administrator record so the login below always succeeds.
    admin = User.objects.get(pk=1)
    admin.password = Common.oase_hash(password)
    admin.last_login = datetime.datetime.now(utc)
    admin.password_last_modified = datetime.datetime.now(utc)
    admin.save(force_update=True)
    PasswordHistory.objects.create(
        user_id=1,
        password=Common.oase_hash(password),
        last_update_timestamp=datetime.datetime.now(utc),
        last_update_user=admin.user_name
    )
    client = Client()
    session = client.session
    expiry = datetime.datetime.now(utc) + datetime.timedelta(minutes=30)
    session['cookie_age'] = expiry.strftime('%Y-%m-%d %H:%M:%S')
    session.save()
    _ = client.login(username='administrator', password=password)
    return client
@pytest.mark.django_db
class TestSsoInfoIndex(object):
    """
    List-view tests for web_app/views/system/sso_info.py.
    """
    def test_index_ok(self, admin):
        """
        Success path: the SSO info index page renders with HTTP 200.
        """
        response = admin.get(reverse('web_app:system:sso_info'))
        content = response.content.decode('utf-8')
        assert response.status_code == 200
    def test_index_ng(self,admin):
        """
        Failure path.
        """
        # NOTE(review): pytest.raises(Exception) only passes if the GET
        # itself raises, in which case the inner assert never executes --
        # confirm this test exercises what was intended.
        with pytest.raises(Exception):
            response = admin.get(reverse('web_app:system:sso_info'))
            assert response.status_code == 404
class TestSsoInfoDelete(object):
    # Tests for the SSO provider delete view. Each test method carries its
    # own pytest.mark.django_db marker (the class itself is unmarked).
    def set_test_data(self):
        """
        Insert a single SsoInfo fixture row (fixed primary key 1111).
        """
        SsoInfo(
            provider_name = 'pytest',
            auth_type = 9999,
            logo = 'test_logo',
            visible_flag = '1',
            clientid = 5555,
            clientsecret = 6666,
            authorizationuri = 'ttt',
            accesstokenuri = 'ttt',
            resourceowneruri = 'ttt',
            scope = '',
            id = 1111,
            name = 'test',
            email = 'pytest@example.com',
            imageurl = 'test_log.pmg',
            proxy = 2222
        ).save(force_insert=True)
    @pytest.mark.django_db
    def test_delete_sso_ok(self, admin, monkeypatch):
        """
        Delete test data - success path.
        """
        self.set_test_data()
        admin = get_adminstrator()
        # Delete request payload
        json_str = {
            "providername":"pytest",
            "auth_type":9999,
            "visible_flag":1,
            "clientid":"0001",
            "clientsecret":"0000",
            "authorizationuri":"aaa",
            "resourceowneruri":"bbb",
            "id":"1234",
            "name":"test-user"
        }
        json_data = json.dumps(json_str)
        response = admin.post(reverse('web_app:system:delete_sso', args=[1,]), {'json_str':json_data})
        assert response.status_code == 200
    @pytest.mark.django_db
    def test_delete_sso_ng(self, admin, monkeypatch):
        """
        Delete test data - failure path (non-numeric pk in the URL).
        """
        self.set_test_data()
        admin = get_adminstrator()
        # NOTE(review): response_data is never used below.
        response_data = {}
        # Delete request payload
        json_str = {
            "providername":"pytest",
            "auth_type":9999,
            "visible_flag":1,
            "clientid":"0001",
            "clientsecret":"0000",
            "authorizationuri":"aaa",
            "resourceowneruri":"bbb",
            "id":"1234",
            "name":"test-user"
        }
        json_data = json.dumps(json_str)
        with pytest.raises(Exception):
            response = admin.post(reverse('web_app:system:delete_sso', args=["abc",]), {'json_str':json_data})
            assert response.status_code == 404
@pytest.mark.django_db
class TestModify:
    """
    Tests for the sso_info modify (create/update) view.

    NOTE(review): the original class defined ``test_modify_detail_ok`` twice;
    the second definition shadowed the first, so the success-path detail test
    was never collected by pytest. The failure-path variant is renamed
    ``test_modify_detail_ng`` (matching its docstring) so both tests run.
    """
    def set_test_data(self):
        """
        Insert a single SsoInfo fixture row (fixed primary key 1111).
        """
        SsoInfo(
            provider_name = 'pytest',
            auth_type = 9999,
            logo = 'test_logo',
            visible_flag = '1',
            clientid = 5555,
            clientsecret = 6666,
            authorizationuri = 'ttt',
            accesstokenuri = 'ttt',
            resourceowneruri = 'ttt',
            scope = '',
            id = 1111,
            name = 'test',
            email = 'pytest@example.com',
            imageurl = 'test_log.pmg',
            proxy = 2222
        ).save(force_insert=True)
    def del_test_data(self):
        """
        Delete all SsoInfo rows created by a test.
        """
        SsoInfo.objects.all().delete()
    @pytest.mark.django_db
    def test_modify_ok(self, admin):
        """
        New registration - success path.
        """
        admin = get_adminstrator()
        # NOTE(review): set_test_data() is invoked twice with the same fixed
        # primary key; confirm the second force_insert call is intentional.
        self.set_test_data()
        self.set_test_data()
        # Record to register
        json_str = {"table_info":{"provider_name":"pytest",
                                  "auth_type":9999,
                                  "visible_flag":1,
                                  "clientid":"0001",
                                  "clientsecret":"0000",
                                  "authorizationuri":"aaa",
                                  "resourceowneruri":"bbb",
                                  "id":"1234",
                                  "name":"test-user"}}
        json_data = json.dumps(json_str)
        response = admin.post('/oase_web/system/sso_info', {'add_record':json_data})
        content = response.content.decode('utf-8')
        assert response.status_code == 200
        self.del_test_data()
    @pytest.mark.django_db
    def test_modify_ng(self, admin):
        """
        New registration - failure path.
        """
        admin = get_adminstrator()
        self.set_test_data()
        self.set_test_data()
        # Record to register
        json_str = {"table_info":{"provider_name":"pytest",
                                  "auth_type":9999,
                                  "visible_flag":0,
                                  "clientid":"0001",
                                  "clientsecret":"0000",
                                  "authorizationuri":"aaa",
                                  "resourceowneruri":"bbb",
                                  "id":"1234",
                                  "name":"test-user"}}
        json_data = json.dumps(json_str)
        with pytest.raises(Exception):
            response = admin.post('/oase_web/system/sso_info', {'add_record':json_data})
            assert response.status_code == 404
        self.del_test_data()
    @pytest.mark.django_db
    def test_modify_detail_ok(self, admin):
        """
        Update - success path.
        """
        admin = get_adminstrator()
        # Record to update
        json_str = {"table_info":{"provider_name":"pytest",
                                  "auth_type":9876,
                                  "visible_flag":1,
                                  "clientid":"0001",
                                  "clientsecret":"0000",
                                  "authorizationuri":"aaa",
                                  "resourceowneruri":"bbb",
                                  "id":"1234",
                                  "name":"test-user"}}
        json_data = json.dumps(json_str)
        response = admin.post(reverse('web_app:system:sso_modify_detail', args=[1,]), {'json_str':json_data})
        assert response.status_code == 200
        self.del_test_data()
    @pytest.mark.django_db
    def test_modify_detail_ng(self, admin):
        """
        Update - failure path.
        """
        admin = get_adminstrator()
        # Record to update
        json_str = {"table_info":{"provider_name":"pytest",
                                  "auth_type":9876,
                                  "visible_flag":1,
                                  "clientid":"0001",
                                  "clientsecret":"0000",
                                  "authorizationuri":"aaa",
                                  "resourceowneruri":"bbb",
                                  "id":"1234",
                                  "name":"test-user"}}
        json_data = json.dumps(json_str)
        with pytest.raises(Exception):
            response = admin.post(reverse('web_app:system:sso_modify_detail', args=[1,]), {'json_str':json_data})
            assert response.status_code == 404
        self.del_test_data()
| 29.733974
| 113
| 0.519565
|
4a156159b21225cc2e801288493b99e449953b9b
| 561
|
py
|
Python
|
setup.py
|
jcc-ne/mlpods
|
020d512c6977f88c5c3df04406c122d52c1dceb6
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
jcc-ne/mlpods
|
020d512c6977f88c5c3df04406c122d52c1dceb6
|
[
"Apache-2.0"
] | 2
|
2017-04-27T18:22:30.000Z
|
2017-04-27T18:23:10.000Z
|
setup.py
|
jcc-ne/mlpods
|
020d512c6977f88c5c3df04406c122d52c1dceb6
|
[
"Apache-2.0"
] | null | null | null |
from distutils.core import setup
# Package metadata for PyPI distribution of mlpods.
# NOTE(review): distutils is deprecated and removed in Python 3.12;
# migrating to setuptools is the usual fix (left unchanged here to avoid
# introducing a new dependency).
setup(
  name = 'mlpods',
  packages = ['mlpods', 'mlpods.funpod'], # this must be the same as the name above
  version = '0.1.7',
  description = 'MLpods',
  author = 'Janine Cheng, Ketan Patel',
  author_email = 'janine.cheng@mlpods.com',
  url = 'https://github.com/jcc-ne/mlpods', # use the URL to the github repo
  download_url = 'https://github.com/jcc-ne/mlpods/archive/0.1.7.tar.gz',
  keywords = ['MLpods', 'deployment', 'machine learning',
              'ML', 'serverless'], # arbitrary keywords
  classifiers = [],
)
| 37.4
| 83
| 0.657754
|
4a156184a30d5e4c83e792fd90f1abfeacf486d2
| 504
|
py
|
Python
|
TSP19simpack/utils/auto_shrink.py
|
anantgupt/GraphAssociation
|
514ebe3b532eb211384915354ce89fdc276c0395
|
[
"MIT"
] | 2
|
2021-05-31T12:36:27.000Z
|
2021-07-27T07:28:33.000Z
|
TSP19simpack/utils/auto_shrink.py
|
anantgupt/GraphAssociation
|
514ebe3b532eb211384915354ce89fdc276c0395
|
[
"MIT"
] | null | null | null |
TSP19simpack/utils/auto_shrink.py
|
anantgupt/GraphAssociation
|
514ebe3b532eb211384915354ce89fdc276c0395
|
[
"MIT"
] | null | null | null |
# Auto deletes large pickle files to save space
import os, subprocess
from tqdm import tqdm
# Walk the current directory tree and delete known-large intermediate
# pickle files produced by earlier runs.
for (path, folder, files) in os.walk('.'):
    for file in files:
        if file in ['plot5.pickle', 'plot8.pickle', 'plot9.pickle']:
            target = os.path.join(path, file)
            try:
                os.remove(target)
                print('\x1b[1;33;40m Removed ',target,'\x1b[0m')
            except OSError:
                # Catch only filesystem errors; the original bare except also
                # swallowed KeyboardInterrupt/SystemExit.
                print('\x1b[1;32;40m Delete Failed ',target,'\x1b[0m')
| 28
| 60
| 0.638889
|
4a15622e5a2d057bfaedcd2dcac478eea10adc2b
| 8,698
|
py
|
Python
|
tools/tcpaccept.py
|
kernel-z/bcc
|
4b91369990332657490e4cbbe9291a251abc01c9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tools/tcpaccept.py
|
kernel-z/bcc
|
4b91369990332657490e4cbbe9291a251abc01c9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tools/tcpaccept.py
|
kernel-z/bcc
|
4b91369990332657490e4cbbe9291a251abc01c9
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-11-04T18:21:02.000Z
|
2019-11-04T18:21:02.000Z
|
#!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# tcpaccept Trace TCP accept()s.
# For Linux, uses BCC, eBPF. Embedded C.
#
# USAGE: tcpaccept [-h] [-t] [-p PID]
#
# This uses dynamic tracing of the kernel inet_csk_accept() socket function
# (from tcp_prot.accept), and will need to be modified to match kernel changes.
#
# Copyright (c) 2015 Brendan Gregg.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 13-Oct-2015 Brendan Gregg Created this.
# 14-Feb-2016 " " Switch to bpf_perf_output.
from __future__ import print_function
from bcc import BPF
from socket import inet_ntop, AF_INET, AF_INET6
from struct import pack
import argparse
import ctypes as ct
# arguments
examples = """examples:
./tcpaccept # trace all TCP accept()s
./tcpaccept -t # include timestamps
./tcpaccept -p 181 # only trace PID 181
"""
parser = argparse.ArgumentParser(
description="Trace TCP accepts",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-t", "--timestamp", action="store_true",
help="include timestamp on output")
parser.add_argument("-p", "--pid",
help="trace this PID only")
parser.add_argument("--ebpf", action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
debug = 0
# define BPF program
# Common event structs shared by both the kprobe and tracepoint variants.
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <net/sock.h>
#include <bcc/proto.h>
// separate data structs for ipv4 and ipv6
struct ipv4_data_t {
    // XXX: switch some to u32's when supported
    u64 ts_us;
    u32 pid;
    u32 saddr;
    u32 daddr;
    u64 ip;
    u16 lport;
    char task[TASK_COMM_LEN];
};
BPF_PERF_OUTPUT(ipv4_events);
struct ipv6_data_t {
    u64 ts_us;
    u32 pid;
    unsigned __int128 saddr;
    unsigned __int128 daddr;
    u64 ip;
    u16 lport;
    char task[TASK_COMM_LEN];
};
BPF_PERF_OUTPUT(ipv6_events);
"""
#
# The following is the code for older kernels(Linux pre-4.16).
# It uses kprobes to instrument inet_csk_accept(). On Linux 4.16 and
# later, the sock:inet_sock_set_state tracepoint should be used instead, as
# is done by the code that follows this.
#
bpf_text_kprobe = """
int kretprobe__inet_csk_accept(struct pt_regs *ctx)
{
    struct sock *newsk = (struct sock *)PT_REGS_RC(ctx);
    u32 pid = bpf_get_current_pid_tgid();
    if (newsk == NULL)
        return 0;
    // check this is TCP
    u8 protocol = 0;
    // workaround for reading the sk_protocol bitfield:
    // Following comments add by Joe Yin:
    // Unfortunately,it can not work since Linux 4.10,
    // because the sk_wmem_queued is not following the bitfield of sk_protocol.
    // And the following member is sk_gso_max_segs.
    // So, we can use this:
    // bpf_probe_read(&protocol, 1, (void *)((u64)&newsk->sk_gso_max_segs) - 3);
    // In order to diff the pre-4.10 and 4.10+ ,introduce the variables gso_max_segs_offset,sk_lingertime,
    // sk_lingertime is closed to the gso_max_segs_offset,and
    // the offset between the two members is 4
    int gso_max_segs_offset = offsetof(struct sock, sk_gso_max_segs);
    int sk_lingertime_offset = offsetof(struct sock, sk_lingertime);
    if (sk_lingertime_offset - gso_max_segs_offset == 4)
        // 4.10+ with little endian
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
        bpf_probe_read(&protocol, 1, (void *)((u64)&newsk->sk_gso_max_segs) - 3);
    else
        // pre-4.10 with little endian
        bpf_probe_read(&protocol, 1, (void *)((u64)&newsk->sk_wmem_queued) - 3);
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
    // 4.10+ with big endian
    bpf_probe_read(&protocol, 1, (void *)((u64)&newsk->sk_gso_max_segs) - 1);
    else
    // pre-4.10 with big endian
    bpf_probe_read(&protocol, 1, (void *)((u64)&newsk->sk_wmem_queued) - 1);
#else
# error "Fix your compiler's __BYTE_ORDER__?!"
#endif
    if (protocol != IPPROTO_TCP)
        return 0;
    // pull in details
    u16 family = 0, lport = 0;
    family = newsk->__sk_common.skc_family;
    lport = newsk->__sk_common.skc_num;
    if (family == AF_INET) {
        struct ipv4_data_t data4 = {.pid = pid, .ip = 4};
        data4.ts_us = bpf_ktime_get_ns() / 1000;
        data4.saddr = newsk->__sk_common.skc_rcv_saddr;
        data4.daddr = newsk->__sk_common.skc_daddr;
        data4.lport = lport;
        bpf_get_current_comm(&data4.task, sizeof(data4.task));
        ipv4_events.perf_submit(ctx, &data4, sizeof(data4));
    } else if (family == AF_INET6) {
        struct ipv6_data_t data6 = {.pid = pid, .ip = 6};
        data6.ts_us = bpf_ktime_get_ns() / 1000;
        bpf_probe_read(&data6.saddr, sizeof(data6.saddr),
            &newsk->__sk_common.skc_v6_rcv_saddr.in6_u.u6_addr32);
        bpf_probe_read(&data6.daddr, sizeof(data6.daddr),
            &newsk->__sk_common.skc_v6_daddr.in6_u.u6_addr32);
        data6.lport = lport;
        bpf_get_current_comm(&data6.task, sizeof(data6.task));
        ipv6_events.perf_submit(ctx, &data6, sizeof(data6));
    }
    // else drop
    return 0;
}
"""
bpf_text_tracepoint = """
TRACEPOINT_PROBE(sock, inet_sock_set_state)
{
    if (args->protocol != IPPROTO_TCP)
        return 0;
    u32 pid = bpf_get_current_pid_tgid();
    // pull in details
    u16 family = 0, lport = 0;
    family = args->family;
    lport = args->sport;
    if (family == AF_INET) {
        struct ipv4_data_t data4 = {.pid = pid, .ip = 4};
        data4.ts_us = bpf_ktime_get_ns() / 1000;
        __builtin_memcpy(&data4.saddr, args->saddr, sizeof(data4.saddr));
        __builtin_memcpy(&data4.daddr, args->daddr, sizeof(data4.daddr));
        data4.lport = lport;
        bpf_get_current_comm(&data4.task, sizeof(data4.task));
        ipv4_events.perf_submit(args, &data4, sizeof(data4));
    } else if (family == AF_INET6) {
        struct ipv6_data_t data6 = {.pid = pid, .ip = 6};
        data6.ts_us = bpf_ktime_get_ns() / 1000;
        __builtin_memcpy(&data6.saddr, args->saddr, sizeof(data6.saddr));
        __builtin_memcpy(&data6.daddr, args->daddr, sizeof(data6.daddr));
        data6.lport = lport;
        bpf_get_current_comm(&data6.task, sizeof(data6.task));
        ipv6_events.perf_submit(args, &data6, sizeof(data6));
    }
    // else drop
    return 0;
}
"""
# Prefer the stable tracepoint when the running kernel provides it.
if (BPF.tracepoint_exists("sock", "inet_sock_set_state")):
    bpf_text += bpf_text_tracepoint
else:
    bpf_text += bpf_text_kprobe
# code substitutions
# NOTE(review): neither BPF program variant above contains the literal
# 'FILTER' placeholder, so the replacement below appears to be a no-op and
# the -p/--pid option may have no effect -- confirm against upstream bcc.
if args.pid:
    bpf_text = bpf_text.replace('FILTER',
        'if (pid != %s) { return 0; }' % args.pid)
else:
    bpf_text = bpf_text.replace('FILTER', '')
if debug or args.ebpf:
    print(bpf_text)
    if args.ebpf:
        exit()
# event data
TASK_COMM_LEN = 16 # linux/sched.h
# ctypes mirrors of the BPF-side structs; field order, types and sizes must
# match the C definitions exactly so perf events decode correctly.
class Data_ipv4(ct.Structure):
    _fields_ = [
        ("ts_us", ct.c_ulonglong),
        ("pid", ct.c_uint),
        ("saddr", ct.c_uint),
        ("daddr", ct.c_uint),
        ("ip", ct.c_ulonglong),
        ("lport", ct.c_ushort),
        ("task", ct.c_char * TASK_COMM_LEN)
    ]
class Data_ipv6(ct.Structure):
    # saddr/daddr hold 128-bit IPv6 addresses as two 64-bit words each.
    _fields_ = [
        ("ts_us", ct.c_ulonglong),
        ("pid", ct.c_uint),
        ("saddr", (ct.c_ulonglong * 2)),
        ("daddr", (ct.c_ulonglong * 2)),
        ("ip", ct.c_ulonglong),
        ("lport", ct.c_ushort),
        ("task", ct.c_char * TASK_COMM_LEN)
    ]
# process event
def print_ipv4_event(cpu, data, size):
    """Perf-buffer callback: format one IPv4 accept event as a table row."""
    global start_ts
    event = ct.cast(data, ct.POINTER(Data_ipv4)).contents
    if args.timestamp:
        # The first event anchors the relative-time column at zero.
        if start_ts == 0:
            start_ts = event.ts_us
        delta_s = (float(event.ts_us) - start_ts) / 1000000
        print("%-9.3f" % delta_s, end="")
    raddr = inet_ntop(AF_INET, pack("I", event.daddr))
    laddr = inet_ntop(AF_INET, pack("I", event.saddr))
    print("%-6d %-12.12s %-2d %-16s %-16s %-4d" % (
        event.pid, event.task.decode(), event.ip, raddr, laddr, event.lport))
def print_ipv6_event(cpu, data, size):
    """Perf-buffer callback: format one IPv6 accept event as a table row."""
    global start_ts
    event = ct.cast(data, ct.POINTER(Data_ipv6)).contents
    if args.timestamp:
        # The first event anchors the relative-time column at zero.
        if start_ts == 0:
            start_ts = event.ts_us
        delta_s = (float(event.ts_us) - start_ts) / 1000000
        print("%-9.3f" % delta_s, end="")
    raddr = inet_ntop(AF_INET6, event.daddr)
    laddr = inet_ntop(AF_INET6, event.saddr)
    print("%-6d %-12.12s %-2d %-16s %-16s %-4d" % (
        event.pid, event.task.decode(), event.ip, raddr, laddr, event.lport))
# initialize BPF
b = BPF(text=bpf_text)
# header
if args.timestamp:
    print("%-9s" % ("TIME(s)"), end="")
print("%-6s %-12s %-2s %-16s %-16s %-4s" % ("PID", "COMM", "IP", "RADDR",
    "LADDR", "LPORT"))
# Relative-timestamp origin; set from the first event when -t is used.
start_ts = 0
# read events
b["ipv4_events"].open_perf_buffer(print_ipv4_event)
b["ipv6_events"].open_perf_buffer(print_ipv6_event)
while 1:
    b.perf_buffer_poll()
| 31.744526
| 108
| 0.642102
|
4a156431f5ce4e5e5f5e37536317bdef40e241d3
| 1,174
|
py
|
Python
|
model/condition_block.py
|
Top34051/stargan-zsvc
|
ac9f91ec6ea6fa8c1950d28cca1a23931388d496
|
[
"MIT"
] | 8
|
2021-08-05T07:50:12.000Z
|
2021-11-11T13:24:35.000Z
|
model/condition_block.py
|
Top34051/stargan-zsvc
|
ac9f91ec6ea6fa8c1950d28cca1a23931388d496
|
[
"MIT"
] | null | null | null |
model/condition_block.py
|
Top34051/stargan-zsvc
|
ac9f91ec6ea6fa8c1950d28cca1a23931388d496
|
[
"MIT"
] | 1
|
2021-08-12T02:51:06.000Z
|
2021-08-12T02:51:06.000Z
|
import torch
from torch import nn
from fastai.layers import init_linear
class CIN(nn.Module):
    """Conditional Instance Normalization.

    Normalizes the input along dim 2 and re-scales it with an affine
    transform (gamma, beta) predicted from a conditioning embedding.
    """
    def __init__(self, dim_out, embed_dim):
        super().__init__()
        # Linear projections from the conditioning embedding to per-channel
        # scale (gamma) and shift (beta).
        self.gamma = nn.Linear(embed_dim, dim_out)
        init_linear(self.gamma)
        self.beta = nn.Linear(embed_dim, dim_out)
        init_linear(self.beta)
    def forward(self, x, embed):
        # Assumes x is (batch, channels, time) -- normalization is over dim 2.
        sigma, mu = torch.std_mean(x, dim=2, keepdim=True)
        sigma = torch.clamp(sigma, min=1e-7)
        gamma = self.gamma(embed)[..., None]
        # Bug fix: the shift must come from self.beta; the original reused
        # self.gamma here, leaving the beta layer dead and the shift wrong.
        beta = self.beta(embed)[..., None]
        return gamma * (x - mu) / sigma + beta
class ConditioningBlock(nn.Module):
    """Conv1d -> conditional instance normalization -> GLU gating block."""
    def __init__(self, dim_in, dim_out, kernel_size, stride, padding, embed_dim):
        super().__init__()
        # Attribute names (conv/cin/glu) are kept stable so existing
        # checkpoints / state_dicts continue to load.
        self.conv = nn.Conv1d(in_channels=dim_in, out_channels=dim_out,
            kernel_size=kernel_size, stride=stride, padding=padding, bias=True
        )
        self.cin = CIN(dim_out, embed_dim)
        self.glu = nn.GLU(dim=1)
    def forward(self, x, embed):
        """Apply convolution, embedding-conditioned normalization, then GLU."""
        features = self.conv(x)
        conditioned = self.cin(features, embed)
        return self.glu(conditioned)
| 30.102564
| 81
| 0.60477
|
4a1564a09ea5d9244c98a6051f6b3c426cf498a6
| 6,871
|
py
|
Python
|
scout/adapter/mongo/hpo.py
|
Clinical-Genomics/scout
|
1ec4daa76093c2ffa4655612b63d325970253f58
|
[
"BSD-3-Clause"
] | 111
|
2015-01-15T11:53:20.000Z
|
2022-03-26T19:55:24.000Z
|
scout/adapter/mongo/hpo.py
|
Clinical-Genomics/scout
|
1ec4daa76093c2ffa4655612b63d325970253f58
|
[
"BSD-3-Clause"
] | 2,995
|
2015-01-15T16:14:20.000Z
|
2022-03-31T13:36:32.000Z
|
scout/adapter/mongo/hpo.py
|
Clinical-Genomics/scout
|
1ec4daa76093c2ffa4655612b63d325970253f58
|
[
"BSD-3-Clause"
] | 55
|
2015-05-31T19:09:49.000Z
|
2021-11-01T10:50:31.000Z
|
# -*- coding: utf-8 -*-
import logging
import operator
import pymongo
from anytree import Node, RenderTree, resolver, search
from anytree.exporter import DictExporter
from pymongo.errors import BulkWriteError, DuplicateKeyError
from scout.exceptions import IntegrityError
LOG = logging.getLogger(__name__)
class HpoHandler(object):
    """MongoDB CRUD helpers for HPO (Human Phenotype Ontology) terms.

    Expects the composing adapter to provide ``self.hpo_term_collection``
    (a pymongo collection).
    """

    def load_hpo_term(self, hpo_obj):
        """Add a hpo object

        Arguments:
            hpo_obj(dict)

        Raises:
            IntegrityError: if a term with the same _id already exists
        """
        LOG.debug("Loading hpo term %s into database", hpo_obj["_id"])
        try:
            self.hpo_term_collection.insert_one(hpo_obj)
        except DuplicateKeyError as err:
            # Bug fix: the original mixed a printf-style "%s" placeholder
            # with str.format(), so the message always contained a literal
            # "%s" instead of the term id.
            raise IntegrityError(
                "Hpo term {} already exists in database".format(hpo_obj["_id"])
            ) from err
        LOG.debug("Hpo term saved")

    def load_hpo_bulk(self, hpo_bulk):
        """Add a list of hpo objects in one bulk insert

        Arguments:
            hpo_bulk(list(scout.models.HpoTerm))

        Returns:
            result: pymongo bulkwrite result

        Raises:
            IntegrityError: on duplicate keys or bulk-write failure
        """
        LOG.debug("Loading hpo bulk")
        try:
            result = self.hpo_term_collection.insert_many(hpo_bulk)
        except (DuplicateKeyError, BulkWriteError) as err:
            # Chain the original driver error for easier debugging.
            raise IntegrityError(err) from err
        return result

    def hpo_term(self, hpo_id):
        """Fetch a hpo term

        Args:
            hpo_id(str)

        Returns:
            hpo_obj(dict): the matching document, or None when absent
        """
        LOG.debug("Fetching hpo term %s", hpo_id)
        return self.hpo_term_collection.find_one({"_id": hpo_id})

    def hpo_terms(self, query=None, hpo_term=None, text=None, limit=None, skip=None):
        """Return all HPO terms

        If a query is sent hpo_terms will try to match with regex on term or
        description.

        Args:
            query(str): Part of a hpoterm or description
            hpo_term(str): Search for a specific hpo term
            text(str): Full-text search string
            limit(int): the number of desired results
            skip(int): the number of results to skip

        Returns:
            result(pymongo.Cursor): A cursor with hpo terms
        """
        # (Removed an unused local `search_term` from the original.)
        query_dict = {}
        if query:
            # Case-insensitive substring match on either id or description.
            query_dict = {
                "$or": [
                    {"hpo_id": {"$regex": query, "$options": "i"}},
                    {"description": {"$regex": query, "$options": "i"}},
                ]
            }
        elif text:
            # Quote every word after the first so the $text search requires
            # all of the words to be present.
            new_string = ""
            for i, word in enumerate(text.split(" ")):
                if i == 0:
                    new_string += word
                else:
                    new_string += ' "{0}"'.format(word)
            LOG.info("Search HPO terms with %s", new_string)
            query_dict["$text"] = {"$search": new_string}
        elif hpo_term:
            query_dict["hpo_id"] = hpo_term
        # For pymongo, a limit/skip of 0 means "no limit"/"no skip".
        limit = limit or 0
        skip = skip or 0
        res = (
            self.hpo_term_collection.find(query_dict)
            .skip(skip)
            .limit(limit)
            .sort("hpo_number", pymongo.ASCENDING)
        )
        return res

    def generate_hpo_gene_list(self, *hpo_terms):
        """Generate a sorted list of (hgnc_id, count) tuples

        Args:
            hpo_terms(iterable(str))

        Returns:
            hpo_genes(list(tuple)): (hgnc_id, count) sorted by descending count
        """
        genes = {}
        for term in hpo_terms:
            hpo_obj = self.hpo_term(term)
            if hpo_obj:
                # Count in how many of the given terms each gene occurs.
                for hgnc_id in hpo_obj["genes"]:
                    genes[hgnc_id] = genes.get(hgnc_id, 0) + 1
            else:
                LOG.warning("Term %s could not be found", term)
        sorted_genes = sorted(genes.items(), key=operator.itemgetter(1), reverse=True)
        return sorted_genes

    def organize_tree(self, all_terms, root):
        """Organizes a set of Tree node objects into a tree, according to their ancestors and children

        Args:
            all_terms(dict): a dictionary with "term_name" as keys and term_dict as values
            root(anytree.Node)

        Returns:
            root(anytree.Node): the updated root node of the tree
        """
        # Move tree nodes in the right position according to the ontology
        for key, term in all_terms.items():
            ancestors = term["ancestors"]
            if len(ancestors) == 0:
                continue
            for ancestor in ancestors:
                ancestor_node = search.find_by_attr(root, ancestor)
                if ancestor_node is None:  # It's probably the term on the top
                    continue
                node = search.find_by_attr(root, key)
                node.parent = ancestor_node
        return root

    def build_phenotype_tree(self, hpo_id):
        """Creates an HPO Tree based on one or more given ancestors

        Args:
            hpo_id(str): an HPO term

        Returns:
            tree_dict(dict): a tree of all HPO children of the given term, as a dictionary
        """
        root = Node(id="root", name="root", parent=None)
        all_terms = {}
        unique_terms = set()

        def _hpo_terms_list(hpo_ids):
            # Recursively collect the given terms plus all their descendants
            # into all_terms, creating one tree node per unique term.
            for term_id in hpo_ids:
                term_obj = self.hpo_term(term_id)
                if term_obj is None:
                    continue
                # sort term children by ascending HPO number
                children = sorted(
                    term_obj["children"],
                    key=lambda x: int("".join([i for i in x if i.isdigit()])),
                )
                term_obj["children"] = children
                all_terms[term_id] = term_obj
                if term_id not in unique_terms:
                    node = Node(term_id, parent=root, description=term_obj["description"])
                    unique_terms.add(term_id)
                # recursive loop to collect children, children of children and so on
                _hpo_terms_list(term_obj["children"])

        # compile a list of all HPO term objects to include in the submodel
        _hpo_terms_list([hpo_id])  # trigger the recursive loop to collect nested HPO terms
        # rearrange tree according to the HPO ontology
        root = self.organize_tree(all_terms, root)
        node_resolver = resolver.Resolver("name")
        # Extract a tree structure having the chosen HPO term (hpo_id) as ancestor of all the children terms
        term_node = node_resolver.get(root, hpo_id)
        LOG.info(f"Built ontology for HPO term:{hpo_id}:\n{RenderTree(term_node)}")
        exporter = DictExporter()
        # Export this tree structure as dictionary, so that can be saved in database
        tree_dict = exporter.export(term_node)
        return tree_dict
| 34.878173
| 108
| 0.563819
|
4a1564a73d6664a4c698e66be943aef05071a253
| 2,798
|
py
|
Python
|
uproot/source/http.py
|
riga/uproot
|
78de42f849079c35fd05ae22033e56f02492b6c1
|
[
"BSD-3-Clause"
] | 1
|
2021-03-18T23:33:35.000Z
|
2021-03-18T23:33:35.000Z
|
uproot/source/http.py
|
riga/uproot
|
78de42f849079c35fd05ae22033e56f02492b6c1
|
[
"BSD-3-Clause"
] | 17
|
2020-01-28T22:33:27.000Z
|
2021-06-10T21:05:49.000Z
|
uproot/source/http.py
|
riga/uproot
|
78de42f849079c35fd05ae22033e56f02492b6c1
|
[
"BSD-3-Clause"
] | 1
|
2020-04-17T15:33:03.000Z
|
2020-04-17T15:33:03.000Z
|
#!/usr/bin/env python
# Copyright (c) 2017, DIANA-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import re
try:
from urllib.request import urlopen, Request
except ImportError:
from urllib2 import urlopen, Request
import numpy
import uproot.source.chunked
class HTTPSource(uproot.source.chunked.ChunkedSource):
    """Chunked source that reads a remote file through HTTP byte-range requests.

    Each chunk of ``chunkbytes`` bytes is fetched on demand with a ``Range``
    header; the total file size is learned lazily from the first response's
    ``Content-Range`` header.
    """

    # makes __doc__ attribute mutable before Python 3.3
    __metaclass__ = type.__new__(type, "type", (uproot.source.chunked.ChunkedSource.__metaclass__,), {})

    def __init__(self, path, *args, **kwds):
        super(HTTPSource, self).__init__(path, *args, **kwds)
        # unknown until the first ranged response arrives
        self._size = None

    defaults = {"chunkbytes": 16*1024, "limitbytes": 16*1024**2}

    def _open(self):
        # nothing to open eagerly; each chunk is fetched on demand in _read
        pass

    def size(self):
        """Return the total remote file size in bytes, or None before any read."""
        return self._size

    _contentrange = re.compile("^bytes [0-9]+-[0-9]+/([0-9]+)$")

    def _read(self, chunkindex):
        """Fetch chunk number ``chunkindex`` and return it as a uint8 numpy array.

        Side effect: records the total file size from the Content-Range header
        the first time it is seen.
        """
        start = chunkindex * self._chunkbytes
        # HTTP Range end offsets are inclusive (RFC 7233), so subtract 1 to
        # request exactly self._chunkbytes bytes; the original end offset
        # fetched one extra byte that overlapped the next chunk.
        stop = (chunkindex + 1) * self._chunkbytes - 1
        request = Request(self.path, headers={"Range": "bytes={0}-{1}".format(start, stop)})
        handle = urlopen(request)
        data = handle.read()
        if self._size is None:
            m = self._contentrange.match(handle.headers.get("content-range", ""))
            if m is not None:
                self._size = int(m.group(1))
        return numpy.frombuffer(data, dtype=numpy.uint8)
| 40.550725
| 147
| 0.721587
|
4a15652363de33ea41f6a314154f01c3279e8a9c
| 2,702
|
py
|
Python
|
Configuration/Geometry/python/GeometryDD4hepExtended2026D78Reco_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 5
|
2020-07-02T19:05:26.000Z
|
2022-02-25T14:37:09.000Z
|
Configuration/Geometry/python/GeometryDD4hepExtended2026D78Reco_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 524
|
2018-01-29T15:50:45.000Z
|
2021-08-04T14:03:21.000Z
|
Configuration/Geometry/python/GeometryDD4hepExtended2026D78Reco_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 8
|
2020-06-08T16:28:54.000Z
|
2021-11-16T14:40:00.000Z
|
import FWCore.ParameterSet.Config as cms
# This config was generated automatically using generate2026Geometry.py
# If you notice a mistake, please update the generating script, not just this config
from Configuration.Geometry.GeometryDD4hepExtended2026D78_cff import *
# tracker
from Geometry.CommonTopologies.globalTrackingGeometry_cfi import *
from RecoTracker.GeometryESProducer.TrackerRecoGeometryESProducer_cfi import *
from Geometry.TrackerGeometryBuilder.trackerParameters_cff import *
from Geometry.TrackerNumberingBuilder.trackerTopology_cfi import *
from Geometry.TrackerGeometryBuilder.idealForDigiTrackerGeometry_cff import *
trackerGeometry.applyAlignment = cms.bool(False)
# calo
from Geometry.CaloEventSetup.HGCalV9Topology_cfi import *
from Geometry.HGCalGeometry.HGCalGeometryESProducer_cfi import *
from Geometry.CaloEventSetup.CaloTopology_cfi import *
from Geometry.CaloEventSetup.CaloGeometryBuilder_cfi import *
# Restrict the calorimeter geometry build to the subdetectors present in this
# 2026/D78 layout: HCAL, ZDC, ECAL barrel, calo towers and the three HGCal parts.
CaloGeometryBuilder = cms.ESProducer("CaloGeometryBuilder",
    SelectedCalos = cms.vstring("HCAL",
                                "ZDC",
                                "EcalBarrel",
                                "TOWER",
                                "HGCalEESensitive",
                                "HGCalHESiliconSensitive",
                                "HGCalHEScintillatorSensitive"
                                )
)
from Geometry.EcalAlgo.EcalBarrelGeometry_cfi import *
from Geometry.HcalEventSetup.HcalGeometry_cfi import *
from Geometry.HcalEventSetup.CaloTowerGeometry_cfi import *
from Geometry.HcalEventSetup.CaloTowerTopology_cfi import *
from Geometry.HcalCommonData.hcalDDDRecConstants_cfi import *
from Geometry.HcalEventSetup.hcalTopologyIdeal_cfi import *
from Geometry.CaloEventSetup.EcalTrigTowerConstituents_cfi import *
from Geometry.EcalMapping.EcalMapping_cfi import *
from Geometry.EcalMapping.EcalMappingRecord_cfi import *
# muon
from Geometry.MuonNumbering.muonNumberingInitialization_cfi import *
from RecoMuon.DetLayers.muonDetLayerGeometry_cfi import *
from Geometry.GEMGeometryBuilder.gemGeometry_cfi import *
from Geometry.CSCGeometryBuilder.idealForDigiCscGeometry_cff import *
from Geometry.DTGeometryBuilder.idealForDigiDtGeometry_cff import *
# forward
from Geometry.ForwardGeometry.ForwardGeometry_cfi import *
# timing
from RecoMTD.DetLayers.mtdDetLayerGeometry_cfi import *
from Geometry.MTDGeometryBuilder.mtdParameters_cff import *
from Geometry.MTDNumberingBuilder.mtdNumberingGeometry_cff import *
from Geometry.MTDNumberingBuilder.mtdTopology_cfi import *
from Geometry.MTDGeometryBuilder.mtdGeometry_cfi import *
from Geometry.MTDGeometryBuilder.idealForDigiMTDGeometry_cff import *
mtdGeometry.applyAlignment = cms.bool(False)
| 45.033333
| 84
| 0.801999
|
4a15662bd1dda538f11181578ac77eb4d534032b
| 9,383
|
py
|
Python
|
step2_preprocess/src/tanqeebpreprocess.py
|
natc79/MENAJobData
|
ab6e03522470d1657ed9ace47734bf3597efe645
|
[
"MIT"
] | null | null | null |
step2_preprocess/src/tanqeebpreprocess.py
|
natc79/MENAJobData
|
ab6e03522470d1657ed9ace47734bf3597efe645
|
[
"MIT"
] | null | null | null |
step2_preprocess/src/tanqeebpreprocess.py
|
natc79/MENAJobData
|
ab6e03522470d1657ed9ace47734bf3597efe645
|
[
"MIT"
] | null | null | null |
"""
Purpose: This class consists of general functions that are useful
in processing data from various websites containing job advertisement
data that was scraped from the web.
Author: Natalie Chun
Created: 23 November 2018
"""
from pytz import timezone
import csv
import pandas as pd
import numpy as np
import re
from datetime import datetime
import time
import random
import re
import os
from config import FileConfig
from google.cloud import translate
import sqlite3
from basepreprocess import BasePreprocessor
from googletrans import Translator
import html2text
class TanQeebPreprocessor(BasePreprocessor):
    """Preprocess scraped Tanqeeb job-advertisement data held in a local SQLite db.

    Responsibilities: translating Arabic descriptions to English, deriving an
    education-level field, combining live and archived page data, and emitting
    summary statistics.
    """

    def __init__(self):
        self.extdir = os.path.join(FileConfig.EXTDIR, 'tanqeeb')
        self.conn = sqlite3.connect(os.path.join(self.extdir, 'tanqeeb.db'), timeout=10)
        self.cursor = self.conn.cursor()
        #self.unprocdata = self._combine_data()
        self.figdir = os.path.join(FileConfig.FIGDIR, 'tanqeeb')
        self.outdir = os.path.join(FileConfig.INTDIR, 'tanqeeb')
        self.tz = timezone('Africa/Cairo')
        self.datecur = datetime.now(self.tz)
        self.datasrc = 'TanQeeb'

    def _translate_to_english(self):
        """Translate the arabic description into english and insert into sql table."""
        query = "SELECT DISTINCT * FROM jobadpage WHERE postdate IS NOT NULL;"
        # was: pd.read_sql(query, tp.conn) -- `tp` only exists when run as a
        # script, raising NameError on import; use self throughout
        results = pd.read_sql(query, self.conn)
        trans = Translator()
        h = html2text.HTML2Text()
        h.ignore_links = True
        query = """SELECT DISTINCT uniqueid FROM translation;"""
        trans_ids = list(pd.read_sql(query, self.conn)['uniqueid'])
        # only a small batch per run -- presumably to stay under rate limits
        for i, row in results[0:10].iterrows():
            if row['uniqueid'] not in trans_ids:
                temp = h.handle(row['description'].decode('utf-8'))
                query = """INSERT OR IGNORE INTO translation (country, uniqueid, description_en) VALUES (?,?,?);"""
                entry = [row['country'], row['uniqueid'], trans.translate(temp).text]
                self.cursor.execute(query, entry)
                self.conn.commit()
                time.sleep(random.randint(3, 6))

    def set_search_terms(self):
        """Define regex fragments for each education level and store on the instance."""
        searcheduc = {}
        searcheduc['diploma'] = ['diploma', 'high school', 'secondary education']
        searcheduc['bachelors'] = ['higher education', 'baccalaureate', 'university', 'college', 'university (graduates|degree)', 'bachelor(s){0,1}', 'b(\.){0,1}[as](\.){0,1}', 'b(\.){0,1}sc(\.){0,1}', 'college degree', 'engineering', 'computer science']
        searcheduc['masters'] = ['masters', 'm(\.){0,1}[as](\.){0,1}', 'm(\.){0,1}b(\.){0,1}a(\.){0,1}']
        searcheduc['doctorate'] = ['doctorate', 'phd', 'ph(\.){0,1}d(\.){0,1}', 'doctor of philosophy', 'faculty']
        searcheduc['md'] = ['m(\.){0,1}d(\.){0,1}', 'medical doctor', 'medical degree']
        searcheduc['rn'] = ['r(\.){0,1}n(\.){0,1}', 'registered nurse', 'nursing degree', 'nursing']
        searcheduc['jd'] = ['j(\.){0,1}d(\.){0,1}', 'law degree']
        self.searcheduc = searcheduc

    def get_education_map(self, educseries):
        """Get education map for arabic to english.

        Requires set_search_terms() to have been called first so that
        self.searcheduc exists.

        Args:
            educseries(pd.Series): raw (Arabic) education values

        Returns:
            pd.DataFrame with arabic/english/education columns; also pickles
            the arabic->education mapping to educmap.pickle.
        """
        import pickle  # stdlib; was referenced below without being imported
        # was: bare `client` (undefined NameError); build the translate client here
        client = translate.Client()
        values = educseries.value_counts()
        translist = []
        for index in values.index:
            translation = client.translate(
                index,
                target_language='en')
            translist.append(translation['translatedText'].lower())
        df = pd.DataFrame({'arabic': values.index, 'english': translist})
        # create map for bachelors degree; decode the HTML apostrophe entity
        df['english'] = [row['english'].strip().replace("&#39;", "'") for i, row in df.iterrows()]
        df['education'] = ''
        # was: bare `searcheduc` (undefined); use the instance attribute
        for ed, srchterm in self.searcheduc.items():
            re1 = re.compile(r'\b(%s)\b' % ('|'.join(srchterm)))
            df['education'] = [ed if re1.match(row['english']) is not None else row['education'] for i, row in df.iterrows()]
        educmap = {row['arabic']: row['education'] for i, row in df.iterrows() if row['education'] != ''}
        with open(os.path.join(FileConfig.INTDIR, 'tanqeeb', 'educmap.pickle'), 'wb') as f:
            pickle.dump(educmap, f)
        return(df)

    def fillin_education(self):
        """Fill in education column based on content description.

        Requires set_search_terms() to have been called first so that
        self.searcheduc exists.
        """
        import pickle  # stdlib; was referenced below without being imported
        query = "SELECT DISTINCT * FROM jobadpage WHERE postdate IS NOT NULL;"
        results = pd.read_sql(query, self.conn)
        # was: os.isfile -- the os module has no such attribute (AttributeError)
        if os.path.isfile(os.path.join(FileConfig.INTDIR, 'tanqeeb', 'educmap.pickle')) is False:
            self.get_education_map(results['education'])
        with open(os.path.join(FileConfig.INTDIR, 'tanqeeb', 'educmap.pickle'), 'rb') as f:
            educmap = pickle.load(f)
        results['f_education'] = results['education'].replace(educmap)
        results['description'] = [row['description'].decode('utf-8') for i, row in results.iterrows()]
        # was: bare `searcheduc` (undefined); use the instance attribute
        results['f_education'] = ['' if row['f_education'] not in list(self.searcheduc.keys()) else row['f_education'] for i, row in results.iterrows()]
        # first pass: match the English education search terms in descriptions
        for ed, srchterm in self.searcheduc.items():
            re1 = re.compile(r'\b(%s)\b' % ('|'.join(srchterm)))
            results['f_education'] = [ed if row['f_education'] == '' and row['description'] is not None and re1.search(row['description'].lower()) is not None else row['f_education'] for i, row in results.iterrows()]
        # second pass: match the raw Arabic strings mapped to each level
        searcheducar = {}
        for key, ed in educmap.items():
            if ed not in searcheducar:
                searcheducar[ed] = []
            searcheducar[ed].append(key)
        for ed, srchterm in searcheducar.items():
            re1 = re.compile(r'\b(%s)\b' % ('|'.join(srchterm)))
            results['f_education'] = [ed if row['f_education'] == '' and row['description'] is not None and re1.search(row['description'].lower()) is not None else row['f_education'] for i, row in results.iterrows()]
        # Translate description into english
        return(results)

    def _get_top_skills(self):
        """Identify top skills based on job descriptions.

        Right now assume time series data is not that interesting.
        NOTE: Not that efficient if dataset large...think about how to better do this.
        """
        query = """SELECT DISTINCT country, uniqueid, description
        FROM jobadpage
        """
        #chunk_iter = pd.read_sql(query, self.conn, chunksize=opt_chunk)
        #for chunk in chunk_iter:
        df = pd.read_sql(query, self.conn)
        # STEP 1: Clean the text of extraneous words
        # NOTE(review): _clean_text/_translate_text are assumed to be supplied
        # by BasePreprocessor -- confirm
        df['description'] = [self._translate_text(self._clean_text(row['description'])) for i, row in df.iterrows()]
        # STEP 2: Extract key words from database
        # NOTE(review): `kw` (keyword extractor) is not defined in this module;
        # this line raises NameError until it is provided
        df['vocab_words'] = [kw['master'].extract_keywords(row['description']) for i, row in df.iterrows()]
        # STEP 3: Figure out total counts of keywords

    def _create_time_series(self):
        """Function to develop time series data and key variables that are useful
        in developing predictions.
        """
        raise NotImplementedError

    def _combine_data(self):
        """Combine live and archived page data into a single DataFrame."""
        # extract the relevant set of data
        query = '''SELECT * FROM pagedata;'''
        unprocdata = pd.read_sql(query, self.conn, parse_dates=['postdate', 'downloaddate'])
        query = '''SELECT * FROM archivedpagedata;'''
        unprocarchiveddata = pd.read_sql(query, self.conn, parse_dates=['postdate', 'downloaddate'])
        unprocdata = unprocdata.append(unprocarchiveddata, ignore_index=True)
        # now get any data that might exists in the various csv files that have been archived
        archivedfiles = []
        if len(archivedfiles) > 0:
            for date in archivedfiles:
                filename = 'archivedpagedata_' + date + '.csv'
                tempdata = pd.read_csv(filename, parse_dates=['postdate', 'downloaddate'])
                # NOTE: append tempdata to the unprocdata file maybe have to check that the data is in same format
                unprocdata = unprocdata.append(tempdata, ignore_index=True)
        # removed: c.close() -- `c` was never defined here and raised NameError
        print("Number of distinct entries: {}".format(len(unprocdata)))
        print(unprocdata.dtypes)
        print(unprocdata.head())
        print("Memory Usage (mb): {}".format(round(unprocdata.memory_usage(deep=True).sum()/1048576, 2)))
        return(unprocdata)

    def generate_stats(self):
        """Create additional variables that are useful for generating statistics."""
        query = """SELECT u.country, u.cat, u.uniqueid, u.i_featured, u.postdate,
        j.jobtype, j.company, j.reqexp, j.education, j.title, j.pubimg
        FROM jobadpageurls AS u
        LEFT JOIN jobadpage AS j
        ON u.uniqueid = j.uniqueid AND u.country = j.country
        """
        df = pd.read_sql(query, self.conn)
        statcols = ['i_featured']
        self._create_stats(df, 'cat', statcols, 'uniqueid')

    def run_all(self):
        """Run the full preprocessing pipeline and close the db connection."""
        print("Running TanqeebPreprocessor on date (%s)" % (self.datecur))
        print("="*100)
        self._document_missing()
        self._document_jobads('jobadpageurls', 'uniqueid', 'cat')
        self.generate_stats()
        self.conn.close()
# Script entry point: run the full Tanqeeb preprocessing pipeline.
if __name__ == "__main__":
    tp = TanQeebPreprocessor()
    tp.run_all()
| 45.328502
| 244
| 0.610892
|
4a15662e4c30d18bbec95ca77faa14de065c1fe7
| 87,578
|
py
|
Python
|
ibis/expr/operations.py
|
jclay/ibis
|
a32dc3b58c485e4706e9e8493dc8c031fe14a865
|
[
"Apache-2.0"
] | 1
|
2020-08-04T08:29:44.000Z
|
2020-08-04T08:29:44.000Z
|
ibis/expr/operations.py
|
jclay/ibis
|
a32dc3b58c485e4706e9e8493dc8c031fe14a865
|
[
"Apache-2.0"
] | null | null | null |
ibis/expr/operations.py
|
jclay/ibis
|
a32dc3b58c485e4706e9e8493dc8c031fe14a865
|
[
"Apache-2.0"
] | null | null | null |
import collections
import functools
import itertools
import operator
from contextlib import suppress
from typing import List
import toolz
import ibis.common.exceptions as com
import ibis.expr.datatypes as dt
import ibis.expr.rules as rlz
import ibis.expr.schema as sch
import ibis.expr.types as ir
from ibis import util
from ibis.expr.schema import HasSchema, Schema
from ibis.expr.signature import Annotable
from ibis.expr.signature import Argument as Arg
def _safe_repr(x, memo=None):
    """Repr helper: route Expr/Node objects through their memo-aware _repr."""
    if isinstance(x, (ir.Expr, Node)):
        return x._repr(memo=memo)
    return repr(x)
# TODO: move to analysis
def distinct_roots(*expressions):
    """Return the unique root tables of all expressions, preserving first-seen order."""
    unique = {}
    for expression in expressions:
        for root in expression._root_tables():
            unique.setdefault(root, None)
    return list(unique)
class Node(Annotable):
    """Base class for operation nodes in the expression graph.

    A Node holds one operation's arguments; wrapping it via ``to_expr``
    produces the user-facing expression object.
    """

    # memoization slots: the built Expr and the computed hash
    __slots__ = '_expr_cached', '_hash'

    def __repr__(self):
        return self._repr()

    def _repr(self, memo=None):
        # Render as "OpName(arg, ...)", sharing one FormatMemo across the
        # whole tree so nested expressions format consistently.
        if memo is None:
            from ibis.expr.format import FormatMemo

            memo = FormatMemo()

        opname = type(self).__name__
        pprint_args = []

        def _pp(x):
            return _safe_repr(x, memo=memo)

        for x in self.args:
            if isinstance(x, (tuple, list)):
                pp = repr(list(map(_pp, x)))
            else:
                pp = _pp(x)
            pprint_args.append(pp)

        return '{}({})'.format(opname, ', '.join(pprint_args))

    @property
    def inputs(self):
        # Immutable view of this node's arguments.
        return tuple(self.args)

    def blocks(self):
        # The contents of this node are referentially distinct and may not be
        # analyzed deeper
        return False

    def flat_args(self):
        # Yield the arguments, flattening one level of non-string iterables.
        for arg in self.args:
            if not isinstance(arg, str) and isinstance(
                arg, collections.abc.Iterable
            ):
                for x in arg:
                    yield x
            else:
                yield arg

    def __hash__(self):
        # Hash over the op type plus flattened args; Exprs contribute their
        # underlying op so structurally equal trees hash equal. Cached.
        if not hasattr(self, '_hash'):
            self._hash = hash(
                (type(self),)
                + tuple(
                    element.op() if isinstance(element, ir.Expr) else element
                    for element in self.flat_args()
                )
            )
        return self._hash

    def __eq__(self, other):
        return self.equals(other)

    def equals(self, other, cache=None):
        """Structural equality, memoized in `cache` so shared subtrees are compared once."""
        if cache is None:
            cache = {}

        key = self, other

        try:
            return cache[key]
        except KeyError:
            cache[key] = result = self is other or (
                type(self) == type(other)
                and all_equal(self.args, other.args, cache=cache)
            )
            return result

    def compatible_with(self, other):
        # Default notion of compatibility is plain structural equality.
        return self.equals(other)

    def is_ancestor(self, other):
        if isinstance(other, ir.Expr):
            other = other.op()

        return self.equals(other)

    def to_expr(self):
        # Build the Expr wrapper once and reuse it afterwards.
        if not hasattr(self, '_expr_cached'):
            self._expr_cached = self._make_expr()
        return self._expr_cached

    def _make_expr(self):
        klass = self.output_type()
        return klass(self)

    def output_type(self):
        """
        This function must resolve the output type of the expression and return
        the node wrapped in the appropriate ValueExpr type.
        """
        raise NotImplementedError
class ValueOp(Node):
    """Base node for operations that yield scalar or column values."""

    def root_tables(self):
        # Only Expr arguments can contribute root tables.
        return distinct_roots(
            *(arg for arg in self.args if isinstance(arg, ir.Expr))
        )

    def resolve_name(self):
        raise com.ExpressionError('Expression is not named: %s' % repr(self))

    def has_resolved_name(self):
        return False
def all_equal(left, right, cache=None):
    """Check whether two objects `left` and `right` are equal.

    Parameters
    ----------
    left : Union[object, Expr, Node]
    right : Union[object, Expr, Node]
    cache : Optional[Dict[Tuple[Node, Node], bool]]
        A dictionary indicating whether two Nodes are equal
    """
    if cache is None:
        cache = {}

    if util.is_iterable(left):
        # equal only if both are iterables of the same length whose
        # elements are pairwise equal
        if not util.is_iterable(right) or len(left) != len(right):
            return False
        return all(
            all_equal(lhs, rhs, cache=cache) for lhs, rhs in zip(left, right)
        )

    if hasattr(left, 'equals'):
        return left.equals(right, cache=cache)
    return left == right
# Infinite generator of unique default names for unbound tables
_table_names = ('unbound_table_{:d}'.format(i) for i in itertools.count())


def genname():
    """Return the next auto-generated unbound-table name."""
    return next(_table_names)
class TableNode(Node):
def get_type(self, name):
return self.schema[name]
def output_type(self):
return ir.TableExpr
def aggregate(self, this, metrics, by=None, having=None):
return Aggregation(this, metrics, by=by, having=having)
def sort_by(self, expr, sort_exprs):
return Selection(expr, [], sort_keys=sort_exprs)
def is_ancestor(self, other):
import ibis.expr.lineage as lin
if isinstance(other, ir.Expr):
other = other.op()
if self.equals(other):
return True
fn = lambda e: (lin.proceed, e.op()) # noqa: E731
expr = self.to_expr()
for child in lin.traverse(fn, expr):
if child.equals(other):
return True
return False
class TableColumn(ValueOp):
"""Selects a column from a TableExpr"""
name = Arg((str, int))
table = Arg(ir.TableExpr)
def __init__(self, name, table):
schema = table.schema()
if isinstance(name, int):
name = schema.name_at_position(name)
super().__init__(name, table)
def _validate(self):
if self.name not in self.table.schema():
raise com.IbisTypeError(
"'{}' is not a field in {}".format(
self.name, self.table.columns
)
)
def parent(self):
return self.table
def resolve_name(self):
return self.name
def has_resolved_name(self):
return True
def root_tables(self):
return self.table._root_tables()
def _make_expr(self):
dtype = self.table._get_type(self.name)
klass = dtype.column_type()
return klass(self, name=self.name)
def find_all_base_tables(expr, memo=None):
    """Recursively collect every blocking base table reachable from `expr`.

    Parameters
    ----------
    expr : ir.Expr
    memo : Optional[Dict[Node, ir.TableExpr]]
        Accumulator mapping each base table's op to its expression.

    Returns
    -------
    memo : Dict[Node, ir.TableExpr]
    """
    if memo is None:
        memo = {}

    node = expr.op()

    if isinstance(expr, ir.TableExpr) and node.blocks():
        # memo is keyed by op, so membership must be tested with `node`;
        # the original tested `expr`, which never matched the keys
        if node not in memo:
            memo[node] = expr
        return memo

    for arg in expr.op().flat_args():
        if isinstance(arg, ir.Expr):
            find_all_base_tables(arg, memo)

    return memo
class PhysicalTable(TableNode, HasSchema):
def blocks(self):
return True
class UnboundTable(PhysicalTable):
schema = Arg(sch.Schema)
name = Arg(str, default=genname)
class DatabaseTable(PhysicalTable):
name = Arg(str)
schema = Arg(sch.Schema)
source = Arg(rlz.client)
def change_name(self, new_name):
return type(self)(new_name, self.args[1], self.source)
class SQLQueryResult(TableNode, HasSchema):
"""A table sourced from the result set of a select query"""
query = Arg(rlz.noop)
schema = Arg(sch.Schema)
source = Arg(rlz.client)
def blocks(self):
return True
class TableArrayView(ValueOp):
"""
(Temporary?) Helper operation class for SQL translation (fully formed table
subqueries to be viewed as arrays)
"""
table = Arg(ir.TableExpr)
name = Arg(str)
def __init__(self, table):
schema = table.schema()
if len(schema) > 1:
raise com.ExpressionError('Table can only have a single column')
name = schema.names[0]
return super().__init__(table, name)
def _make_expr(self):
ctype = self.table._get_type(self.name)
klass = ctype.column_type()
return klass(self, name=self.name)
class UnaryOp(ValueOp):
arg = Arg(rlz.any)
class BinaryOp(ValueOp):
"""A binary operation"""
left = Arg(rlz.any)
right = Arg(rlz.any)
class Cast(ValueOp):
arg = Arg(rlz.any)
to = Arg(dt.dtype)
# see #396 for the issue preventing this
# def resolve_name(self):
# return self.args[0].get_name()
def output_type(self):
return rlz.shape_like(self.arg, dtype=self.to)
class TypeOf(UnaryOp):
output_type = rlz.shape_like('arg', dt.string)
class Negate(UnaryOp):
arg = Arg(rlz.one_of((rlz.numeric(), rlz.interval())))
output_type = rlz.typeof('arg')
class IsNull(UnaryOp):
"""Returns true if values are null
Returns
-------
isnull : boolean with dimension of caller
"""
output_type = rlz.shape_like('arg', dt.boolean)
class NotNull(UnaryOp):
"""Returns true if values are not null
Returns
-------
notnull : boolean with dimension of caller
"""
output_type = rlz.shape_like('arg', dt.boolean)
class ZeroIfNull(UnaryOp):
output_type = rlz.typeof('arg')
class IfNull(ValueOp):
"""Equivalent to (but perhaps implemented differently):
case().when(expr.notnull(), expr)
.else_(null_substitute_expr)
"""
arg = Arg(rlz.any)
ifnull_expr = Arg(rlz.any)
output_type = rlz.shape_like('args')
class NullIf(ValueOp):
"""Set values to NULL if they equal the null_if_expr"""
arg = Arg(rlz.any)
null_if_expr = Arg(rlz.any)
output_type = rlz.shape_like('args')
class NullIfZero(ValueOp):
"""
Set values to NULL if they equal to zero. Commonly used in cases where
divide-by-zero would produce an overflow or infinity.
Equivalent to (value == 0).ifelse(ibis.NA, value)
Returns
-------
maybe_nulled : type of caller
"""
arg = Arg(rlz.numeric)
output_type = rlz.typeof('arg')
class IsNan(ValueOp):
arg = Arg(rlz.floating)
output_type = rlz.shape_like('arg', dt.boolean)
class IsInf(ValueOp):
arg = Arg(rlz.floating)
output_type = rlz.shape_like('arg', dt.boolean)
class CoalesceLike(ValueOp):
# According to Impala documentation:
# Return type: same as the initial argument value, except that integer
# values are promoted to BIGINT and floating-point values are promoted to
# DOUBLE; use CAST() when inserting into a smaller numeric column
arg = Arg(rlz.list_of(rlz.any))
def output_type(self):
first = self.arg[0]
if isinstance(first, (ir.IntegerValue, ir.FloatingValue)):
dtype = first.type().largest
else:
dtype = first.type()
# self.arg is a list of value expressions
return rlz.shape_like(self.arg, dtype)
class Coalesce(CoalesceLike):
pass
class Greatest(CoalesceLike):
pass
class Least(CoalesceLike):
pass
class Abs(UnaryOp):
"""Absolute value"""
output_type = rlz.typeof('arg')
class Ceil(UnaryOp):

    """
    Round up to the nearest integer value greater than or equal to this value

    Returns
    -------
    ceiled : type depending on input
      Decimal values: yield decimal
      Other numeric values: yield integer (int64)
    """

    arg = Arg(rlz.numeric)

    def output_type(self):
        # Decimals keep their type; everything else becomes int64
        if isinstance(self.arg.type(), dt.Decimal):
            return self.arg._factory
        return rlz.shape_like(self.arg, dt.int64)
class Floor(UnaryOp):

    """
    Round down to the nearest integer value less than or equal to this value

    Returns
    -------
    floored : type depending on input
      Decimal values: yield decimal
      Other numeric values: yield integer (int64)
    """

    arg = Arg(rlz.numeric)

    def output_type(self):
        # Decimals keep their type; everything else becomes int64
        if isinstance(self.arg.type(), dt.Decimal):
            return self.arg._factory
        return rlz.shape_like(self.arg, dt.int64)
class Round(ValueOp):
arg = Arg(rlz.numeric)
digits = Arg(rlz.numeric, default=None)
def output_type(self):
if isinstance(self.arg, ir.DecimalValue):
return self.arg._factory
elif self.digits is None:
return rlz.shape_like(self.arg, dt.int64)
else:
return rlz.shape_like(self.arg, dt.double)
class Clip(ValueOp):
arg = Arg(rlz.strict_numeric)
lower = Arg(rlz.strict_numeric, default=None)
upper = Arg(rlz.strict_numeric, default=None)
output_type = rlz.typeof('arg')
class BaseConvert(ValueOp):
arg = Arg(rlz.one_of([rlz.integer, rlz.string]))
from_base = Arg(rlz.integer)
to_base = Arg(rlz.integer)
def output_type(self):
return rlz.shape_like(tuple(self.flat_args()), dt.string)
class MathUnaryOp(UnaryOp):
arg = Arg(rlz.numeric)
def output_type(self):
arg = self.arg
if isinstance(self.arg, ir.DecimalValue):
dtype = arg.type()
else:
dtype = dt.double
return rlz.shape_like(arg, dtype)
class ExpandingTypeMathUnaryOp(MathUnaryOp):
def output_type(self):
if not isinstance(self.arg, ir.DecimalValue):
return super().output_type()
arg = self.arg
return rlz.shape_like(arg, arg.type().largest)
class Exp(ExpandingTypeMathUnaryOp):
pass
class Sign(UnaryOp):
arg = Arg(rlz.numeric)
output_type = rlz.typeof('arg')
class Sqrt(MathUnaryOp):
pass
class Logarithm(MathUnaryOp):
arg = Arg(rlz.strict_numeric)
class Log(Logarithm):
arg = Arg(rlz.strict_numeric)
base = Arg(rlz.strict_numeric, default=None)
class Ln(Logarithm):
"""Natural logarithm"""
class Log2(Logarithm):
"""Logarithm base 2"""
class Log10(Logarithm):
"""Logarithm base 10"""
class Degrees(ExpandingTypeMathUnaryOp):
"""Converts radians to degrees"""
arg = Arg(rlz.numeric)
class Radians(MathUnaryOp):
"""Converts degrees to radians"""
arg = Arg(rlz.numeric)
# TRIGONOMETRIC OPERATIONS
class TrigonometricUnary(MathUnaryOp):
"""Trigonometric base unary"""
arg = Arg(rlz.numeric)
class TrigonometricBinary(BinaryOp):
"""Trigonometric base binary"""
left = Arg(rlz.numeric)
right = Arg(rlz.numeric)
output_type = rlz.shape_like('args', dt.float64)
class Acos(TrigonometricUnary):
"""Returns the arc cosine of x"""
class Asin(TrigonometricUnary):
"""Returns the arc sine of x"""
class Atan(TrigonometricUnary):
"""Returns the arc tangent of x"""
class Atan2(TrigonometricBinary):
"""Returns the arc tangent of x and y"""
class Cos(TrigonometricUnary):
"""Returns the cosine of x"""
class Cot(TrigonometricUnary):
"""Returns the cotangent of x"""
class Sin(TrigonometricUnary):
"""Returns the sine of x"""
class Tan(TrigonometricUnary):
"""Returns the tangent of x"""
class StringUnaryOp(UnaryOp):
arg = Arg(rlz.string)
output_type = rlz.shape_like('arg', dt.string)
class Uppercase(StringUnaryOp):
"""Convert string to all uppercase"""
class Lowercase(StringUnaryOp):
"""Convert string to all lowercase"""
class Reverse(StringUnaryOp):
"""Reverse string"""
class Strip(StringUnaryOp):
"""Remove whitespace from left and right sides of string"""
class LStrip(StringUnaryOp):
"""Remove whitespace from left side of string"""
class RStrip(StringUnaryOp):
"""Remove whitespace from right side of string"""
class Capitalize(StringUnaryOp):
"""Return a capitalized version of input string"""
class Substring(ValueOp):
arg = Arg(rlz.string)
start = Arg(rlz.integer)
length = Arg(rlz.integer, default=None)
output_type = rlz.shape_like('arg', dt.string)
class StrRight(ValueOp):
arg = Arg(rlz.string)
nchars = Arg(rlz.integer)
output_type = rlz.shape_like('arg', dt.string)
class Repeat(ValueOp):
arg = Arg(rlz.string)
times = Arg(rlz.integer)
output_type = rlz.shape_like('arg', dt.string)
class StringFind(ValueOp):
arg = Arg(rlz.string)
substr = Arg(rlz.string)
start = Arg(rlz.integer, default=None)
end = Arg(rlz.integer, default=None)
output_type = rlz.shape_like('arg', dt.int64)
class Translate(ValueOp):
arg = Arg(rlz.string)
from_str = Arg(rlz.string)
to_str = Arg(rlz.string)
output_type = rlz.shape_like('arg', dt.string)
class LPad(ValueOp):
arg = Arg(rlz.string)
length = Arg(rlz.integer)
pad = Arg(rlz.string, default=None)
output_type = rlz.shape_like('arg', dt.string)
class RPad(ValueOp):
arg = Arg(rlz.string)
length = Arg(rlz.integer)
pad = Arg(rlz.string, default=None)
output_type = rlz.shape_like('arg', dt.string)
class FindInSet(ValueOp):
needle = Arg(rlz.string)
values = Arg(rlz.list_of(rlz.string, min_length=1))
output_type = rlz.shape_like('needle', dt.int64)
class StringJoin(ValueOp):
sep = Arg(rlz.string)
arg = Arg(rlz.list_of(rlz.string, min_length=1))
def output_type(self):
return rlz.shape_like(tuple(self.flat_args()), dt.string)
class BooleanValueOp:
pass
class FuzzySearch(ValueOp, BooleanValueOp):
arg = Arg(rlz.string)
pattern = Arg(rlz.string)
output_type = rlz.shape_like('arg', dt.boolean)
class StringSQLLike(FuzzySearch):
arg = Arg(rlz.string)
pattern = Arg(rlz.string)
escape = Arg(str, default=None)
class StringSQLILike(StringSQLLike):
"""SQL ilike operation"""
class RegexSearch(FuzzySearch):
pass
class RegexExtract(ValueOp):
arg = Arg(rlz.string)
pattern = Arg(rlz.string)
index = Arg(rlz.integer)
output_type = rlz.shape_like('arg', dt.string)
class RegexReplace(ValueOp):
arg = Arg(rlz.string)
pattern = Arg(rlz.string)
replacement = Arg(rlz.string)
output_type = rlz.shape_like('arg', dt.string)
class StringReplace(ValueOp):
arg = Arg(rlz.string)
pattern = Arg(rlz.string)
replacement = Arg(rlz.string)
output_type = rlz.shape_like('arg', dt.string)
class StringSplit(ValueOp):
arg = Arg(rlz.string)
delimiter = Arg(rlz.string)
output_type = rlz.shape_like('arg', dt.Array(dt.string))
class StringConcat(ValueOp):
arg = Arg(rlz.list_of(rlz.string))
output_type = rlz.shape_like('arg', dt.string)
class ParseURL(ValueOp):
arg = Arg(rlz.string)
extract = Arg(
rlz.isin(
{
'PROTOCOL',
'HOST',
'PATH',
'REF',
'AUTHORITY',
'FILE',
'USERINFO',
'QUERY',
}
)
)
key = Arg(rlz.string, default=None)
output_type = rlz.shape_like('arg', dt.string)
class StringLength(UnaryOp):
    """
    Compute length of strings

    Returns
    -------
    length : int32
    """

    output_type = rlz.shape_like('arg', dt.int32)


class StringAscii(UnaryOp):
    # Numeric code of the string's first character (cf. SQL ASCII).
    output_type = rlz.shape_like('arg', dt.int32)
# ----------------------------------------------------------------------
# Reductions (aggregate functions)


class Reduction(ValueOp):
    # Flag consulted elsewhere to identify aggregate operations.
    _reduction = True


class Count(Reduction):
    # COUNT over a column or a whole table; `where` is an optional filter.
    arg = Arg((ir.ColumnExpr, ir.TableExpr))
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        return functools.partial(ir.IntegerScalar, dtype=dt.int64)


class Arbitrary(Reduction):
    # Pick an arbitrary value per group; `how` selects the strategy.
    arg = Arg(rlz.column(rlz.any))
    how = Arg(rlz.isin({'first', 'last', 'heavy'}), default=None)
    where = Arg(rlz.boolean, default=None)
    output_type = rlz.scalar_like('arg')
class Sum(Reduction):
    arg = Arg(rlz.column(rlz.numeric))
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        # Booleans sum as counts (int64); otherwise widen to the largest
        # type in the argument's family to reduce overflow risk.
        if isinstance(self.arg, ir.BooleanValue):
            dtype = dt.int64
        else:
            dtype = self.arg.type().largest
        return dtype.scalar_type()


class Mean(Reduction):
    arg = Arg(rlz.column(rlz.numeric))
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        # Decimal inputs keep their exact type; everything else is float64.
        if isinstance(self.arg, ir.DecimalValue):
            dtype = self.arg.type()
        else:
            dtype = dt.float64
        return dtype.scalar_type()
class Quantile(Reduction):
    # Single-quantile reduction; `interpolation` mirrors numpy's options.
    arg = Arg(rlz.any)
    quantile = Arg(rlz.strict_numeric)
    interpolation = Arg(
        rlz.isin({'linear', 'lower', 'higher', 'midpoint', 'nearest'}),
        default='linear',
    )

    def output_type(self):
        return dt.float64.scalar_type()


class MultiQuantile(Quantile):
    # Vectorized variant: `quantile` is an array of probabilities and the
    # result is an array of float64 values.
    arg = Arg(rlz.any)
    quantile = Arg(rlz.value(dt.Array(dt.float64)))
    interpolation = Arg(
        rlz.isin({'linear', 'lower', 'higher', 'midpoint', 'nearest'}),
        default='linear',
    )

    def output_type(self):
        return dt.Array(dt.float64).scalar_type()
class VarianceBase(Reduction):
    # Shared base for variance/stddev; `how` picks sample vs population.
    arg = Arg(rlz.column(rlz.numeric))
    how = Arg(rlz.isin({'sample', 'pop'}), default=None)
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        if isinstance(self.arg, ir.DecimalValue):
            dtype = self.arg.type().largest
        else:
            dtype = dt.float64
        return dtype.scalar_type()


class StandardDev(VarianceBase):
    pass


class Variance(VarianceBase):
    pass


class Correlation(Reduction):
    """Coefficient of correlation of a set of number pairs."""

    left = Arg(rlz.column(rlz.numeric))
    right = Arg(rlz.column(rlz.numeric))
    how = Arg(rlz.isin({'sample', 'pop'}), default=None)
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        return dt.float64.scalar_type()


class Covariance(Reduction):
    """Covariance of a set of number pairs."""

    left = Arg(rlz.column(rlz.numeric))
    right = Arg(rlz.column(rlz.numeric))
    how = Arg(rlz.isin({'sample', 'pop'}), default=None)
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        return dt.float64.scalar_type()
class Max(Reduction):
    arg = Arg(rlz.column(rlz.any))
    where = Arg(rlz.boolean, default=None)
    output_type = rlz.scalar_like('arg')


class Min(Reduction):
    arg = Arg(rlz.column(rlz.any))
    where = Arg(rlz.boolean, default=None)
    output_type = rlz.scalar_like('arg')


class HLLCardinality(Reduction):
    """Approximate number of unique values using HyperLogLog algorithm.

    Impala offers the NDV built-in function for this.
    """

    arg = Arg(rlz.column(rlz.any))
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        # Impala 2.0 and higher returns a DOUBLE
        # return ir.DoubleScalar
        return functools.partial(ir.IntegerScalar, dtype=dt.int64)


class GroupConcat(Reduction):
    # Concatenate group members into one string separated by `sep`.
    arg = Arg(rlz.column(rlz.any))
    sep = Arg(rlz.string, default=',')
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        return dt.string.scalar_type()


class CMSMedian(Reduction):
    """
    Compute the approximate median of a set of comparable values using the
    Count-Min-Sketch algorithm. Exposed in Impala using APPX_MEDIAN.
    """

    arg = Arg(rlz.column(rlz.any))
    where = Arg(rlz.boolean, default=None)
    output_type = rlz.scalar_like('arg')
# ----------------------------------------------------------------------
# Analytic functions


class AnalyticOp(ValueOp):
    # Base class for window/analytic operations.
    pass
class WindowOp(ValueOp):
    # Wrap an analytic expression with a window specification.
    expr = Arg(rlz.noop)
    window = Arg(rlz.noop)
    output_type = rlz.array_like('expr')
    display_argnames = False

    def __init__(self, expr, window):
        from ibis.expr.window import propagate_down_window
        from ibis.expr.analysis import is_analytic

        if not is_analytic(expr):
            raise com.IbisInputError(
                'Expression does not contain a valid window operation'
            )

        # Bind the window's column references against the expression's
        # base table, when one can be found.
        table = ir.find_base_table(expr)
        if table is not None:
            window = window.bind(table)

        if window.max_lookback is not None:
            # max-lookback windows require exactly one timestamp order key.
            error_msg = (
                "'max lookback' windows must be ordered "
                "by a timestamp column"
            )
            if len(window._order_by) != 1:
                raise com.IbisInputError(error_msg)
            order_var = window._order_by[0].op().args[0]
            if not isinstance(order_var.type(), dt.Timestamp):
                raise com.IbisInputError(error_msg)

        expr = propagate_down_window(expr, window)
        super().__init__(expr, window)

    def over(self, window):
        # Compose with an additional window specification.
        new_window = self.window.combine(window)
        return WindowOp(self.expr, new_window)

    @property
    def inputs(self):
        return self.expr.op().inputs[0], self.window

    def root_tables(self):
        # Roots come from the wrapped expression plus the window's
        # order-by and group-by expressions, de-duplicated in order.
        result = list(
            toolz.unique(
                toolz.concatv(
                    self.expr._root_tables(),
                    distinct_roots(
                        *toolz.concatv(
                            self.window._order_by, self.window._group_by
                        )
                    ),
                )
            )
        )
        return result
class ShiftBase(AnalyticOp):
    # Base for lag/lead: shift `arg` by `offset` rows (or an interval),
    # filling holes with `default`.
    arg = Arg(rlz.column(rlz.any))
    offset = Arg(rlz.one_of((rlz.integer, rlz.interval)), default=None)
    default = Arg(rlz.any, default=None)
    output_type = rlz.typeof('arg')


class Lag(ShiftBase):
    pass


class Lead(ShiftBase):
    pass


class RankBase(AnalyticOp):
    # Common int64-column output type for rank-style analytics.
    def output_type(self):
        return dt.int64.column_type()
class MinRank(RankBase):
    """
    Compute position of first element within each equal-value group in sorted
    order.

    Examples
    --------
    values   ranks
    1        0
    1        0
    2        2
    2        2
    2        2
    3        5

    Returns
    -------
    ranks : Int64Column, starting from 0
    """

    # Equivalent to SQL RANK()
    arg = Arg(rlz.column(rlz.any))


class DenseRank(RankBase):
    """
    Compute position of first element within each equal-value group in sorted
    order, ignoring duplicate values.

    Examples
    --------
    values   ranks
    1        0
    1        0
    2        1
    2        1
    2        1
    3        2

    Returns
    -------
    ranks : Int64Column, starting from 0
    """

    # Equivalent to SQL DENSE_RANK()
    arg = Arg(rlz.column(rlz.any))


class RowNumber(RankBase):
    """
    Compute row number starting from 0 after sorting by column expression

    Examples
    --------
    >>> import ibis
    >>> t = ibis.table([('values', dt.int64)])
    >>> w = ibis.window(order_by=t.values)
    >>> row_num = ibis.row_number().over(w)
    >>> result = t[t.values, row_num.name('row_num')]

    Returns
    -------
    row_number : Int64Column, starting from 0
    """

    # Equivalent to SQL ROW_NUMBER()
class CumulativeOp(AnalyticOp):
    # Base class for running (cumulative) aggregations.
    pass


class CumulativeSum(CumulativeOp):
    """Cumulative sum. Requires an order window."""

    arg = Arg(rlz.column(rlz.numeric))

    def output_type(self):
        # Mirrors Sum: booleans accumulate as int64, numerics widen.
        if isinstance(self.arg, ir.BooleanValue):
            dtype = dt.int64
        else:
            dtype = self.arg.type().largest
        return dtype.column_type()


class CumulativeMean(CumulativeOp):
    """Cumulative mean. Requires an order window."""

    arg = Arg(rlz.column(rlz.numeric))

    def output_type(self):
        # Mirrors Mean: decimals widen, everything else is float64.
        if isinstance(self.arg, ir.DecimalValue):
            dtype = self.arg.type().largest
        else:
            dtype = dt.float64
        return dtype.column_type()


class CumulativeMax(CumulativeOp):
    """Cumulative max. Requires an order window."""

    arg = Arg(rlz.column(rlz.any))
    output_type = rlz.array_like('arg')


class CumulativeMin(CumulativeOp):
    """Cumulative min. Requires an order window."""

    arg = Arg(rlz.column(rlz.any))
    output_type = rlz.array_like('arg')
class PercentRank(AnalyticOp):
    # Equivalent to SQL PERCENT_RANK().
    arg = Arg(rlz.column(rlz.any))
    output_type = rlz.shape_like('arg', dt.double)


class NTile(AnalyticOp):
    # Partition rows into `buckets` equal-sized groups (SQL NTILE).
    arg = Arg(rlz.column(rlz.any))
    buckets = Arg(rlz.integer)
    output_type = rlz.shape_like('arg', dt.int64)


class FirstValue(AnalyticOp):
    arg = Arg(rlz.column(rlz.any))
    output_type = rlz.typeof('arg')


class LastValue(AnalyticOp):
    arg = Arg(rlz.column(rlz.any))
    output_type = rlz.typeof('arg')


class NthValue(AnalyticOp):
    # Value at position `nth` within the window frame.
    arg = Arg(rlz.column(rlz.any))
    nth = Arg(rlz.integer)
    output_type = rlz.typeof('arg')
# ----------------------------------------------------------------------
# Distinct stuff


class Distinct(TableNode, HasSchema):
    """
    Distinct is a table-level unique-ing operation.

    In SQL, you might have:

    SELECT DISTINCT foo
    FROM table

    SELECT DISTINCT foo, bar
    FROM table
    """

    table = Arg(ir.TableExpr)

    def _validate(self):
        # check whether schema has overlapping columns or not
        assert self.schema

    @property
    def schema(self):
        return self.table.schema()

    def blocks(self):
        return True
class DistinctColumn(ValueOp):
    """
    COUNT(DISTINCT ...) is really just syntactic sugar, but we provide a
    distinct().count() nicety for users nonetheless.

    For all intents and purposes, like Distinct, but can be distinguished later
    for evaluation if the result should be array-like versus table-like. Also
    for calling count()
    """

    arg = Arg(rlz.noop)
    output_type = rlz.typeof('arg')

    def count(self):
        """Only valid if the distinct contains a single column"""
        return CountDistinct(self.arg)


class CountDistinct(Reduction):
    arg = Arg(rlz.column(rlz.any))
    where = Arg(rlz.boolean, default=None)

    def output_type(self):
        return dt.int64.scalar_type()
# ---------------------------------------------------------------------
# Boolean reductions and semi/anti join support


class Any(ValueOp):
    # Depending on the kind of input boolean array, the result might either be
    # array-like (an existence-type predicate) or scalar (a reduction)
    arg = Arg(rlz.column(rlz.boolean))

    @property
    def _reduction(self):
        # A single root table means this is a plain reduction; multiple
        # roots indicate an existence predicate (semi-join style).
        roots = self.arg._root_tables()
        return len(roots) < 2

    def output_type(self):
        if self._reduction:
            return dt.boolean.scalar_type()
        else:
            return dt.boolean.column_type()

    def negate(self):
        return NotAny(self.arg)


class All(ValueOp):
    arg = Arg(rlz.column(rlz.boolean))
    output_type = rlz.scalar_like('arg')
    # Unlike Any, All is always treated as a reduction.
    _reduction = True

    def negate(self):
        return NotAll(self.arg)


class NotAny(Any):
    def negate(self):
        return Any(self.arg)


class NotAll(All):
    def negate(self):
        return All(self.arg)


class CumulativeAny(CumulativeOp):
    arg = Arg(rlz.column(rlz.boolean))
    output_type = rlz.typeof('arg')


class CumulativeAll(CumulativeOp):
    arg = Arg(rlz.column(rlz.boolean))
    output_type = rlz.typeof('arg')
# ---------------------------------------------------------------------
class TypedCaseBuilder:
    """Mixin giving case-expression builders a common result type plus the
    ``else_`` / ``end`` finalization steps.

    Subclasses declare their state via ``__slots__`` (the last slot being
    ``'default'``) and set ``case_op`` to the operation class they build.
    """

    __slots__ = ()

    def type(self):
        """Return the highest-precedence datatype among the result exprs."""
        return dt.highest_precedence([r.type() for r in self.results])

    def else_(self, result_expr):
        """
        Specify

        Returns
        -------
        builder : CaseBuilder
        """
        state = {
            name: getattr(self, name)
            for name in self.__slots__
            if name != 'default'
        }
        state['default'] = ir.as_value_expr(result_expr)
        # Maintain immutability
        return type(self)(**state)

    def end(self):
        """Finalize the builder into a case expression."""
        fallback = self.default
        if fallback is None:
            # No ELSE clause given: default to a typed NULL.
            fallback = ir.null().cast(self.type())
        ctor_args = [
            getattr(self, name)
            for name in self.__slots__
            if name != 'default'
        ]
        ctor_args.append(fallback)
        return self.__class__.case_op(*ctor_args).to_expr()
class SimpleCase(ValueOp):
    # CASE <base> WHEN <case_i> THEN <result_i> ... ELSE <default> END
    base = Arg(rlz.any)
    cases = Arg(rlz.list_of(rlz.any))
    results = Arg(rlz.list_of(rlz.any))
    default = Arg(rlz.any)

    def _validate(self):
        assert len(self.cases) == len(self.results)

    def root_tables(self):
        return distinct_roots(
            *itertools.chain(
                [self.base],
                self.cases,
                self.results,
                [] if self.default is None else [self.default],
            )
        )

    def output_type(self):
        # NOTE(review): `self.results + [self.default]` presumably yields a
        # list expression whose .type() is the members' highest-precedence
        # type — confirm against the validated Arg container semantics.
        exprs = self.results + [self.default]
        return rlz.shape_like(self.base, dtype=exprs.type())
class SimpleCaseBuilder(TypedCaseBuilder):
    """Immutable builder for ``CASE <base> WHEN ... THEN ...`` expressions."""

    __slots__ = 'base', 'cases', 'results', 'default'
    case_op = SimpleCase

    def __init__(self, base, cases=None, results=None, default=None):
        self.base = base
        self.cases = [] if cases is None else list(cases)
        self.results = [] if results is None else list(results)
        self.default = default

    def when(self, case_expr, result_expr):
        """
        Add a new case-result pair.

        Parameters
        ----------
        case : Expr
            Expression to equality-compare with base expression. Must be
            comparable with the base.
        result : Expr
            Value when the case predicate evaluates to true.

        Returns
        -------
        builder : CaseBuilder
        """
        case_expr = ir.as_value_expr(case_expr)
        result_expr = ir.as_value_expr(result_expr)

        if not rlz.comparable(self.base, case_expr):
            raise TypeError(
                'Base expression and passed case are not comparable'
            )

        # Maintain immutability: copy the state, then extend it.
        new_cases = [*self.cases, case_expr]
        new_results = [*self.results, result_expr]
        return type(self)(self.base, new_cases, new_results, self.default)
class SearchedCase(ValueOp):
    # CASE WHEN <boolean case_i> THEN <result_i> ... ELSE <default> END
    cases = Arg(rlz.list_of(rlz.boolean))
    results = Arg(rlz.list_of(rlz.any))
    default = Arg(rlz.any)

    def _validate(self):
        assert len(self.cases) == len(self.results)

    def root_tables(self):
        cases, results, default = self.args
        return distinct_roots(
            *itertools.chain(
                cases.values,
                results.values,
                [] if default is None else [default],
            )
        )

    def output_type(self):
        # Result dtype is the highest-precedence type among all branches.
        exprs = self.results + [self.default]
        dtype = rlz.highest_precedence_dtype(exprs)
        return rlz.shape_like(self.cases, dtype)
class SearchedCaseBuilder(TypedCaseBuilder):
    """Immutable builder for ``CASE WHEN <predicate> THEN ...`` expressions."""

    __slots__ = 'cases', 'results', 'default'
    case_op = SearchedCase

    def __init__(self, cases=None, results=None, default=None):
        self.cases = [] if cases is None else list(cases)
        self.results = [] if results is None else list(results)
        self.default = default

    def when(self, case_expr, result_expr):
        """
        Add a new case-result pair.

        Parameters
        ----------
        case : Expr
            Expression to equality-compare with base expression. Must be
            comparable with the base.
        result : Expr
            Value when the case predicate evaluates to true.

        Returns
        -------
        builder : CaseBuilder
        """
        case_expr = ir.as_value_expr(case_expr)
        result_expr = ir.as_value_expr(result_expr)

        # Searched cases require boolean predicates.
        if not isinstance(case_expr, ir.BooleanValue):
            raise TypeError(case_expr)

        # Maintain immutability: copy the state, then extend it.
        new_cases = [*self.cases, case_expr]
        new_results = [*self.results, result_expr]
        return type(self)(new_cases, new_results, self.default)
class Where(ValueOp):
    """
    Ternary case expression, equivalent to

    bool_expr.case()
             .when(True, true_expr)
             .else_(false_or_null_expr)
    """

    bool_expr = Arg(rlz.boolean)
    true_expr = Arg(rlz.any)
    false_null_expr = Arg(rlz.any)

    def output_type(self):
        # NOTE(review): the result dtype is taken from the true branch only;
        # the false/null branch's type is not consulted here.
        return rlz.shape_like(self.bool_expr, self.true_expr.type())
def _validate_join_tables(left, right):
    """Raise ``TypeError`` unless both join operands are table expressions."""
    for side, operand in (('left', left), ('right', right)):
        if not isinstance(operand, ir.TableExpr):
            raise TypeError(
                'Can only join table expressions, got {} for '
                '{} table'.format(type(operand).__name__, side)
            )
def _make_distinct_join_predicates(left, right, predicates):
    """Ensure a self-join's sides are relationally distinct.

    See GH #667: when both sides share a common parent expression (e.g. the
    same table under different filters), a self-reference is added on the
    right and the join predicates are substituted accordingly.
    """
    if left.equals(right):
        right = right.view()

    cleaned = _clean_join_predicates(left, right, predicates)
    return left, right, cleaned
def _clean_join_predicates(left, right, predicates):
    # Normalize user-supplied join predicates into a flat list of boolean
    # column expressions: tuples become equality predicates, bare strings
    # become same-name equality, and compound predicates are flattened.
    import ibis.expr.analysis as L

    result = []

    if not isinstance(predicates, (list, tuple)):
        predicates = [predicates]

    for pred in predicates:
        if isinstance(pred, tuple):
            if len(pred) != 2:
                raise com.ExpressionError('Join key tuple must be ' 'length 2')
            lk, rk = pred
            lk = left._ensure_expr(lk)
            rk = right._ensure_expr(rk)
            pred = lk == rk
        elif isinstance(pred, str):
            pred = left[pred] == right[pred]
        elif not isinstance(pred, ir.Expr):
            raise NotImplementedError

        if not isinstance(pred, ir.BooleanColumn):
            raise com.ExpressionError('Join predicate must be comparison')

        preds = L.flatten_predicate(pred)
        result.extend(preds)

    _validate_join_predicates(left, right, result)
    return result
def _validate_join_predicates(left, right, predicates):
    """Check every predicate draws only on the two joined tables."""
    from ibis.expr.analysis import fully_originate_from

    # Validate join predicates. Each predicate must be valid jointly when
    # considering the roots of each input table
    for pred in predicates:
        if fully_originate_from(pred, [left, right]):
            continue
        raise com.RelationError(
            'The expression {!r} does not fully '
            'originate from dependencies of the table '
            'expression.'.format(pred)
        )
class Join(TableNode):
    # Base class for all binary table joins.
    left = Arg(rlz.noop)
    right = Arg(rlz.noop)
    predicates = Arg(rlz.noop)

    def __init__(self, left, right, predicates):
        _validate_join_tables(left, right)
        # Self-joins get a distinct right-hand side plus substituted
        # predicates (see _make_distinct_join_predicates).
        left, right, predicates = _make_distinct_join_predicates(
            left, right, predicates
        )
        super().__init__(left, right, predicates)

    def _get_schema(self):
        # For joins retaining both table schemas, merge them together here
        left = self.left
        right = self.right

        if not left._is_materialized():
            left = left.materialize()

        if not right._is_materialized():
            right = right.materialize()

        sleft = left.schema()
        sright = right.schema()

        overlap = set(sleft.names) & set(sright.names)
        if overlap:
            raise com.RelationError(
                'Joined tables have overlapping names: %s' % str(list(overlap))
            )

        return sleft.append(sright)

    def has_schema(self):
        # A join must be materialized before it exposes a schema.
        return False

    def root_tables(self):
        if util.all_of([self.left.op(), self.right.op()], (Join, Selection)):
            # Unraveling is not possible
            return [self.left.op(), self.right.op()]
        else:
            return distinct_roots(self.left, self.right)
class InnerJoin(Join):
    pass


class LeftJoin(Join):
    pass


class RightJoin(Join):
    pass


class OuterJoin(Join):
    pass


class AnyInnerJoin(Join):
    pass


class AnyLeftJoin(Join):
    pass


class LeftSemiJoin(Join):
    # Semi joins only yield columns from the left table.
    def _get_schema(self):
        return self.left.schema()


class LeftAntiJoin(Join):
    # Anti joins only yield columns from the left table.
    def _get_schema(self):
        return self.left.schema()
class MaterializedJoin(TableNode, HasSchema):
    # A join whose merged schema has been realized (and checked for
    # overlapping column names).
    join = Arg(ir.TableExpr)

    def _validate(self):
        assert isinstance(self.join.op(), Join)
        # check whether the underlying schema has overlapping columns or not
        assert self.schema

    @property
    def schema(self):
        return self.join.op()._get_schema()

    def root_tables(self):
        return self.join._root_tables()

    def blocks(self):
        return True
class CrossJoin(InnerJoin):
    """
    Some databases have a CROSS JOIN operator, that may be preferential to use
    over an INNER JOIN with no predicates.
    """

    def __init__(self, *args, **kwargs):
        if 'prefixes' in kwargs:
            raise NotImplementedError

        if len(args) < 2:
            raise com.IbisInputError('Must pass at least 2 tables')

        # Fold any additional tables into the right-hand side so the
        # result is a single binary (predicate-free) inner join.
        left, right, *rest = args
        for table in rest:
            right = right.cross_join(table)

        InnerJoin.__init__(self, left, right, [])
class AsOfJoin(Join):
    # Time-series "as of" join: match each left row to the nearest right
    # row, optionally grouped by `by` keys and bounded by `tolerance`.
    left = Arg(rlz.noop)
    right = Arg(rlz.noop)
    predicates = Arg(rlz.noop)
    by = Arg(rlz.noop, default=None)
    tolerance = Arg(rlz.interval(), default=None)

    def __init__(self, left, right, predicates, by, tolerance):
        super().__init__(left, right, predicates)
        # `by` keys are cleaned like ordinary join predicates.
        self.by = _clean_join_predicates(self.left, self.right, by)
        self.tolerance = tolerance
        self._validate_args(['by', 'tolerance'])

    def _validate_args(self, args: List[str]):
        # Re-run the declared Arg validators for attributes assigned after
        # the base-class __init__.
        for arg in args:
            argument = self.signature[arg]
            value = argument.validate(getattr(self, arg))
            setattr(self, arg, value)
class Union(TableNode, HasSchema):
    # SQL UNION (ALL unless `distinct` is True) of two same-schema tables.
    left = Arg(rlz.noop)
    right = Arg(rlz.noop)
    distinct = Arg(rlz.validator(bool), default=False)

    def _validate(self):
        if not self.left.schema().equals(self.right.schema()):
            raise com.RelationError(
                'Table schemas must be equal ' 'to form union'
            )

    @property
    def schema(self):
        return self.left.schema()

    def blocks(self):
        return True
class Limit(TableNode):
    # LIMIT `n` OFFSET `offset` over `table`.
    table = Arg(ir.TableExpr)
    n = Arg(rlz.validator(int))
    offset = Arg(rlz.validator(int))

    def blocks(self):
        return True

    @property
    def schema(self):
        return self.table.schema()

    def has_schema(self):
        return self.table.op().has_schema()

    def root_tables(self):
        return [self]
# --------------------------------------------------------------------
# Sorting


def to_sort_key(table, key):
    # Normalize any user-supplied sort specification (name, expression,
    # (key, order) pair, or DeferredSortKey) into a SortExpr.
    if isinstance(key, DeferredSortKey):
        key = key.resolve(table)

    if isinstance(key, ir.SortExpr):
        return key

    if isinstance(key, (tuple, list)):
        key, sort_order = key
    else:
        sort_order = True

    if not isinstance(key, ir.Expr):
        key = table._ensure_expr(key)
        # _ensure_expr may itself yield a sort key; recurse once more.
        if isinstance(key, (ir.SortExpr, DeferredSortKey)):
            return to_sort_key(table, key)

    if isinstance(sort_order, str):
        # Accept 'desc'/'descending' (any case); everything else is truthy.
        if sort_order.lower() in ('desc', 'descending'):
            sort_order = False
        elif not isinstance(sort_order, bool):
            sort_order = bool(sort_order)

    return SortKey(key, ascending=sort_order).to_expr()
class SortKey(Node):
    # A column expression paired with a sort direction.
    expr = Arg(rlz.column(rlz.any))
    ascending = Arg(rlz.validator(bool), default=True)

    def __repr__(self):
        # Temporary
        rows = [
            'Sort key:',
            '  ascending: {0!s}'.format(self.ascending),
            util.indent(_safe_repr(self.expr), 2),
        ]
        return '\n'.join(rows)

    def output_type(self):
        return ir.SortExpr

    def root_tables(self):
        return self.expr._root_tables()

    def equals(self, other, cache=None):
        # TODO: might generalize this equals based on fields
        # requires a proxy class with equals for non expr values
        return (
            isinstance(other, SortKey)
            and self.expr.equals(other.expr, cache=cache)
            and self.ascending == other.ascending
        )

    def resolve_name(self):
        return self.expr.get_name()
class DeferredSortKey:
    """A sort key whose column reference is bound lazily against a table."""

    def __init__(self, what, ascending=True):
        # `what` is typically a column name; it is resolved in resolve().
        self.what = what
        self.ascending = ascending

    def resolve(self, parent):
        """Bind the key against *parent* and return a SortExpr."""
        bound = parent._ensure_expr(self.what)
        return SortKey(bound, ascending=self.ascending).to_expr()
class SelfReference(TableNode, HasSchema):
    # A distinct alias of an existing table, enabling self-joins.
    table = Arg(ir.TableExpr)

    @property
    def schema(self):
        return self.table.schema()

    def root_tables(self):
        # The dependencies of this operation are not walked, which makes the
        # table expression holding this relationally distinct from other
        # expressions, so things like self-joins are possible
        return [self]

    def blocks(self):
        return True
class Selection(TableNode, HasSchema):
    # Combined projection / filter / sort over a single table.
    table = Arg(ir.TableExpr)
    selections = Arg(rlz.noop, default=None)
    predicates = Arg(rlz.noop, default=None)
    sort_keys = Arg(rlz.noop, default=None)

    def __init__(
        self, table, selections=None, predicates=None, sort_keys=None
    ):
        import ibis.expr.analysis as L

        # Argument cleaning
        selections = util.promote_list(
            selections if selections is not None else []
        )

        # Strings become column references on `table`.
        projections = []
        for selection in selections:
            if isinstance(selection, str):
                projection = table[selection]
            else:
                projection = selection
            projections.append(projection)

        sort_keys = [
            to_sort_key(table, k)
            for k in util.promote_list(
                sort_keys if sort_keys is not None else []
            )
        ]

        # Compound predicates are flattened into conjunctive parts.
        predicates = list(
            toolz.concat(
                map(
                    L.flatten_predicate,
                    predicates if predicates is not None else [],
                )
            )
        )

        super().__init__(
            table=table,
            selections=projections,
            predicates=predicates,
            sort_keys=sort_keys,
        )

    def _validate(self):
        from ibis.expr.analysis import FilterValidator

        # Need to validate that the column expressions are compatible with the
        # input table; this means they must either be scalar expressions or
        # array expressions originating from the same root table expression
        dependent_exprs = self.selections + self.sort_keys
        self.table._assert_valid(dependent_exprs)

        # Validate predicates
        validator = FilterValidator([self.table])
        validator.validate_all(self.predicates)

        # Validate no overlapping columns in schema
        assert self.schema

    @property
    def schema(self):
        # Resolve schema and initialize
        if not self.selections:
            return self.table.schema()

        types = []
        names = []

        for projection in self.selections:
            if isinstance(projection, ir.ValueExpr):
                names.append(projection.get_name())
                types.append(projection.type())
            elif isinstance(projection, ir.TableExpr):
                schema = projection.schema()
                names.extend(schema.names)
                types.extend(schema.types)

        return Schema(names, types)

    def blocks(self):
        # Only a projection (non-empty selections) blocks pushdown.
        return bool(self.selections)

    def substitute_table(self, table_expr):
        return Selection(table_expr, self.selections)

    def root_tables(self):
        return [self]

    def can_add_filters(self, wrapped_expr, predicates):
        pass

    @staticmethod
    def empty_or_equal(lefts, rights):
        return not lefts or not rights or all_equal(lefts, rights)

    def compatible_with(self, other):
        # self and other are equivalent except for predicates, selections, or
        # sort keys any of which is allowed to be empty. If both are not empty
        # then they must be equal
        if self.equals(other):
            return True

        if not isinstance(other, type(self)):
            return False

        return self.table.equals(other.table) and (
            self.empty_or_equal(self.predicates, other.predicates)
            and self.empty_or_equal(self.selections, other.selections)
            and self.empty_or_equal(self.sort_keys, other.sort_keys)
        )

    # Operator combination / fusion logic

    def aggregate(self, this, metrics, by=None, having=None):
        if len(self.selections) > 0:
            return Aggregation(this, metrics, by=by, having=having)
        else:
            # No projection: try to fuse the aggregation into this node.
            helper = AggregateSelection(this, metrics, by, having)
            return helper.get_result()

    def sort_by(self, expr, sort_exprs):
        sort_exprs = util.promote_list(sort_exprs)
        if not self.blocks():
            # Fuse the new sort keys into this selection when they are
            # resolvable against the underlying table.
            resolved_keys = _maybe_convert_sort_keys(self.table, sort_exprs)
            if resolved_keys and self.table._is_valid(resolved_keys):
                return Selection(
                    self.table,
                    self.selections,
                    predicates=self.predicates,
                    sort_keys=self.sort_keys + resolved_keys,
                )
        return Selection(expr, [], sort_keys=sort_exprs)
class AggregateSelection:
    # Helper that tries to push an aggregation down through a non-blocking
    # Selection (fusing filters/sort keys) instead of creating a subquery.
    # sort keys cannot be discarded because of order-dependent
    # aggregate functions like GROUP_CONCAT

    def __init__(self, parent, metrics, by, having):
        self.parent = parent
        self.op = parent.op()
        self.metrics = metrics
        self.by = by
        self.having = having

    def get_result(self):
        if self.op.blocks():
            return self._plain_subquery()
        else:
            return self._attempt_pushdown()

    def _plain_subquery(self):
        # Fallback: aggregate over the selection as-is (a subquery).
        return Aggregation(
            self.parent, self.metrics, by=self.by, having=self.having
        )

    def _attempt_pushdown(self):
        metrics_valid, lowered_metrics = self._pushdown_exprs(self.metrics)
        by_valid, lowered_by = self._pushdown_exprs(self.by)
        having_valid, lowered_having = self._pushdown_exprs(
            self.having or None
        )

        if metrics_valid and by_valid and having_valid:
            # All expressions resolve against the underlying table: fuse.
            return Aggregation(
                self.op.table,
                lowered_metrics,
                by=lowered_by,
                having=lowered_having,
                predicates=self.op.predicates,
                sort_keys=self.op.sort_keys,
            )
        else:
            return self._plain_subquery()

    def _pushdown_exprs(self, exprs):
        # Returns (valid, substituted_exprs): whether every expression can
        # be rewritten in terms of the selection's underlying table.
        import ibis.expr.analysis as L

        if exprs is None:
            return True, []

        resolved = self.op.table._resolve(exprs)
        subbed_exprs = []

        valid = False

        if resolved:
            for x in util.promote_list(resolved):
                subbed = L.sub_for(x, [(self.parent, self.op.table)])
                subbed_exprs.append(subbed)
            valid = self.op.table._is_valid(subbed_exprs)
        else:
            valid = False

        return valid, subbed_exprs
def _maybe_convert_sort_keys(table, exprs):
    # Best-effort conversion of `exprs` to sort keys against `table`;
    # returns None when any key cannot be resolved.
    try:
        return [to_sort_key(table, k) for k in util.promote_list(exprs)]
    except com.IbisError:
        return None
class Aggregation(TableNode, HasSchema):
    """
    metrics : per-group scalar aggregates
    by : group expressions
    having : post-aggregation predicate

    TODO: not putting this in the aggregate operation yet
    where : pre-aggregation predicate
    """

    table = Arg(ir.TableExpr)
    metrics = Arg(rlz.noop)
    by = Arg(rlz.noop)
    having = Arg(rlz.noop, default=None)
    predicates = Arg(rlz.noop, default=None)
    sort_keys = Arg(rlz.noop, default=None)

    def __init__(
        self,
        table,
        metrics,
        by=None,
        having=None,
        predicates=None,
        sort_keys=None,
    ):
        # For tables, like joins, that are not materialized
        metrics = self._rewrite_exprs(table, metrics)

        by = [] if by is None else by
        by = table._resolve(by)

        having = [] if having is None else having
        predicates = [] if predicates is None else predicates

        # order by only makes sense with group by in an aggregation
        sort_keys = [] if not by or sort_keys is None else sort_keys
        sort_keys = [
            to_sort_key(table, k) for k in util.promote_list(sort_keys)
        ]

        by = self._rewrite_exprs(table, by)
        having = self._rewrite_exprs(table, having)
        predicates = self._rewrite_exprs(table, predicates)
        sort_keys = self._rewrite_exprs(table, sort_keys)

        super().__init__(
            table=table,
            metrics=metrics,
            by=by,
            having=having,
            predicates=predicates,
            sort_keys=sort_keys,
        )

    def _validate(self):
        from ibis.expr.analysis import is_reduction
        from ibis.expr.analysis import FilterValidator

        # All aggregates are valid
        for expr in self.metrics:
            if not isinstance(expr, ir.ScalarExpr) or not is_reduction(expr):
                raise TypeError(
                    'Passed a non-aggregate expression: %s' % _safe_repr(expr)
                )

        for expr in self.having:
            if not isinstance(expr, ir.BooleanScalar):
                raise com.ExpressionError(
                    'Having clause must be boolean '
                    'expression, was: {0!s}'.format(_safe_repr(expr))
                )

        # All non-scalar refs originate from the input table
        all_exprs = self.metrics + self.by + self.having + self.sort_keys
        self.table._assert_valid(all_exprs)

        # Validate predicates
        validator = FilterValidator([self.table])
        validator.validate_all(self.predicates)

        # Validate schema has no overlapping columns
        assert self.schema

    def _rewrite_exprs(self, table, what):
        # Bind names to `table` and substitute parent references so all
        # expressions reference the aggregation's input directly.
        from ibis.expr.analysis import substitute_parents

        what = util.promote_list(what)

        all_exprs = []
        for expr in what:
            if isinstance(expr, ir.ExprList):
                all_exprs.extend(expr.exprs())
            else:
                bound_expr = ir.bind_expr(table, expr)
                all_exprs.append(bound_expr)

        return [
            substitute_parents(x, past_projection=False) for x in all_exprs
        ]

    def blocks(self):
        return True

    def substitute_table(self, table_expr):
        return Aggregation(
            table_expr, self.metrics, by=self.by, having=self.having
        )

    @property
    def schema(self):
        names = []
        types = []

        # All exprs must be named
        for e in self.by + self.metrics:
            names.append(e.get_name())
            types.append(e.type())

        return Schema(names, types)

    def sort_by(self, expr, sort_exprs):
        # Fuse resolvable sort keys into this aggregation; otherwise wrap
        # the result in a sorting Selection.
        sort_exprs = util.promote_list(sort_exprs)

        resolved_keys = _maybe_convert_sort_keys(self.table, sort_exprs)
        if resolved_keys and self.table._is_valid(resolved_keys):
            return Aggregation(
                self.table,
                self.metrics,
                by=self.by,
                having=self.having,
                predicates=self.predicates,
                sort_keys=self.sort_keys + resolved_keys,
            )
        return Selection(expr, [], sort_keys=sort_exprs)
class NumericBinaryOp(BinaryOp):
    left = Arg(rlz.numeric)
    right = Arg(rlz.numeric)


class Add(NumericBinaryOp):
    output_type = rlz.numeric_like('args', operator.add)


class Multiply(NumericBinaryOp):
    output_type = rlz.numeric_like('args', operator.mul)


class Power(NumericBinaryOp):
    def output_type(self):
        # Integer ** integer may be fractional (negative exponents), so the
        # all-integer case promotes to float64.
        if util.all_of(self.args, ir.IntegerValue):
            return rlz.shape_like(self.args, dt.float64)
        else:
            return rlz.shape_like(self.args)


class Subtract(NumericBinaryOp):
    output_type = rlz.numeric_like('args', operator.sub)


class Divide(NumericBinaryOp):
    # True division always yields float64.
    output_type = rlz.shape_like('args', dt.float64)


class FloorDivide(Divide):
    output_type = rlz.shape_like('args', dt.int64)


class LogicalBinaryOp(BinaryOp):
    left = Arg(rlz.boolean)
    right = Arg(rlz.boolean)
    output_type = rlz.shape_like('args', dt.boolean)


class Not(UnaryOp):
    arg = Arg(rlz.boolean)
    output_type = rlz.shape_like('arg', dt.boolean)


class Modulus(NumericBinaryOp):
    output_type = rlz.numeric_like('args', operator.mod)


class And(LogicalBinaryOp):
    pass


class Or(LogicalBinaryOp):
    pass


class Xor(LogicalBinaryOp):
    pass
class Comparison(BinaryOp, BooleanValueOp):
    left = Arg(rlz.any)
    right = Arg(rlz.any)

    def __init__(self, left, right):
        """
        Casting rules for type promotions (for resolving the output type) may
        depend in some cases on the target backend.

        TODO: how will overflows be handled? Can we provide anything useful in
        Ibis to help the user avoid them?

        :param left:
        :param right:
        """
        super().__init__(*self._maybe_cast_args(left, right))

    def _maybe_cast_args(self, left, right):
        # it might not be necessary?
        # Try casting right toward left first, then the reverse; fall back
        # to the originals when neither cast succeeds.
        with suppress(com.IbisTypeError):
            return left, rlz.cast(right, left)

        with suppress(com.IbisTypeError):
            return rlz.cast(left, right), right

        return left, right

    def output_type(self):
        if not rlz.comparable(self.left, self.right):
            raise TypeError(
                'Arguments with datatype {} and {} are '
                'not comparable'.format(self.left.type(), self.right.type())
            )
        return rlz.shape_like(self.args, dt.boolean)
class Equals(Comparison):
    pass


class NotEquals(Comparison):
    pass


class GreaterEqual(Comparison):
    pass


class Greater(Comparison):
    pass


class LessEqual(Comparison):
    pass


class Less(Comparison):
    pass


class IdenticalTo(Comparison):
    # Null-safe equality (NULL IdenticalTo NULL is true).
    pass
class Between(ValueOp, BooleanValueOp):
    # arg BETWEEN lower_bound AND upper_bound (inclusive in SQL semantics).
    arg = Arg(rlz.any)
    lower_bound = Arg(rlz.any)
    upper_bound = Arg(rlz.any)

    def output_type(self):
        arg, lower, upper = self.args

        if not (rlz.comparable(arg, lower) and rlz.comparable(arg, upper)):
            raise TypeError('Arguments are not comparable')

        return rlz.shape_like(self.args, dt.boolean)


class BetweenTime(Between):
    # Between over time-of-day values; bounds may be time literals or strings.
    arg = Arg(rlz.one_of([rlz.timestamp, rlz.time]))
    lower_bound = Arg(rlz.one_of([rlz.time, rlz.string]))
    upper_bound = Arg(rlz.one_of([rlz.time, rlz.string]))
class Contains(ValueOp, BooleanValueOp):
value = Arg(rlz.any)
options = Arg(
rlz.one_of(
[
rlz.list_of(rlz.any),
rlz.set_,
rlz.column(rlz.any),
rlz.array_of(rlz.any),
]
)
)
def __init__(self, value, options):
# it can be a single expression, like a column
if not isinstance(options, ir.Expr):
if util.any_of(options, ir.Expr):
# or a list of expressions
options = ir.sequence(options)
else:
# or a set of scalar values
options = frozenset(options)
super().__init__(value, options)
def output_type(self):
all_args = [self.value]
if isinstance(self.options, ir.ListExpr):
all_args += self.options
else:
all_args += [self.options]
return rlz.shape_like(all_args, dt.boolean)
class NotContains(Contains):
pass
class ReplaceValues(ValueOp):
"""
Apply a multi-value replacement on a particular column. As an example from
SQL, given DAYOFWEEK(timestamp_col), replace 1 through 5 to "WEEKDAY" and 6
and 7 to "WEEKEND"
"""
pass
class SummaryFilter(ValueOp):
expr = Arg(rlz.noop)
def output_type(self):
return dt.boolean.column_type()
class TopK(ValueOp):
    """Return the top ``k`` values of ``arg`` ranked by the ``by`` metric.

    ``by`` defaults to ``arg.count()``, i.e. the ``k`` most frequent values.
    """

    arg = Arg(rlz.noop)
    k = Arg(int)
    by = Arg(rlz.noop)

    def __init__(self, arg, k, by=None):
        if by is None:
            # Default ranking metric: frequency of each value.
            by = arg.count()

        if not isinstance(arg, ir.ColumnExpr):
            raise TypeError(arg)

        if not isinstance(k, int) or k < 0:
            # k == 0 is accepted (empty result); only non-ints and negative
            # values are rejected.  The previous message said "positive",
            # contradicting the actual check.
            raise ValueError(
                'k must be a non-negative integer, was: {}'.format(k)
            )

        super().__init__(arg, k, by)

    def output_type(self):
        return ir.TopKExpr

    def blocks(self):
        # TopK must materialize its input before downstream operations run.
        return True
class Constant(ValueOp):
pass
class TimestampNow(Constant):
def output_type(self):
return dt.timestamp.scalar_type()
class RandomScalar(Constant):
def output_type(self):
return dt.float64.scalar_type()
class E(Constant):
def output_type(self):
return functools.partial(ir.FloatingScalar, dtype=dt.float64)
class Pi(Constant):
"""
The constant pi
"""
def output_type(self):
return functools.partial(ir.FloatingScalar, dtype=dt.float64)
class TemporalUnaryOp(UnaryOp):
arg = Arg(rlz.temporal)
class TimestampUnaryOp(UnaryOp):
arg = Arg(rlz.timestamp)
_date_units = dict(
Y='Y',
y='Y',
year='Y',
YEAR='Y',
YYYY='Y',
SYYYY='Y',
YYY='Y',
YY='Y',
Q='Q',
q='Q',
quarter='Q',
QUARTER='Q',
M='M',
month='M',
MONTH='M',
w='W',
W='W',
week='W',
WEEK='W',
d='D',
D='D',
J='D',
day='D',
DAY='D',
)
_time_units = dict(
h='h',
H='h',
HH24='h',
hour='h',
HOUR='h',
m='m',
MI='m',
minute='m',
MINUTE='m',
s='s',
second='s',
SECOND='s',
ms='ms',
millisecond='ms',
MILLISECOND='ms',
us='us',
microsecond='ms',
MICROSECOND='ms',
ns='ns',
nanosecond='ns',
NANOSECOND='ns',
)
# Units accepted by timestamp truncation: the union of the date-unit and
# time-unit spellings (later entries win on key collision, as before).
_timestamp_units = {**_date_units, **_time_units}
class TimestampTruncate(ValueOp):
arg = Arg(rlz.timestamp)
unit = Arg(rlz.isin(_timestamp_units))
output_type = rlz.shape_like('arg', dt.timestamp)
class DateTruncate(ValueOp):
arg = Arg(rlz.date)
unit = Arg(rlz.isin(_date_units))
output_type = rlz.shape_like('arg', dt.date)
class TimeTruncate(ValueOp):
arg = Arg(rlz.time)
unit = Arg(rlz.isin(_time_units))
output_type = rlz.shape_like('arg', dt.time)
class Strftime(ValueOp):
    """Format a temporal value as a string using a strftime-style format."""

    arg = Arg(rlz.temporal)
    format_str = Arg(rlz.string)
    output_type = rlz.shape_like('arg', dt.string)


class StringToTimestamp(ValueOp):
    """Parse a string into a timestamp using a strptime-style format."""

    arg = Arg(rlz.string)
    format_str = Arg(rlz.string)
    timezone = Arg(rlz.string, default=None)
    # NOTE(review): the result dtype is fixed to UTC regardless of the
    # ``timezone`` argument — confirm this is intentional.
    output_type = rlz.shape_like('arg', dt.Timestamp(timezone='UTC'))
class ExtractTemporalField(TemporalUnaryOp):
output_type = rlz.shape_like('arg', dt.int32)
ExtractTimestampField = ExtractTemporalField
class ExtractDateField(ExtractTemporalField):
arg = Arg(rlz.one_of([rlz.date, rlz.timestamp]))
class ExtractTimeField(ExtractTemporalField):
arg = Arg(rlz.one_of([rlz.time, rlz.timestamp]))
class ExtractYear(ExtractDateField):
pass
class ExtractMonth(ExtractDateField):
pass
class ExtractDay(ExtractDateField):
pass
class ExtractDayOfYear(ExtractDateField):
pass
class ExtractQuarter(ExtractDateField):
pass
class ExtractHour(ExtractTimeField):
pass
class ExtractMinute(ExtractTimeField):
pass
class ExtractSecond(ExtractTimeField):
pass
class ExtractMillisecond(ExtractTimeField):
pass
class DayOfWeekIndex(UnaryOp):
arg = Arg(rlz.one_of([rlz.date, rlz.timestamp]))
output_type = rlz.shape_like('arg', dt.int16)
class DayOfWeekName(UnaryOp):
arg = Arg(rlz.one_of([rlz.date, rlz.timestamp]))
output_type = rlz.shape_like('arg', dt.string)
class DayOfWeekNode(Node):
arg = Arg(rlz.one_of([rlz.date, rlz.timestamp]))
def output_type(self):
return ir.DayOfWeek
class Time(UnaryOp):
output_type = rlz.shape_like('arg', dt.time)
class Date(UnaryOp):
output_type = rlz.shape_like('arg', dt.date)
class TimestampFromUNIX(ValueOp):
arg = Arg(rlz.any)
# Only pandas-based backends support 'ns'
unit = Arg(rlz.isin({'s', 'ms', 'us', 'ns'}))
output_type = rlz.shape_like('arg', dt.timestamp)
class DecimalUnaryOp(UnaryOp):
    # Base for unary operations restricted to decimal-typed arguments.
    arg = Arg(rlz.decimal)


class DecimalPrecision(DecimalUnaryOp):
    """Return the precision of a decimal value as int32."""

    output_type = rlz.shape_like('arg', dt.int32)


class DecimalScale(UnaryOp):
    """Return the scale of a decimal value as int32.

    NOTE(review): unlike ``DecimalPrecision`` this derives from ``UnaryOp``,
    so the argument is not validated as decimal — confirm whether it should
    extend ``DecimalUnaryOp`` instead.
    """

    output_type = rlz.shape_like('arg', dt.int32)
class Hash(ValueOp):
arg = Arg(rlz.any)
how = Arg(rlz.isin({'fnv'}))
output_type = rlz.shape_like('arg', dt.int64)
class DateAdd(BinaryOp):
left = Arg(rlz.date)
right = Arg(rlz.interval(units={'Y', 'Q', 'M', 'W', 'D'}))
output_type = rlz.shape_like('left')
class DateSub(BinaryOp):
left = Arg(rlz.date)
right = Arg(rlz.interval(units={'Y', 'Q', 'M', 'W', 'D'}))
output_type = rlz.shape_like('left')
class DateDiff(BinaryOp):
left = Arg(rlz.date)
right = Arg(rlz.date)
output_type = rlz.shape_like('left', dt.Interval('D'))
class TimeAdd(BinaryOp):
left = Arg(rlz.time)
right = Arg(rlz.interval(units={'h', 'm', 's', 'ms', 'us', 'ns'}))
output_type = rlz.shape_like('left')
class TimeSub(BinaryOp):
left = Arg(rlz.time)
right = Arg(rlz.interval(units={'h', 'm', 's', 'ms', 'us', 'ns'}))
output_type = rlz.shape_like('left')
class TimeDiff(BinaryOp):
left = Arg(rlz.time)
right = Arg(rlz.time)
output_type = rlz.shape_like('left', dt.Interval('s'))
class TimestampAdd(BinaryOp):
left = Arg(rlz.timestamp)
right = Arg(
rlz.interval(
units={'Y', 'Q', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'}
)
)
output_type = rlz.shape_like('left')
class TimestampSub(BinaryOp):
left = Arg(rlz.timestamp)
right = Arg(
rlz.interval(
units={'Y', 'Q', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'}
)
)
output_type = rlz.shape_like('left')
class TimestampDiff(BinaryOp):
left = Arg(rlz.timestamp)
right = Arg(rlz.timestamp)
output_type = rlz.shape_like('left', dt.Interval('s'))
class IntervalBinaryOp(BinaryOp):
    def output_type(self):
        """Resolve the result dtype of a binary operation on intervals.

        Interval operands are unwrapped to their underlying value types so
        numeric promotion rules can apply; the promoted value type is then
        wrapped back into an Interval carrying the left operand's unit.
        """
        # Strip the Interval wrapper from interval-typed operands.
        args = [
            arg.cast(arg.type().value_type)
            if isinstance(arg.type(), dt.Interval)
            else arg
            for arg in self.args
        ]
        expr = rlz.numeric_like(args, self.__class__.op)(self)
        left_dtype = self.left.type()
        dtype_type = type(left_dtype)
        # Preserve any extra dtype parameters besides unit/value_type
        # declared in the dtype's __slots__ (carried over from the left
        # operand's type).
        additional_args = {
            attr: getattr(left_dtype, attr)
            for attr in dtype_type.__slots__
            if attr not in {'unit', 'value_type'}
        }
        dtype = dtype_type(left_dtype.unit, expr.type(), **additional_args)
        return rlz.shape_like(self.args, dtype=dtype)
class IntervalAdd(IntervalBinaryOp):
left = Arg(rlz.interval)
right = Arg(rlz.interval)
op = operator.add
class IntervalSubtract(IntervalBinaryOp):
left = Arg(rlz.interval)
right = Arg(rlz.interval)
op = operator.sub
class IntervalMultiply(IntervalBinaryOp):
left = Arg(rlz.interval)
right = Arg(rlz.numeric)
op = operator.mul
class IntervalFloorDivide(IntervalBinaryOp):
left = Arg(rlz.interval)
right = Arg(rlz.numeric)
op = operator.floordiv
class IntervalFromInteger(ValueOp):
arg = Arg(rlz.integer)
unit = Arg(
rlz.isin({'Y', 'Q', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'})
)
@property
def resolution(self):
return dt.Interval(self.unit).resolution
def output_type(self):
dtype = dt.Interval(self.unit, self.arg.type())
return rlz.shape_like(self.arg, dtype=dtype)
class ArrayLength(UnaryOp):
arg = Arg(rlz.array)
output_type = rlz.shape_like('arg', dt.int64)
class ArraySlice(ValueOp):
arg = Arg(rlz.array)
start = Arg(rlz.integer)
stop = Arg(rlz.integer, default=None)
output_type = rlz.typeof('arg')
class ArrayIndex(ValueOp):
arg = Arg(rlz.array)
index = Arg(rlz.integer)
def output_type(self):
value_dtype = self.arg.type().value_type
return rlz.shape_like(self.arg, value_dtype)
class ArrayConcat(ValueOp):
left = Arg(rlz.array)
right = Arg(rlz.array)
output_type = rlz.shape_like('left')
def _validate(self):
left_dtype, right_dtype = self.left.type(), self.right.type()
if left_dtype != right_dtype:
raise com.IbisTypeError(
'Array types must match exactly in a {} operation. '
'Left type {} != Right type {}'.format(
type(self).__name__, left_dtype, right_dtype
)
)
class ArrayRepeat(ValueOp):
arg = Arg(rlz.array)
times = Arg(rlz.integer)
output_type = rlz.typeof('arg')
class ArrayCollect(Reduction):
arg = Arg(rlz.column(rlz.any))
def output_type(self):
dtype = dt.Array(self.arg.type())
return dtype.scalar_type()
class MapLength(ValueOp):
arg = Arg(rlz.mapping)
output_type = rlz.shape_like('arg', dt.int64)
class MapValueForKey(ValueOp):
arg = Arg(rlz.mapping)
key = Arg(rlz.one_of([rlz.string, rlz.integer]))
def output_type(self):
return rlz.shape_like(tuple(self.args), self.arg.type().value_type)
class MapValueOrDefaultForKey(ValueOp):
    """Look up ``key`` in a map, returning ``default`` when absent."""

    arg = Arg(rlz.mapping)
    key = Arg(rlz.one_of([rlz.string, rlz.integer]))
    default = Arg(rlz.any)

    def output_type(self):
        arg = self.arg
        default = self.default
        map_type = arg.type()
        value_type = map_type.value_type
        # NOTE(review): default.type() is evaluated before the ``is not
        # None`` check below, so if ``default`` could ever be None this
        # raises AttributeError first — confirm rlz.any never yields None.
        default_type = default.type()

        if default is not None and not dt.same_kind(default_type, value_type):
            raise com.IbisTypeError(
                "Default value\n{}\nof type {} cannot be cast to map's value "
                "type {}".format(default, default_type, value_type)
            )

        # Result type is the more general of the default's and the map's
        # value type.
        result_type = dt.highest_precedence((default_type, value_type))
        return rlz.shape_like(tuple(self.args), result_type)
class MapKeys(ValueOp):
arg = Arg(rlz.mapping)
def output_type(self):
arg = self.arg
return rlz.shape_like(arg, dt.Array(arg.type().key_type))
class MapValues(ValueOp):
arg = Arg(rlz.mapping)
def output_type(self):
arg = self.arg
return rlz.shape_like(arg, dt.Array(arg.type().value_type))
class MapConcat(ValueOp):
left = Arg(rlz.mapping)
right = Arg(rlz.mapping)
output_type = rlz.typeof('left')
class StructField(ValueOp):
arg = Arg(rlz.struct)
field = Arg(str)
def output_type(self):
struct_dtype = self.arg.type()
value_dtype = struct_dtype[self.field]
return rlz.shape_like(self.arg, value_dtype)
class Literal(ValueOp):
    """A constant value paired with an explicit ibis datatype."""

    value = Arg(rlz.noop)
    dtype = Arg(dt.dtype)

    def __repr__(self):
        return '{}({})'.format(
            type(self).__name__, ', '.join(map(repr, self.args))
        )

    def equals(self, other, cache=None):
        # NOTE(review): equality compares the wrapped value and its concrete
        # Python type but not self.dtype — two literals with equal values
        # and different ibis dtypes compare equal; confirm this is intended.
        return (
            isinstance(other, Literal)
            and isinstance(other.value, type(self.value))
            and self.value == other.value
        )

    def output_type(self):
        return self.dtype.scalar_type()

    def root_tables(self):
        # A literal depends on no table expression.
        return []

    def __hash__(self) -> int:
        """Return the hash of a literal value.

        We override this method to make sure that we can handle things that
        aren't eminently hashable like an ``array<array<int64>>``.
        """
        return hash(self.dtype._literal_value_hash_key(self.value))
class NullLiteral(Literal):
"""Typeless NULL literal"""
value = Arg(type(None), default=None)
dtype = Arg(dt.Null, default=dt.null)
class ScalarParameter(ValueOp):
_counter = itertools.count()
dtype = Arg(dt.dtype)
counter = Arg(int, default=lambda: next(ScalarParameter._counter))
def resolve_name(self):
return 'param_{:d}'.format(self.counter)
def __repr__(self):
return '{}(type={})'.format(type(self).__name__, self.dtype)
def __hash__(self):
return hash((self.dtype, self.counter))
def output_type(self):
return self.dtype.scalar_type()
def equals(self, other, cache=None):
return (
isinstance(other, ScalarParameter)
and self.counter == other.counter
and self.dtype.equals(other.dtype, cache=cache)
)
@property
def inputs(self):
return ()
def root_tables(self):
return []
class ExpressionList(Node):
"""Data structure for a list of arbitrary expressions"""
exprs = Arg(rlz.noop)
def __init__(self, values):
super().__init__(list(map(rlz.any, values)))
@property
def inputs(self):
return (tuple(self.exprs),)
def root_tables(self):
return distinct_roots(self.exprs)
def output_type(self):
return ir.ExprList
class ValueList(ValueOp):
"""Data structure for a list of value expressions"""
values = Arg(rlz.noop)
display_argnames = False # disable showing argnames in repr
def __init__(self, values):
super().__init__(tuple(map(rlz.any, values)))
def output_type(self):
dtype = rlz.highest_precedence_dtype(self.values)
return functools.partial(ir.ListExpr, dtype=dtype)
def root_tables(self):
return distinct_roots(*self.values)
# ----------------------------------------------------------------------
# GeoSpatial operations
class GeoSpatialBinOp(BinaryOp):
"""Geo Spatial base binary"""
left = Arg(rlz.geospatial)
right = Arg(rlz.geospatial)
class GeoSpatialUnOp(UnaryOp):
"""Geo Spatial base unary"""
arg = Arg(rlz.geospatial)
class GeoDistance(GeoSpatialBinOp):
"""Returns minimum distance between two geo spatial data"""
output_type = rlz.shape_like('args', dt.float64)
class GeoContains(GeoSpatialBinOp):
"""Check if the first geo spatial data contains the second one"""
output_type = rlz.shape_like('args', dt.boolean)
class GeoContainsProperly(GeoSpatialBinOp):
"""Check if the first geo spatial data contains the second one,
and no boundary points are shared."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoCovers(GeoSpatialBinOp):
"""Returns True if no point in Geometry B is outside Geometry A"""
output_type = rlz.shape_like('args', dt.boolean)
class GeoCoveredBy(GeoSpatialBinOp):
"""Returns True if no point in Geometry/Geography A is
outside Geometry/Geography B"""
output_type = rlz.shape_like('args', dt.boolean)
class GeoCrosses(GeoSpatialBinOp):
"""Returns True if the supplied geometries have some, but not all,
interior points in common."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoDisjoint(GeoSpatialBinOp):
"""Returns True if the Geometries do not “spatially intersect” -
if they do not share any space together."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoEquals(GeoSpatialBinOp):
"""Returns True if the given geometries represent the same geometry."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoGeometryN(GeoSpatialUnOp):
"""Returns the Nth Geometry of a Multi geometry."""
n = Arg(rlz.integer)
output_type = rlz.shape_like('args', dt.geometry)
class GeoGeometryType(GeoSpatialUnOp):
"""Returns the type of the geometry."""
output_type = rlz.shape_like('args', dt.string)
class GeoIntersects(GeoSpatialBinOp):
"""Returns True if the Geometries/Geography “spatially intersect in 2D”
- (share any portion of space) and False if they don’t (they are Disjoint).
"""
output_type = rlz.shape_like('args', dt.boolean)
class GeoIsValid(GeoSpatialUnOp):
"""Returns true if the geometry is well-formed."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoLineLocatePoint(GeoSpatialBinOp):
"""
Locate the distance a point falls along the length of a line.
Returns a float between zero and one representing the location of the
closest point on the linestring to the given point, as a fraction of the
total 2d line length.
"""
left = Arg(rlz.linestring)
right = Arg(rlz.point)
output_type = rlz.shape_like('args', dt.halffloat)
class GeoLineMerge(GeoSpatialUnOp):
"""
Merge a MultiLineString into a LineString.
Returns a (set of) LineString(s) formed by sewing together the
constituent line work of a multilinestring. If a geometry other than
a linestring or multilinestring is given, this will return an empty
geometry collection.
"""
output_type = rlz.shape_like('args', dt.geometry)
class GeoLineSubstring(GeoSpatialUnOp):
"""
Clip a substring from a LineString.
Returns a linestring that is a substring of the input one, starting
and ending at the given fractions of the total 2d length. The second
and third arguments are floating point values between zero and one.
This only works with linestrings.
"""
arg = Arg(rlz.linestring)
start = Arg(rlz.floating)
end = Arg(rlz.floating)
output_type = rlz.shape_like('args', dt.linestring)
class GeoOrderingEquals(GeoSpatialBinOp):
"""
Check if two geometries are equal and have the same point ordering.
Returns true if the two geometries are equal and the coordinates
are in the same order.
"""
output_type = rlz.shape_like('args', dt.boolean)
class GeoOverlaps(GeoSpatialBinOp):
"""Returns True if the Geometries share space, are of the same dimension,
but are not completely contained by each other."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoTouches(GeoSpatialBinOp):
"""Returns True if the geometries have at least one point in common,
but their interiors do not intersect."""
output_type = rlz.shape_like('args', dt.boolean)
class GeoUnaryUnion(Reduction):
"""Returns the pointwise union of the geometries in the column."""
arg = Arg(rlz.column(rlz.geospatial))
def output_type(self):
return dt.geometry.scalar_type()
class GeoUnion(GeoSpatialBinOp):
"""Returns the pointwise union of the two geometries."""
output_type = rlz.shape_like('args', dt.geometry)
class GeoArea(GeoSpatialUnOp):
"""Area of the geo spatial data"""
output_type = rlz.shape_like('args', dt.float64)
class GeoPerimeter(GeoSpatialUnOp):
"""Perimeter of the geo spatial data"""
output_type = rlz.shape_like('args', dt.float64)
class GeoLength(GeoSpatialUnOp):
"""Length of geo spatial data"""
output_type = rlz.shape_like('args', dt.float64)
class GeoMaxDistance(GeoSpatialBinOp):
"""Returns the 2-dimensional maximum distance between two geometries in
projected units. If g1 and g2 is the same geometry the function will
return the distance between the two vertices most far from each other
in that geometry
"""
output_type = rlz.shape_like('args', dt.float64)
class GeoX(GeoSpatialUnOp):
"""Return the X coordinate of the point, or NULL if not available.
Input must be a point
"""
output_type = rlz.shape_like('args', dt.float64)
class GeoY(GeoSpatialUnOp):
"""Return the Y coordinate of the point, or NULL if not available.
Input must be a point
"""
output_type = rlz.shape_like('args', dt.float64)
class GeoXMin(GeoSpatialUnOp):
    """Returns X minima of a bounding box 2d or 3d or a geometry"""

    # Docstring fix: this is the X minimum (the sibling GeoYMin handles Y);
    # the original docstring said "Y minima".
    output_type = rlz.shape_like('args', dt.float64)
class GeoXMax(GeoSpatialUnOp):
"""Returns X maxima of a bounding box 2d or 3d or a geometry"""
output_type = rlz.shape_like('args', dt.float64)
class GeoYMin(GeoSpatialUnOp):
"""Returns Y minima of a bounding box 2d or 3d or a geometry"""
output_type = rlz.shape_like('args', dt.float64)
class GeoYMax(GeoSpatialUnOp):
"""Returns Y maxima of a bounding box 2d or 3d or a geometry"""
output_type = rlz.shape_like('args', dt.float64)
class GeoStartPoint(GeoSpatialUnOp):
"""Returns the first point of a LINESTRING geometry as a POINT or
NULL if the input parameter is not a LINESTRING
"""
output_type = rlz.shape_like('arg', dt.point)
class GeoEndPoint(GeoSpatialUnOp):
"""Returns the last point of a LINESTRING geometry as a POINT or
NULL if the input parameter is not a LINESTRING
"""
output_type = rlz.shape_like('arg', dt.point)
class GeoPoint(GeoSpatialBinOp):
"""
Return a point constructed on the fly from the provided coordinate values.
Constant coordinates result in construction of a POINT literal.
"""
left = Arg(rlz.numeric)
right = Arg(rlz.numeric)
output_type = rlz.shape_like('args', dt.point)
class GeoPointN(GeoSpatialUnOp):
"""Return the Nth point in a single linestring in the geometry.
Negative values are counted backwards from the end of the LineString,
so that -1 is the last point. Returns NULL if there is no linestring in
the geometry
"""
n = Arg(rlz.integer)
output_type = rlz.shape_like('args', dt.point)
class GeoNPoints(GeoSpatialUnOp):
"""Return the number of points in a geometry. Works for all geometries"""
output_type = rlz.shape_like('args', dt.int64)
class GeoNRings(GeoSpatialUnOp):
"""If the geometry is a polygon or multi-polygon returns the number of
rings. It counts the outer rings as well
"""
output_type = rlz.shape_like('args', dt.int64)
class GeoSRID(GeoSpatialUnOp):
"""Returns the spatial reference identifier for the ST_Geometry."""
output_type = rlz.shape_like('args', dt.int64)
class GeoSetSRID(GeoSpatialUnOp):
"""Set the spatial reference identifier for the ST_Geometry."""
srid = Arg(rlz.integer)
output_type = rlz.shape_like('args', dt.geometry)
class GeoBuffer(GeoSpatialUnOp):
"""Returns a geometry that represents all points whose distance from this
Geometry is less than or equal to distance. Calculations are in the
Spatial Reference System of this Geometry.
"""
radius = Arg(rlz.floating)
output_type = rlz.shape_like('args', dt.geometry)
class GeoCentroid(GeoSpatialUnOp):
"""Returns the geometric center of a geometry."""
output_type = rlz.shape_like('arg', dt.point)
class GeoDFullyWithin(GeoSpatialBinOp):
"""Returns True if the geometries are fully within the specified distance
of one another.
"""
distance = Arg(rlz.floating)
output_type = rlz.shape_like('args', dt.boolean)
class GeoDWithin(GeoSpatialBinOp):
"""Returns True if the geometries are within the specified distance
of one another.
"""
distance = Arg(rlz.floating)
output_type = rlz.shape_like('args', dt.boolean)
class GeoEnvelope(GeoSpatialUnOp):
"""Returns a geometry representing the boundingbox of the supplied geometry.
"""
output_type = rlz.shape_like('arg', dt.polygon)
class GeoAzimuth(GeoSpatialBinOp):
"""Returns the angle in radians from the horizontal of the vector defined
by pointA and pointB. Angle is computed clockwise from down-to-up:
on the clock: 12=0; 3=PI/2; 6=PI; 9=3PI/2.
"""
left = Arg(rlz.point)
right = Arg(rlz.point)
output_type = rlz.shape_like('args', dt.float64)
class GeoWithin(GeoSpatialBinOp):
"""Returns True if the geometry A is completely inside geometry B"""
output_type = rlz.shape_like('args', dt.boolean)
class GeoIntersection(GeoSpatialBinOp):
"""Returns a geometry that represents the point set intersection
of the Geometries.
"""
output_type = rlz.shape_like('args', dt.geometry)
class GeoDifference(GeoSpatialBinOp):
"""Returns a geometry that represents that part of geometry A
that does not intersect with geometry B
"""
output_type = rlz.shape_like('args', dt.geometry)
class GeoSimplify(GeoSpatialUnOp):
"""Returns a simplified version of the given geometry."""
tolerance = Arg(rlz.floating)
preserve_collapsed = Arg(rlz.boolean)
output_type = rlz.shape_like('arg', dt.geometry)
class GeoTransform(GeoSpatialUnOp):
"""Returns a transformed version of the given geometry into a new SRID."""
srid = Arg(rlz.integer)
output_type = rlz.shape_like('arg', dt.geometry)
class GeoAsBinary(GeoSpatialUnOp):
"""Return the Well-Known Binary (WKB) representation of the
geometry/geography without SRID meta data.
"""
output_type = rlz.shape_like('arg', dt.binary)
class GeoAsEWKB(GeoSpatialUnOp):
"""Return the Well-Known Binary (WKB) representation of the
geometry/geography with SRID meta data.
"""
output_type = rlz.shape_like('arg', dt.binary)
class GeoAsEWKT(GeoSpatialUnOp):
"""Return the Well-Known Text (WKT) representation of the
geometry/geography with SRID meta data.
"""
output_type = rlz.shape_like('arg', dt.string)
class GeoAsText(GeoSpatialUnOp):
"""Return the Well-Known Text (WKT) representation of the
geometry/geography without SRID metadata.
"""
output_type = rlz.shape_like('arg', dt.string)
class ElementWiseVectorizedUDF(ValueOp):
    """Node for element wise UDF."""

    # The user-supplied vectorized function and its argument expressions.
    func = Arg(callable)
    func_args = Arg(tuple)
    input_type = Arg(rlz.shape_like('func_args'))
    _output_type = Arg(rlz.noop)

    def __init__(self, func, args, input_type, output_type):
        self.func = func
        self.func_args = args
        self.input_type = input_type
        self._output_type = output_type

    @property
    def inputs(self):
        return self.func_args

    def output_type(self):
        # Element-wise UDFs always produce a column of the declared type.
        return self._output_type.column_type()

    def root_tables(self):
        # Union of the root tables of every argument expression,
        # deduplicated while preserving first-seen order.
        result = list(
            toolz.unique(
                toolz.concat(arg._root_tables() for arg in self.func_args)
            )
        )

        return result
class ReductionVectorizedUDF(Reduction):
"""Node for reduction UDF."""
func = Arg(callable)
func_args = Arg(tuple)
input_type = Arg(rlz.shape_like('func_args'))
_output_type = Arg(rlz.noop)
def __init__(self, func, args, input_type, output_type):
self.func = func
self.func_args = args
self.input_type = input_type
self._output_type = output_type
@property
def inputs(self):
return self.func_args
def output_type(self):
return self._output_type.scalar_type()
def root_tables(self):
result = list(
toolz.unique(
toolz.concat(arg._root_tables() for arg in self.func_args)
)
)
return result
class AnalyticVectorizedUDF(AnalyticOp):
"""Node for analytics UDF."""
func = Arg(callable)
func_args = Arg(tuple)
input_type = Arg(rlz.shape_like('func_args'))
_output_type = Arg(rlz.noop)
def __init__(self, func, args, input_type, output_type):
self.func = func
self.func_args = args
self.input_type = input_type
self._output_type = output_type
@property
def inputs(self):
return self.func_args
def output_type(self):
return self._output_type.column_type()
def root_tables(self):
result = list(
toolz.unique(
toolz.concat(arg._root_tables() for arg in self.func_args)
)
)
return result
| 24.690725
| 80
| 0.623193
|
4a1566c009344becc57f87877c9727d8e8d6a5ce
| 6,891
|
py
|
Python
|
utils/build/build_llvm.py
|
goku2610/hermes
|
f3ddbbac8cb0e0d9214926b58242440b9cf1055c
|
[
"MIT"
] | null | null | null |
utils/build/build_llvm.py
|
goku2610/hermes
|
f3ddbbac8cb0e0d9214926b58242440b9cf1055c
|
[
"MIT"
] | null | null | null |
utils/build/build_llvm.py
|
goku2610/hermes
|
f3ddbbac8cb0e0d9214926b58242440b9cf1055c
|
[
"MIT"
] | 1
|
2021-03-26T18:18:26.000Z
|
2021-03-26T18:18:26.000Z
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the LICENSE
# file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import platform
import subprocess
import sys
from common import build_dir_suffix, get_parser, is_visual_studio, run_command, which
# It references the commit day so we can shallow clone
# and still manage to checkout this specific revision.
# NOTE: The revision date must be the day before the
# actual commit date.
_LLVM_REV = "c179d7b006348005d2da228aed4c3c251590baa3"
_LLVM_REV_DATE = "2018-10-08"
def parse_args():
    """Parse command-line arguments for the LLVM build.

    Positional source/build directories default to ``llvm`` and
    ``llvm_build``; both are resolved to absolute paths and the build
    directory gets a configuration-dependent suffix appended.
    """
    parser = get_parser()
    parser.add_argument("llvm_src_dir", type=str, nargs="?", default="llvm")
    parser.add_argument("llvm_build_dir", type=str, nargs="?", default="llvm_build")
    # --target/-t may be given multiple times; collected into a list.
    parser.add_argument("--target", "-t", action="append", default=[])
    args = parser.parse_args()
    args.llvm_src_dir = os.path.realpath(args.llvm_src_dir)
    args.llvm_build_dir = os.path.realpath(args.llvm_build_dir)
    # Distribution builds default to size-optimized; otherwise Debug.
    args.build_type = args.build_type or ("MinSizeRel" if args.distribute else "Debug")
    args.llvm_build_dir += build_dir_suffix(args)
    return args
def build_git_command(http_proxy):
    """Assemble the base git invocation, adding an HTTP proxy when given
    and disabling filemode/CRLF translation on Windows."""
    # Otherwise, trust that the user has git on the path.
    cmd = [which("git")]
    if http_proxy:
        cmd.extend(["-c", "http.proxy={}".format(http_proxy)])
    if platform.system() == "Windows":
        cmd.extend(["-c", "core.filemode=false"])
        cmd.extend(["-c", "core.autocrlf=false"])
    return cmd
def clone_and_patch_llvm(args):
    """Clone LLVM (if absent), pin it to the known revision, and apply
    the Hermes patch on top as a local commit."""
    git = build_git_command(args.http_proxy)
    if not os.path.exists(args.llvm_src_dir):
        # If the directory doesn't exist, clone LLVM there.
        print("Cloning LLVM into {}".format(args.llvm_src_dir))
        run_command(
            git
            + [
                "clone",
                "--shallow-since",
                _LLVM_REV_DATE,
                "https://github.com/llvm-mirror/llvm.git",
                args.llvm_src_dir,
            ]
        )

    # Checkout a specific revision in LLVM.
    run_command(git + ["checkout", _LLVM_REV], cwd=args.llvm_src_dir)

    # Check that the repository is clean.
    try:
        run_command(git + ["diff-index", "--quiet", "HEAD"], cwd=args.llvm_src_dir)
    except subprocess.CalledProcessError:
        raise Exception("llvm dir is dirty (contains uncommitted changes)")

    # Apply small edits to LLVM from patch files.
    run_command(
        git
        + [
            "apply",
            "--ignore-space-change",
            "--ignore-whitespace",
            os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                "llvm-changes-for-hermes.patch",
            ),
        ],
        cwd=args.llvm_src_dir,
    )

    # Commit the patch.
    run_command(
        git
        + [
            "-c",
            "user.name=nobody",
            "-c",
            "user.email='nobody@example.com'",
            "commit",
            "-a",
            "-m",
            "Patch by Hermes build script",
        ],
        cwd=args.llvm_src_dir,
    )
def main():
    """Configure and build LLVM for Hermes.

    Clones/patches LLVM, generates the build system via CMake with
    platform-appropriate flags, then builds — retrying a few times to work
    around a flaky MSBuild linker error (LNK1000) on Windows.
    """
    args = parse_args()
    print("Source Dir: {}".format(args.llvm_src_dir))
    print("Using Build system: {}".format(args.build_system))
    print("Build Dir: {}".format(args.llvm_build_dir))
    print("Build Type: {}".format(args.build_type))
    clone_and_patch_llvm(args)
    cmake_flags = args.cmake_flags.split() + [
        "-DLLVM_TARGETS_TO_BUILD=",
        "-DCMAKE_BUILD_TYPE={}".format(args.build_type),
    ]
    if args.is_32_bit:
        cmake_flags += ["-DLLVM_BUILD_32_BITS=On"]
    if platform.system() == "Windows":
        # Use a 64-bit toolchain host on 64-bit Windows with Visual Studio.
        if platform.machine().endswith("64") and is_visual_studio(args.build_system):
            cmake_flags += ["-Thost=x64"]
        cmake_flags += ["-DLLVM_INCLUDE_EXAMPLES=Off"]
    if args.enable_asan:
        cmake_flags += ["-DLLVM_USE_SANITIZER=Address"]
    cmake_flags += ["-DLLVM_VERSION_PRINTER_SHOW_HOST_TARGET_INFO=Off"]
    if args.target_platform.startswith("iphone"):
        cmake_flags += [
            "-DCMAKE_C_FLAGS=-miphoneos-version-min=8.0",
            "-DCMAKE_CXX_FLAGS=-miphoneos-version-min=8.0",
            "-DCMAKE_TOOLCHAIN_FILE={}".format(
                os.path.join(args.llvm_src_dir, "cmake", "platforms", "iOS.cmake")
            ),
            "-DLLVM_BUILD_RUNTIME=Off",
            "-DLLVM_INCLUDE_TESTS=Off",
            "-DLLVM_INCLUDE_EXAMPLES=Off",
            "-DLLVM_ENABLE_BACKTRACES=Off",
            "-DLLVM_INCLUDE_UTILS=Off",
            "-DLLVM_ENABLE_TERMINFO=Off",
        ]
        if args.target_platform == "iphoneos":
            cmake_flags += ["-DCMAKE_OSX_ARCHITECTURES=armv7;arm64"]
        elif args.target_platform == "iphonesimulator":
            # BUG FIX: check_output returns bytes (with a trailing newline)
            # under Python 3; decode and strip so the flag is a clean path
            # instead of "b'/...\n'".
            xcode_sysroot = (
                subprocess.check_output(
                    [which("xcodebuild"), "-version", "-sdk", "iphonesimulator", "Path"]
                )
                .decode("utf-8")
                .strip()
            )
            cmake_flags += [
                "-DCMAKE_OSX_ARCHITECTURES=x86_64",
                "-DCMAKE_OSX_SYSROOT={}".format(xcode_sysroot),
            ]
    elif args.target_platform == "macosx":
        cmake_flags += [
            "-DCMAKE_C_FLAGS=-mmacosx-version-min=10.9",
            "-DCMAKE_CXX_FLAGS=-mmacosx-version-min=10.9",
        ]
    cmake_flags += ["-DPYTHON_EXECUTABLE={}".format(sys.executable or which("python"))]
    try:
        os.mkdir(args.llvm_build_dir)
    except OSError:
        # It's alright if the file already exists.
        pass
    print("CMake flags: {}".format(" ".join(cmake_flags)))
    cmake = which("cmake")
    # Print the CMake version to assist in diagnosing issues.
    # (Decoded: check_output returns bytes under Python 3.)
    print(
        "CMake version:\n{}".format(
            subprocess.check_output(
                [cmake, "--version"], stderr=subprocess.STDOUT
            ).decode("utf-8", "replace")
        )
    )
    run_command(
        [cmake, "-G", args.build_system, args.llvm_src_dir] + cmake_flags,
        env=os.environ,
        cwd=args.llvm_build_dir,
    )
    # MSBuild needs retries to handle an unexplainable linker error: LNK1000.
    # Retry the build in case of failures.
    tries = 3
    for i in range(tries):
        try:
            build_cmd = [cmake, "--build", args.llvm_build_dir]
            for target in args.target:
                build_cmd += ["--target", target]
            if args.distribute and is_visual_studio(args.build_system):
                build_cmd += ["--config", "Release"]
            run_command(build_cmd, env=os.environ)
            break
        except subprocess.CalledProcessError as e:
            if i == tries - 1:
                # If all retries failed, re-throw the last exception
                raise
            else:
                print("Exec failed: {}\nRetrying...".format(str(e)))
                continue
if __name__ == "__main__":
main()
| 33.614634
| 87
| 0.605718
|
4a1566e47f9e7ace0efe62186ac4aa96dc8fdbf9
| 498
|
py
|
Python
|
tests/test_maybe/test_maybe_functions/test_maybe_decorator.py
|
internetimagery/returns
|
8f4f23bae6861fb3969a8d0c8979f96c5589fc61
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_maybe/test_maybe_functions/test_maybe_decorator.py
|
internetimagery/returns
|
8f4f23bae6861fb3969a8d0c8979f96c5589fc61
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_maybe/test_maybe_functions/test_maybe_decorator.py
|
internetimagery/returns
|
8f4f23bae6861fb3969a8d0c8979f96c5589fc61
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import absolute_import
from typing import Dict, Optional
from returns.maybe import Nothing, Some, maybe
@maybe
def _function(hashmap, key):
return hashmap.get(key, None)
def test_maybe_some():
u"""Ensures that maybe decorator works correctly for some case."""
assert _function({u'a': u'b'}, u'a') == Some(u'b')
def test_maybe_nothing():
    u"""Ensures that maybe decorator works correctly for nothing case."""
    result = _function({u'a': u'b'}, u'c')
    assert result == Nothing
| 24.9
| 73
| 0.702811
|
4a1568185e43cd8fd3501b2d0b86df2b1f0d309a
| 896
|
py
|
Python
|
python_brasilidades/telefones_br.py
|
lariodiniz/Alura-Python-Brasil-validacao-de-dados
|
e8e2f551e69a996cf4d47c93cc60664670637289
|
[
"MIT"
] | null | null | null |
python_brasilidades/telefones_br.py
|
lariodiniz/Alura-Python-Brasil-validacao-de-dados
|
e8e2f551e69a996cf4d47c93cc60664670637289
|
[
"MIT"
] | null | null | null |
python_brasilidades/telefones_br.py
|
lariodiniz/Alura-Python-Brasil-validacao-de-dados
|
e8e2f551e69a996cf4d47c93cc60664670637289
|
[
"MIT"
] | null | null | null |
import re
class TelefonesBr:
    """A Brazilian phone number, validated on construction.

    Layout matched: optional country code (``ddi``, 2-3 digits),
    area code (``ddd``, 2 digits), a 4-5 digit prefix and a 4 digit
    suffix. Raises ValueError when no such sequence is found.
    """

    # Single source of truth for the number layout, compiled once
    # (was duplicated verbatim in valida_telefone and format_numero).
    _PADRAO = re.compile("([0-9]{2,3})?([0-9]{2})([0-9]{4,5})([0-9]{4})")

    def __init__(self, telefone):
        """Store *telefone* if valid, otherwise raise ValueError."""
        if self.valida_telefone(telefone):
            self.numero = telefone
        else:
            raise ValueError('Telefone não encontrado!')

    def valida_telefone(self, telefone):
        """Return True when *telefone* contains a parsable phone number."""
        return bool(self._PADRAO.search(telefone))

    def format_numero(self):
        """Return the number formatted as ``+DDI(DD)PPPP-SSSS``.

        Bug fix: when the optional country code is absent, the prefix is
        omitted entirely — the original emitted the literal text
        ``+None(...)`` because ``group(1)`` is None in that case.
        """
        resposta = self._PADRAO.search(self.numero)
        ddi = resposta.group(1)
        ddd = resposta.group(2)
        numero1 = resposta.group(3)
        numero2 = resposta.group(4)
        prefixo = f'+{ddi}' if ddi else ''
        return f'{prefixo}({ddd}){numero1}-{numero2}'

    def __str__(self):
        """Delegate to format_numero for display."""
        return self.format_numero()
| 28.903226
| 64
| 0.551339
|
4a1568b0ec15009b614a022d046d9dc8e83f2c6a
| 875
|
py
|
Python
|
app/core/inventory/commands/update_items.py
|
sns/burgess
|
23a73660ff5079b282d44329e7a150757a2a9c61
|
[
"BSD-3-Clause"
] | null | null | null |
app/core/inventory/commands/update_items.py
|
sns/burgess
|
23a73660ff5079b282d44329e7a150757a2a9c61
|
[
"BSD-3-Clause"
] | null | null | null |
app/core/inventory/commands/update_items.py
|
sns/burgess
|
23a73660ff5079b282d44329e7a150757a2a9c61
|
[
"BSD-3-Clause"
] | null | null | null |
from sqlalchemy.orm import Session
from app.core.inventory.models.item import Item
from app.core.request_handler import RequestHandler
from app.core.inventory.schemas.item import Item as ItemSchema
from typing import List
class Command:
    """Payload for an inventory replacement: the full desired item list."""
    # Declared element type of the payload; assigned in __init__.
    items: List[ItemSchema]
    def __init__(self, items):
        # items: complete desired inventory (list of ItemSchema objects).
        self.items = items
class Handler(RequestHandler):
    """Replaces the entire Item table with the items from a Command.

    The replacement is all-or-nothing: on any failure the transaction is
    rolled back and the error is suppressed, preserving the original
    best-effort contract (callers are not notified of failure).
    """
    def __init__(self, db: Session, command: Command):
        super(Handler, self).__init__(db)
        self.command = command

    def execute(self):
        """Delete every stored Item and bulk-insert the command's items."""
        try:
            # Build ORM rows from the incoming schema objects.
            updated_items = [
                Item(name=i.name, quantity=i.quantity)
                for i in self.command.items
            ]
            self.db.query(Item).delete()
            self.db.bulk_save_objects(updated_items)
            self.db.commit()
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; database errors still
            # trigger a rollback and are suppressed as before.
            self.db.rollback()
| 30.172414
| 75
| 0.627429
|
4a156a15c0573d056308e5b5184672cbaedbbc84
| 24,908
|
py
|
Python
|
Lib/site-packages/aniso8601/tests/test_time.py
|
EduMacedoEng/ScreenRecorder
|
40050789074a40d214cb09d9f6a7b504e345617f
|
[
"MIT"
] | 1
|
2021-01-27T17:14:06.000Z
|
2021-01-27T17:14:06.000Z
|
Lib/site-packages/aniso8601/tests/test_time.py
|
EduMacedoEng/ScreenRecorder
|
40050789074a40d214cb09d9f6a7b504e345617f
|
[
"MIT"
] | null | null | null |
Lib/site-packages/aniso8601/tests/test_time.py
|
EduMacedoEng/ScreenRecorder
|
40050789074a40d214cb09d9f6a7b504e345617f
|
[
"MIT"
] | 2
|
2021-01-20T12:39:49.000Z
|
2021-02-11T10:08:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Brandon Nielsen
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
import unittest
import aniso8601
from aniso8601.exceptions import ISOFormatError
from aniso8601.resolution import TimeResolution
from aniso8601.time import (get_time_resolution, parse_datetime, parse_time,
_parse_hour, _parse_minute_time,
_parse_second_time, _split_tz)
from aniso8601.tests.compat import mock
class TestTimeResolutionFunctions(unittest.TestCase):
    """Tests for aniso8601.time.get_time_resolution."""
    def test_get_time_resolution(self):
        """Each representative ISO 8601 time string maps to its resolution."""
        cases = (('01:23:45', TimeResolution.Seconds),
                 ('24:00:00', TimeResolution.Seconds),
                 ('23:21:28,512400', TimeResolution.Seconds),
                 ('23:21:28.512400', TimeResolution.Seconds),
                 ('01:23', TimeResolution.Minutes),
                 ('24:00', TimeResolution.Minutes),
                 ('01:23,4567', TimeResolution.Minutes),
                 ('01:23.4567', TimeResolution.Minutes),
                 ('012345', TimeResolution.Seconds),
                 ('240000', TimeResolution.Seconds),
                 ('0123', TimeResolution.Minutes),
                 ('2400', TimeResolution.Minutes),
                 ('01', TimeResolution.Hours),
                 ('24', TimeResolution.Hours),
                 ('12,5', TimeResolution.Hours),
                 ('12.5', TimeResolution.Hours),
                 ('232128.512400+00:00', TimeResolution.Seconds),
                 ('0123.4567+00:00', TimeResolution.Minutes),
                 ('01.4567+00:00', TimeResolution.Hours),
                 ('01:23:45+00:00', TimeResolution.Seconds),
                 ('24:00:00+00:00', TimeResolution.Seconds),
                 ('23:21:28.512400+00:00', TimeResolution.Seconds),
                 ('01:23+00:00', TimeResolution.Minutes),
                 ('24:00+00:00', TimeResolution.Minutes),
                 ('01:23.4567+00:00', TimeResolution.Minutes),
                 ('23:21:28.512400+11:15', TimeResolution.Seconds),
                 ('23:21:28.512400-12:34', TimeResolution.Seconds),
                 ('23:21:28.512400Z', TimeResolution.Seconds),
                 ('06:14:00.000123Z', TimeResolution.Seconds))
        for timestr, expected in cases:
            self.assertEqual(get_time_resolution(timestr), expected)
    def test_get_time_resolution_badtype(self):
        """Non-string inputs raise ValueError."""
        for badinput in (None, 1, False, 1.234):
            with self.assertRaises(ValueError):
                get_time_resolution(badinput)
    def test_get_time_resolution_badstr(self):
        """Malformed time strings raise ISOFormatError."""
        for badstr in ('A6:14:00.000123Z', '06:14:0B', 'bad', ''):
            with self.assertRaises(ISOFormatError):
                get_time_resolution(badstr)
class TestTimeParserFunctions(unittest.TestCase):
    """Tests for parse_time/parse_datetime and their private helpers.

    Fix: test_parse_datetime_badstr previously called parse_time, so it
    never exercised parse_datetime with the malformed datetime strings
    it is named for; it now calls parse_datetime.
    """
    def test_parse_time(self):
        """parse_time forwards parsed components to build_time."""
        testtuples = (('01:23:45', {'hh': '01', 'mm': '23',
                                    'ss': '45', 'tz': None}),
                      ('24:00:00', {'hh': '24', 'mm': '00',
                                    'ss': '00', 'tz': None}),
                      ('23:21:28,512400', {'hh': '23', 'mm': '21',
                                           'ss': '28.512400', 'tz': None}),
                      ('23:21:28.512400', {'hh': '23', 'mm': '21',
                                           'ss': '28.512400', 'tz': None}),
                      ('01:03:11.858714', {'hh': '01', 'mm': '03',
                                           'ss': '11.858714', 'tz': None}),
                      ('14:43:59.9999997', {'hh': '14', 'mm': '43',
                                            'ss': '59.9999997', 'tz': None}),
                      ('01:23', {'hh': '01', 'mm': '23', 'tz': None}),
                      ('24:00', {'hh': '24', 'mm': '00', 'tz': None}),
                      ('01:23,4567', {'hh': '01', 'mm': '23.4567',
                                      'tz': None}),
                      ('01:23.4567', {'hh': '01', 'mm': '23.4567',
                                      'tz': None}),
                      ('012345', {'hh': '01', 'mm': '23',
                                  'ss': '45', 'tz': None}),
                      ('240000', {'hh': '24', 'mm': '00',
                                  'ss': '00', 'tz': None}),
                      ('232128,512400', {'hh': '23', 'mm': '21',
                                         'ss': '28.512400', 'tz': None}),
                      ('232128.512400', {'hh': '23', 'mm': '21',
                                         'ss': '28.512400', 'tz': None}),
                      ('010311.858714', {'hh': '01', 'mm': '03',
                                         'ss': '11.858714', 'tz': None}),
                      ('144359.9999997', {'hh': '14', 'mm': '43',
                                          'ss': '59.9999997', 'tz': None}),
                      ('0123', {'hh': '01', 'mm': '23', 'tz': None}),
                      ('2400', {'hh': '24', 'mm': '00', 'tz': None}),
                      ('01', {'hh': '01', 'tz': None}),
                      ('24', {'tz': None}),
                      ('12,5', {'hh': '12.5', 'tz': None}),
                      ('12.5', {'hh': '12.5', 'tz': None}),
                      ('232128,512400+00:00', {'hh': '23', 'mm': '21',
                                               'ss': '28.512400',
                                               'tz': (False, None,
                                                      '00', '00',
                                                      '+00:00', 'timezone')}),
                      ('232128.512400+00:00', {'hh': '23', 'mm': '21',
                                               'ss': '28.512400',
                                               'tz': (False, None,
                                                      '00', '00',
                                                      '+00:00', 'timezone')}),
                      ('0123,4567+00:00', {'hh': '01', 'mm': '23.4567',
                                           'tz': (False, None,
                                                  '00', '00',
                                                  '+00:00', 'timezone')}),
                      ('0123.4567+00:00', {'hh': '01', 'mm': '23.4567',
                                           'tz': (False, None,
                                                  '00', '00',
                                                  '+00:00', 'timezone')}),
                      ('01,4567+00:00', {'hh': '01.4567',
                                         'tz': (False, None,
                                                '00', '00',
                                                '+00:00', 'timezone')}),
                      ('01.4567+00:00', {'hh': '01.4567',
                                         'tz': (False, None,
                                                '00', '00',
                                                '+00:00', 'timezone')}),
                      ('01:23:45+00:00', {'hh': '01', 'mm': '23',
                                          'ss': '45',
                                          'tz': (False, None,
                                                 '00', '00',
                                                 '+00:00', 'timezone')}),
                      ('24:00:00+00:00', {'hh': '24', 'mm': '00',
                                          'ss': '00',
                                          'tz': (False, None,
                                                 '00', '00',
                                                 '+00:00', 'timezone')}),
                      ('23:21:28.512400+00:00', {'hh': '23', 'mm': '21',
                                                 'ss': '28.512400',
                                                 'tz': (False, None,
                                                        '00', '00',
                                                        '+00:00',
                                                        'timezone')}),
                      ('01:23+00:00', {'hh': '01', 'mm': '23',
                                       'tz': (False, None,
                                              '00', '00',
                                              '+00:00', 'timezone')}),
                      ('24:00+00:00', {'hh': '24', 'mm': '00',
                                       'tz': (False, None,
                                              '00', '00',
                                              '+00:00', 'timezone')}),
                      ('01:23.4567+00:00', {'hh': '01', 'mm': '23.4567',
                                            'tz': (False, None,
                                                   '00', '00',
                                                   '+00:00', 'timezone')}),
                      ('23:21:28.512400+11:15', {'hh': '23', 'mm': '21',
                                                 'ss': '28.512400',
                                                 'tz': (False, None,
                                                        '11', '15',
                                                        '+11:15',
                                                        'timezone')}),
                      ('23:21:28.512400-12:34', {'hh': '23', 'mm': '21',
                                                 'ss': '28.512400',
                                                 'tz': (True, None,
                                                        '12', '34',
                                                        '-12:34',
                                                        'timezone')}),
                      ('23:21:28.512400Z', {'hh': '23', 'mm': '21',
                                            'ss': '28.512400',
                                            'tz': (False, True,
                                                   None, None,
                                                   'Z', 'timezone')}),
                      ('06:14:00.000123Z', {'hh': '06', 'mm': '14',
                                            'ss': '00.000123',
                                            'tz': (False, True,
                                                   None, None,
                                                   'Z', 'timezone')}))
        for testtuple in testtuples:
            with mock.patch.object(aniso8601.time.PythonTimeBuilder,
                                   'build_time') as mockBuildTime:
                mockBuildTime.return_value = testtuple[1]
                result = parse_time(testtuple[0])
                self.assertEqual(result, testtuple[1])
                mockBuildTime.assert_called_once_with(**testtuple[1])
    def test_parse_time_badtype(self):
        """Non-string inputs raise ValueError."""
        testtuples = (None, 1, False, 1.234)
        for testtuple in testtuples:
            with self.assertRaises(ValueError):
                parse_time(testtuple, builder=None)
    def test_parse_time_badstr(self):
        """Malformed time strings raise ISOFormatError."""
        testtuples = ('A6:14:00.000123Z', '06:14:0B', 'bad', '')
        for testtuple in testtuples:
            with self.assertRaises(ISOFormatError):
                parse_time(testtuple, builder=None)
    def test_parse_time_mockbuilder(self):
        """An explicitly supplied builder receives the parsed components."""
        mockBuilder = mock.Mock()
        expectedargs = {'hh': '01', 'mm': '23', 'ss': '45', 'tz': None}
        mockBuilder.build_time.return_value = expectedargs
        result = parse_time('01:23:45', builder=mockBuilder)
        self.assertEqual(result, expectedargs)
        mockBuilder.build_time.assert_called_once_with(**expectedargs)
        mockBuilder = mock.Mock()
        expectedargs = {'hh': '23', 'mm': '21', 'ss': '28.512400',
                        'tz': (False, None, '00', '00', '+00:00', 'timezone')}
        mockBuilder.build_time.return_value = expectedargs
        result = parse_time('232128.512400+00:00', builder=mockBuilder)
        self.assertEqual(result, expectedargs)
        mockBuilder.build_time.assert_called_once_with(**expectedargs)
        mockBuilder = mock.Mock()
        expectedargs = {'hh': '23', 'mm': '21', 'ss': '28.512400',
                        'tz': (False, None, '11', '15', '+11:15', 'timezone')}
        mockBuilder.build_time.return_value = expectedargs
        result = parse_time('23:21:28.512400+11:15', builder=mockBuilder)
        self.assertEqual(result, expectedargs)
        mockBuilder.build_time.assert_called_once_with(**expectedargs)
    def test_parse_datetime(self):
        """parse_datetime forwards date and time tuples to build_datetime."""
        testtuples = (('2019-06-05T01:03:11,858714',
                       (('2019', '06', '05', None, None, None, 'date'),
                        ('01', '03', '11.858714',
                         None, 'time'))),
                      ('2019-06-05T01:03:11.858714',
                       (('2019', '06', '05', None, None, None, 'date'),
                        ('01', '03', '11.858714',
                         None, 'time'))),
                      ('1981-04-05T23:21:28.512400Z',
                       (('1981', '04', '05', None, None, None, 'date'),
                        ('23', '21', '28.512400',
                         (False, True, None, None, 'Z', 'timezone'),
                         'time'))),
                      ('1981095T23:21:28.512400-12:34',
                       (('1981', None, None, None, None, '095', 'date'),
                        ('23', '21', '28.512400',
                         (True, None, '12', '34', '-12:34', 'timezone'),
                         'time'))),
                      ('19810405T23:21:28+00',
                       (('1981', '04', '05', None, None, None, 'date'),
                        ('23', '21', '28',
                         (False, None, '00', None, '+00', 'timezone'),
                         'time'))),
                      ('19810405T23:21:28+00:00',
                       (('1981', '04', '05', None, None, None, 'date'),
                        ('23', '21', '28',
                         (False, None, '00', '00', '+00:00', 'timezone'),
                         'time'))))
        for testtuple in testtuples:
            with mock.patch.object(aniso8601.time.PythonTimeBuilder,
                                   'build_datetime') as mockBuildDateTime:
                mockBuildDateTime.return_value = testtuple[1]
                result = parse_datetime(testtuple[0])
                self.assertEqual(result, testtuple[1])
                mockBuildDateTime.assert_called_once_with(*testtuple[1])
    def test_parse_datetime_spacedelimited(self):
        """A space may separate date and time via the delimiter argument."""
        expectedargs = (('2004', None, None, '53', '6', None, 'date'),
                        ('23', '21', '28.512400',
                         (True, None, '12', '34', '-12:34', 'timezone'),
                         'time'))
        with mock.patch.object(aniso8601.time.PythonTimeBuilder,
                               'build_datetime') as mockBuildDateTime:
            mockBuildDateTime.return_value = expectedargs
            result = parse_datetime('2004-W53-6 23:21:28.512400-12:34',
                                    delimiter=' ')
            self.assertEqual(result, expectedargs)
            mockBuildDateTime.assert_called_once_with(*expectedargs)
    def test_parse_datetime_commadelimited(self):
        """A comma may separate date and time via the delimiter argument."""
        expectedargs = (('1981', '04', '05', None, None, None, 'date'),
                        ('23', '21', '28.512400',
                         (False, True, None, None, 'Z', 'timezone'),
                         'time'))
        with mock.patch.object(aniso8601.time.PythonTimeBuilder,
                               'build_datetime') as mockBuildDateTime:
            mockBuildDateTime.return_value = expectedargs
            result = parse_datetime('1981-04-05,23:21:28,512400Z',
                                    delimiter=',')
            self.assertEqual(result, expectedargs)
            mockBuildDateTime.assert_called_once_with(*expectedargs)
    def test_parse_datetime_baddelimiter(self):
        """A missing or wrong delimiter raises ISOFormatError."""
        testtuples = ('1981-04-05,23:21:28,512400Z',
                      '2004-W53-6 23:21:28.512400-12:3',
                      '1981040523:21:28')
        for testtuple in testtuples:
            with self.assertRaises(ISOFormatError):
                parse_datetime(testtuple, builder=None)
    def test_parse_datetime_badtype(self):
        """Non-string inputs raise ValueError."""
        testtuples = (None, 1, False, 1.234)
        for testtuple in testtuples:
            with self.assertRaises(ValueError):
                parse_datetime(testtuple, builder=None)
    def test_parse_datetime_badstr(self):
        """Malformed datetime strings raise ISOFormatError."""
        testtuples = ('1981-04-05TA6:14:00.000123Z',
                      '2004-W53-6T06:14:0B',
                      '2014-01-230T23:21:28+00',
                      '201401230T01:03:11.858714',
                      'bad',
                      '')
        for testtuple in testtuples:
            with self.assertRaises(ISOFormatError):
                # Fixed: previously called parse_time, leaving
                # parse_datetime untested against these inputs.
                parse_datetime(testtuple, builder=None)
    def test_parse_datetime_mockbuilder(self):
        """An explicitly supplied builder receives the parsed tuples."""
        mockBuilder = mock.Mock()
        expectedargs = (('1981', None, None, None, None, '095', 'date'),
                        ('23', '21', '28.512400',
                         (True, None, '12', '34', '-12:34', 'timezone'),
                         'time'))
        mockBuilder.build_datetime.return_value = expectedargs
        result = parse_datetime('1981095T23:21:28.512400-12:34',
                                builder=mockBuilder)
        self.assertEqual(result, expectedargs)
        mockBuilder.build_datetime.assert_called_once_with(*expectedargs)
    def test_parse_hour(self):
        """_parse_hour handles hour-only strings, fractions and timezones."""
        testtuples = (('01', None, {'hh': '01', 'tz': None}),
                      ('24', None, {'tz': None}),
                      ('01.4567', None, {'hh': '01.4567', 'tz': None}),
                      ('12.5', None, {'hh': '12.5', 'tz': None}),
                      ('08', (True, None, '12', '34', '-12:34', 'timezone'),
                       {'hh': '08', 'tz':
                        (True, None, '12', '34', '-12:34', 'timezone')}))
        for testtuple in testtuples:
            mockBuilder = mock.Mock()
            mockBuilder.build_time.return_value = testtuple[2]
            result = _parse_hour(testtuple[0], testtuple[1], mockBuilder)
            self.assertEqual(result, testtuple[2])
            mockBuilder.build_time.assert_called_once_with(**testtuple[2])
    def test_parse_minute_time(self):
        """_parse_minute_time handles hh:mm / hhmm forms and timezones."""
        testtuples = (('01:23', None, {'hh': '01', 'mm': '23', 'tz': None}),
                      ('24:00', None, {'hh': '24', 'mm': '00', 'tz': None}),
                      ('01:23.4567', None, {'hh': '01', 'mm': '23.4567',
                                            'tz': None}),
                      ('0123', None, {'hh': '01', 'mm': '23', 'tz': None}),
                      ('2400', None, {'hh': '24', 'mm': '00', 'tz': None}),
                      ('0123.4567', None, {'hh': '01', 'mm': '23.4567',
                                           'tz': None}),
                      ('08:13', (True, None, '12', '34', '-12:34', 'timezone'),
                       {'hh': '08', 'mm': '13',
                        'tz': (True, None, '12', '34', '-12:34', 'timezone')}))
        for testtuple in testtuples:
            mockBuilder = mock.Mock()
            mockBuilder.build_time.return_value = testtuple[2]
            result = _parse_minute_time(testtuple[0], testtuple[1],
                                        mockBuilder)
            self.assertEqual(result, testtuple[2])
            mockBuilder.build_time.assert_called_once_with(**testtuple[2])
    def test_parse_second_time(self):
        """_parse_second_time handles hh:mm:ss / hhmmss forms and timezones."""
        testtuples = (('01:23:45', None, {'hh': '01', 'mm': '23',
                                          'ss': '45', 'tz': None}),
                      ('24:00:00', None, {'hh': '24', 'mm': '00',
                                          'ss': '00', 'tz': None}),
                      ('23:21:28.512400', None, {'hh': '23', 'mm': '21',
                                                 'ss': '28.512400',
                                                 'tz': None}),
                      ('14:43:59.9999997', None, {'hh': '14', 'mm': '43',
                                                  'ss': '59.9999997',
                                                  'tz': None}),
                      ('012345', None, {'hh': '01', 'mm': '23',
                                        'ss': '45', 'tz': None}),
                      ('240000', None, {'hh': '24', 'mm': '00',
                                        'ss': '00', 'tz': None}),
                      ('232128.512400', None, {'hh': '23', 'mm': '21',
                                               'ss': '28.512400', 'tz': None}),
                      ('144359.9999997', None, {'hh': '14', 'mm': '43',
                                                'ss': '59.9999997',
                                                'tz': None}),
                      ('08:22:21',
                       (True, None, '12', '34', '-12:34', 'timezone'),
                       {'hh': '08', 'mm': '22', 'ss': '21',
                        'tz': (True, None, '12', '34', '-12:34', 'timezone')}))
        for testtuple in testtuples:
            mockBuilder = mock.Mock()
            mockBuilder.build_time.return_value = testtuple[2]
            result = _parse_second_time(testtuple[0], testtuple[1],
                                        mockBuilder)
            self.assertEqual(result, testtuple[2])
            mockBuilder.build_time.assert_called_once_with(**testtuple[2])
    def test_split_tz(self):
        """_split_tz separates the time part from a trailing tz designator."""
        self.assertEqual(_split_tz('01:23:45'), ('01:23:45', None))
        self.assertEqual(_split_tz('24:00:00'), ('24:00:00', None))
        self.assertEqual(_split_tz('23:21:28.512400'),
                         ('23:21:28.512400', None))
        self.assertEqual(_split_tz('01:23'), ('01:23', None))
        self.assertEqual(_split_tz('24:00'), ('24:00', None))
        self.assertEqual(_split_tz('01:23.4567'), ('01:23.4567', None))
        self.assertEqual(_split_tz('012345'), ('012345', None))
        self.assertEqual(_split_tz('240000'), ('240000', None))
        self.assertEqual(_split_tz('0123'), ('0123', None))
        self.assertEqual(_split_tz('2400'), ('2400', None))
        self.assertEqual(_split_tz('01'), ('01', None))
        self.assertEqual(_split_tz('24'), ('24', None))
        self.assertEqual(_split_tz('12.5'), ('12.5', None))
        self.assertEqual(_split_tz('232128.512400+00:00'),
                         ('232128.512400', '+00:00'))
        self.assertEqual(_split_tz('0123.4567+00:00'), ('0123.4567', '+00:00'))
        self.assertEqual(_split_tz('01.4567+00:00'), ('01.4567', '+00:00'))
        self.assertEqual(_split_tz('01:23:45+00:00'), ('01:23:45', '+00:00'))
        self.assertEqual(_split_tz('24:00:00+00:00'), ('24:00:00', '+00:00'))
        self.assertEqual(_split_tz('23:21:28.512400+00:00'),
                         ('23:21:28.512400', '+00:00'))
        self.assertEqual(_split_tz('01:23+00:00'), ('01:23', '+00:00'))
        self.assertEqual(_split_tz('24:00+00:00'), ('24:00', '+00:00'))
        self.assertEqual(_split_tz('01:23.4567+00:00'),
                         ('01:23.4567', '+00:00'))
        self.assertEqual(_split_tz('23:21:28.512400+11:15'),
                         ('23:21:28.512400', '+11:15'))
        self.assertEqual(_split_tz('23:21:28.512400-12:34'),
                         ('23:21:28.512400', '-12:34'))
        self.assertEqual(_split_tz('23:21:28.512400Z'),
                         ('23:21:28.512400', 'Z'))
        self.assertEqual(_split_tz('06:14:00.000123Z'),
                         ('06:14:00.000123', 'Z'))
| 48.084942
| 79
| 0.418982
|
4a156acfb4ab1018b0cf761c8a07645d196bab63
| 5,975
|
py
|
Python
|
codespace/python/examples/persistentconversationbot.py
|
tzpBingo/github-trending
|
05dc0a4a5aae91871a57b9c6db5d9faee243173e
|
[
"MIT"
] | 37
|
2017-10-12T01:50:42.000Z
|
2022-02-24T02:44:45.000Z
|
codespace/python/examples/persistentconversationbot.py
|
tzpBingo/github-trending
|
05dc0a4a5aae91871a57b9c6db5d9faee243173e
|
[
"MIT"
] | null | null | null |
codespace/python/examples/persistentconversationbot.py
|
tzpBingo/github-trending
|
05dc0a4a5aae91871a57b9c6db5d9faee243173e
|
[
"MIT"
] | 12
|
2018-07-31T10:04:56.000Z
|
2022-02-07T00:08:06.000Z
|
#!/usr/bin/env python
# pylint: disable=C0116,W0613
# This program is dedicated to the public domain under the CC0 license.
"""
First, a few callback functions are defined. Then, those functions are passed to
the Dispatcher and registered at their respective places.
Then, the bot is started and runs until we press Ctrl-C on the command line.
Usage:
Example of a bot-user conversation using ConversationHandler.
Send /start to initiate the conversation.
Press Ctrl-C on the command line or send a signal to the process to stop the
bot.
"""
import logging
from typing import Dict
from telegram import ReplyKeyboardMarkup, Update, ReplyKeyboardRemove
from telegram.ext import (
Updater,
CommandHandler,
MessageHandler,
Filters,
ConversationHandler,
PicklePersistence,
CallbackContext,
)
# Enable logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO
)
# Module-level logger for this bot script.
logger = logging.getLogger(__name__)
# Conversation states used by the ConversationHandler in main().
CHOOSING, TYPING_REPLY, TYPING_CHOICE = range(3)
# Predefined category keyboard shown while in the CHOOSING state.
reply_keyboard = [
    ['Age', 'Favourite colour'],
    ['Number of siblings', 'Something else...'],
    ['Done'],
]
# one_time_keyboard hides the keyboard after a choice is made.
markup = ReplyKeyboardMarkup(reply_keyboard, one_time_keyboard=True)
def facts_to_str(user_data: Dict[str, str]) -> str:
    """Format the gathered user facts, one per line, wrapped in newlines."""
    body = "\n".join(f'{key} - {value}' for key, value in user_data.items())
    return f"\n{body}\n"
def start(update: Update, context: CallbackContext) -> int:
    """Greet the user, mention any stored facts and prompt for input."""
    known = context.user_data
    if known:
        suffix = (
            f" You already told me your {', '.join(known.keys())}. Why don't you "
            f"tell me something more about yourself? Or change anything I already know."
        )
    else:
        suffix = (
            " I will hold a more complex conversation with you. Why don't you tell me "
            "something about yourself?"
        )
    update.message.reply_text("Hi! My name is Doctor Botter." + suffix, reply_markup=markup)
    return CHOOSING
def regular_choice(update: Update, context: CallbackContext) -> int:
    """Remember which predefined category was picked and ask for its value."""
    category = update.message.text.lower()
    context.user_data['choice'] = category
    known_value = context.user_data.get(category)
    if known_value:
        reply = f'Your {category}? I already know the following about that: {known_value}'
    else:
        reply = f'Your {category}? Yes, I would love to hear about that!'
    update.message.reply_text(reply)
    return TYPING_REPLY
def custom_choice(update: Update, context: CallbackContext) -> int:
    """Prompt the user to name a custom category."""
    prompt = ('Alright, please send me the category first, '
              'for example "Most impressive skill"')
    update.message.reply_text(prompt)
    return TYPING_CHOICE
def received_information(update: Update, context: CallbackContext) -> int:
    """Save the answer under the pending category, then re-show the menu."""
    answer = update.message.text
    pending = context.user_data['choice']
    context.user_data[pending] = answer.lower()
    del context.user_data['choice']
    summary = ("Neat! Just so you know, this is what you already told me:"
               + facts_to_str(context.user_data)
               + "You can tell me more, or change your opinion on something.")
    update.message.reply_text(summary, reply_markup=markup)
    return CHOOSING
def show_data(update: Update, context: CallbackContext) -> None:
    """Echo everything the bot has stored about the user."""
    stored = facts_to_str(context.user_data)
    update.message.reply_text(f"This is what you already told me: {stored}")
def done(update: Update, context: CallbackContext) -> int:
    """Drop any pending category, show the summary and end the conversation."""
    context.user_data.pop('choice', None)
    facts = facts_to_str(context.user_data)
    update.message.reply_text(
        f"I learned these facts about you: {facts}Until next time!",
        reply_markup=ReplyKeyboardRemove(),
    )
    return ConversationHandler.END
def main() -> None:
    """Run the bot."""
    # Create the Updater and pass it your bot's token.
    # Conversation state survives restarts via pickle-file persistence.
    persistence = PicklePersistence(filename='conversationbot')
    updater = Updater("TOKEN", persistence=persistence)
    # Get the dispatcher to register handlers
    dispatcher = updater.dispatcher
    # Add conversation handler with the states CHOOSING, TYPING_CHOICE and TYPING_REPLY
    # NOTE(review): in '^Something else...$' the dots are regex wildcards,
    # so this also matches strings like 'Something elseXYZ' — confirm intended.
    conv_handler = ConversationHandler(
        entry_points=[CommandHandler('start', start)],
        states={
            CHOOSING: [
                MessageHandler(
                    Filters.regex('^(Age|Favourite colour|Number of siblings)$'), regular_choice
                ),
                MessageHandler(Filters.regex('^Something else...$'), custom_choice),
            ],
            TYPING_CHOICE: [
                MessageHandler(
                    Filters.text & ~(Filters.command | Filters.regex('^Done$')), regular_choice
                )
            ],
            TYPING_REPLY: [
                MessageHandler(
                    Filters.text & ~(Filters.command | Filters.regex('^Done$')),
                    received_information,
                )
            ],
        },
        fallbacks=[MessageHandler(Filters.regex('^Done$'), done)],
        name="my_conversation",
        persistent=True,
    )
    dispatcher.add_handler(conv_handler)
    # /show_data dumps everything stored for the user.
    show_data_handler = CommandHandler('show_data', show_data)
    dispatcher.add_handler(show_data_handler)
    # Start the Bot
    updater.start_polling()
    # Run the bot until you press Ctrl-C or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()
if __name__ == '__main__':
    main()
| 32.472826
| 96
| 0.660753
|
4a156b191b1f69480034ecb7cf46cbf99436479b
| 1,760
|
py
|
Python
|
test/hummingbot/core/mock_api/test_mock_web_socket_server.py
|
subchap/hummingbot
|
de1ed87977c45a72425bb9708e707230fe117b99
|
[
"Apache-2.0"
] | 2
|
2021-06-17T13:09:25.000Z
|
2021-06-17T13:09:29.000Z
|
test/hummingbot/core/mock_api/test_mock_web_socket_server.py
|
rabbitholeanalytics/hummingbot
|
60e5b0f7eca84cbd3288a786a20ac99d622510a8
|
[
"Apache-2.0"
] | null | null | null |
test/hummingbot/core/mock_api/test_mock_web_socket_server.py
|
rabbitholeanalytics/hummingbot
|
60e5b0f7eca84cbd3288a786a20ac99d622510a8
|
[
"Apache-2.0"
] | 1
|
2021-06-02T09:34:13.000Z
|
2021-06-02T09:34:13.000Z
|
import asyncio
import unittest.mock
import websockets
from hummingbot.core.mock_api.mock_web_socket_server import MockWebSocketServerFactory
import json
class MockWebSocketServerFactoryTest(unittest.TestCase):
    """Checks that a patched websockets.connect is rerouted to the mock
    server and that messages pushed via send_str/send_json (and a raw
    server-side send) arrive on the client socket."""
    @classmethod
    def setUpClass(cls) -> None:
        cls.ev_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop()
        # Serve a mock endpoint for the URL the test connects to below.
        cls.ws_server = MockWebSocketServerFactory.start_new_server("wss://www.google.com/ws/")
        # Patch websockets.connect so client connections hit the mock server.
        cls._patcher = unittest.mock.patch("websockets.connect", autospec=True)
        cls._mock = cls._patcher.start()
        cls._mock.side_effect = MockWebSocketServerFactory.reroute_ws_connect
        # need to wait a bit for the server to be available
        asyncio.get_event_loop().run_until_complete(asyncio.sleep(0.2))
    @classmethod
    def tearDownClass(cls) -> None:
        # Undo the websockets.connect patch and shut the mock server down.
        cls._patcher.stop()
        cls.ws_server.stop()
    async def _test_web_socket(self):
        """Drive one client connection through the three delivery paths."""
        uri = "wss://www.google.com/ws/"
        async with websockets.connect(uri) as websocket:
            await MockWebSocketServerFactory.send_str(uri, "aaa")
            answer = await websocket.recv()
            print(answer)
            self.assertEqual("aaa", answer)
            await MockWebSocketServerFactory.send_json(uri, data={"foo": "bar"})
            answer = await websocket.recv()
            print(answer)
            answer = json.loads(answer)
            self.assertEqual(answer["foo"], "bar")
            # Raw send on the server-side socket must also reach the client.
            await self.ws_server.websocket.send("xxx")
            answer = await websocket.recv()
            print(answer)
            self.assertEqual("xxx", answer)
    def test_web_socket(self):
        # Run the async scenario to completion on the shared event loop.
        asyncio.get_event_loop().run_until_complete(self._test_web_socket())
if __name__ == '__main__':
    unittest.main()
| 37.446809
| 95
| 0.669318
|
4a156b3a0eead5ce632fd8b164418516bce5b0f8
| 2,577
|
py
|
Python
|
users/models.py
|
ephyle/Legit-Info
|
7f3845563a64299aa64e4fdba75949276ed9a711
|
[
"BSD-2-Clause",
"CC-BY-4.0",
"Apache-2.0"
] | 44
|
2020-10-19T13:06:10.000Z
|
2022-01-23T10:56:31.000Z
|
users/models.py
|
ephyle/Legit-Info
|
7f3845563a64299aa64e4fdba75949276ed9a711
|
[
"BSD-2-Clause",
"CC-BY-4.0",
"Apache-2.0"
] | 111
|
2020-10-20T22:12:58.000Z
|
2022-03-28T00:25:13.000Z
|
users/models.py
|
ephyle/Legit-Info
|
7f3845563a64299aa64e4fdba75949276ed9a711
|
[
"BSD-2-Clause",
"CC-BY-4.0",
"Apache-2.0"
] | 31
|
2021-02-08T22:32:37.000Z
|
2022-03-11T10:57:29.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
users/models.py -- Database ORM models related to users and profiles.
Written by Tony Pearson, IBM, 2020
Licensed under Apache 2.0, see LICENSE for details
"""
# System imports
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from cfc_app.models import Impact, Criteria
# Create your models here.
class Profile(models.Model):
    """ A profile holds the location and impact areas. """
    class Meta:
        """ set application label for this profile """
        app_label = 'users'
    # One profile per user; deleting the user deletes the profile.
    user = models.OneToOneField(User, on_delete=models.CASCADE,
                                related_name='profile')
    # Preferred location; set to NULL if the location row is removed.
    location = models.ForeignKey('cfc_app.Location', null=True,
                                 related_name='profiles',
                                 on_delete=models.SET_NULL)
    # Impact areas this user cares about.
    impacts = models.ManyToManyField(Impact)
    # Criteria derived from location + impacts; see set_criteria().
    criteria = models.ForeignKey('cfc_app.Criteria', null=True,
                                 related_name='profiles',
                                 on_delete=models.SET_NULL)
    def __str__(self):
        """ Return a string representation of the model. """
        return f'{self.user.username}'
    def set_criteria(self):
        """ Create or update criteria record for this profile.

        Synchronizes the linked Criteria row with this profile's location
        and impact selections, creating the row when absent, then
        refreshes its text and saves both objects. Returns self.
        """
        crit = self.criteria
        if crit:
            crit.location = self.location
            selected = self.impacts.all()
            # Add/remove so the criteria M2M exactly mirrors the profile's.
            for impact in Impact.objects.all():
                if impact in selected:
                    crit.impacts.add(impact)
                else:
                    crit.impacts.remove(impact)
        else:
            crit = Criteria(location=self.location)
            # Save first: a primary key is needed before M2M adds.
            crit.save()
            for impact in self.impacts.all():
                crit.impacts.add(impact)
        crit.set_text()
        crit.save()
        self.criteria = crit
        self.save()
        return self
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    """ Create profile when you create a user. """
    # No-op touch of unused args, kept only to silence pylint warnings.
    if sender is None and kwargs is None:  # Eliminate pylint errors
        pass
    # Only on first save (creation), not on subsequent updates.
    if created:
        Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    """ Update profile when you update the user. """
    # No-op touch of unused args, kept only to silence pylint warnings.
    if sender is None and kwargs is None:  # Eliminate pylint errors
        pass
    instance.profile.save()
| 28.633333
| 69
| 0.606907
|
4a156c1382f732f825f89678222d6fac9c21cbe6
| 17,319
|
py
|
Python
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 2
|
2021-11-03T06:10:36.000Z
|
2021-12-01T06:29:39.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_private_endpoint_connections_operations.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 1
|
2021-05-19T02:55:10.000Z
|
2021-05-19T02:55:10.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
# NOTE: AutoRest-generated operations class -- manual edits will be lost when
# the client is regenerated (see the file header).
class PrivateEndpointConnectionsOperations:
    """PrivateEndpointConnectionsOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.storage.v2019_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name: str,
        account_name: str,
        **kwargs
    ) -> AsyncIterable["models.PrivateEndpointConnectionListResult"]:
        """List all the private endpoint connections associated with the storage account.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and lower-
         case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2019_06_01.models.PrivateEndpointConnectionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnectionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        accept = "application/json"
        # Builds the GET request for either the first page (formatted URL plus
        # api-version) or a continuation page (the opaque next_link as-is).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                    'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # The continuation token is always None: this operation returns a
            # single page (the result model carries no next-link field).
            return None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                # NOTE(review): unlike get/put/delete below, no ErrorResponse
                # model is deserialized here -- generated from the service spec.
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        account_name: str,
        private_endpoint_connection_name: str,
        **kwargs
    ) -> "models.PrivateEndpointConnection":
        """Gets the specified private endpoint connection associated with the storage account.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and lower-
         case letters only.
        :type account_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection associated
         with the Azure resource.
        :type private_endpoint_connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PrivateEndpointConnection, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2019_06_01.models.PrivateEndpointConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
    async def put(
        self,
        resource_group_name: str,
        account_name: str,
        private_endpoint_connection_name: str,
        properties: "models.PrivateEndpointConnection",
        **kwargs
    ) -> "models.PrivateEndpointConnection":
        """Update the state of specified private endpoint connection associated with the storage account.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and lower-
         case letters only.
        :type account_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection associated
         with the Azure resource.
        :type private_endpoint_connection_name: str
        :param properties: The private endpoint connection properties.
        :type properties: ~azure.mgmt.storage.v2019_06_01.models.PrivateEndpointConnection
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PrivateEndpointConnection, or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2019_06_01.models.PrivateEndpointConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PrivateEndpointConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.put.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
    async def delete(
        self,
        resource_group_name: str,
        account_name: str,
        private_endpoint_connection_name: str,
        **kwargs
    ) -> None:
        """Deletes the specified private endpoint connection associated with the storage account.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and lower-
         case letters only.
        :type account_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection associated
         with the Azure resource.
        :type private_endpoint_connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        accept = "application/json"
        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 204 means the connection was already gone; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
| 53.125767
| 232
| 0.685317
|
4a156c4d42c75af7f1b4cee5423d3410590e3b70
| 1,405
|
py
|
Python
|
tests/test_compat.py
|
piglei/attrs
|
b9668ef791a35e5bd2dc9f05c90ff427dc5c6fed
|
[
"MIT"
] | null | null | null |
tests/test_compat.py
|
piglei/attrs
|
b9668ef791a35e5bd2dc9f05c90ff427dc5c6fed
|
[
"MIT"
] | null | null | null |
tests/test_compat.py
|
piglei/attrs
|
b9668ef791a35e5bd2dc9f05c90ff427dc5c6fed
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: MIT
import types
import pytest
@pytest.fixture(name="mp")
def _mp():
    """Return a read-only proxy over a small, known sample mapping."""
    sample = {"x": 42, "y": "foo"}
    return types.MappingProxyType(sample)
class TestMetadataProxy:
    """
    Ensure properties of metadata proxy independently of hypothesis strategies.
    """
    def test_repr(self, mp):
        """
        repr makes sense and is consistent across Python versions.
        """
        # Key order inside the repr is not guaranteed on old interpreters.
        assert repr(mp) in (
            "mappingproxy({'x': 42, 'y': 'foo'})",
            "mappingproxy({'y': 'foo', 'x': 42})",
        )
    def test_immutable(self, mp):
        """
        All mutating methods raise errors.
        """
        # Item assignment / deletion fail with TypeError on the proxy itself.
        with pytest.raises(TypeError, match="not support item assignment"):
            mp["z"] = 23
        with pytest.raises(TypeError, match="not support item deletion"):
            del mp["x"]
        # The mutating dict methods are simply absent from the proxy, so the
        # AttributeError fires on attribute lookup, before any call happens.
        for name, args in [
            ("update", ({},)),
            ("clear", ()),
            ("pop", ("x",)),
            ("popitem", ()),
            ("setdefault", ("x",)),
        ]:
            with pytest.raises(AttributeError, match="no attribute '%s'" % name):
                getattr(mp, name)(*args)
| 26.509434
| 79
| 0.564413
|
4a156d36191daf87f2f337154b6759b1c495eeef
| 12,661
|
py
|
Python
|
flask/sessions.py
|
bells/flask
|
1441e02256a169ded5e1bdeaa304b279de98d53a
|
[
"BSD-3-Clause"
] | 1
|
2019-06-21T14:28:53.000Z
|
2019-06-21T14:28:53.000Z
|
flask/sessions.py
|
bells/flask
|
1441e02256a169ded5e1bdeaa304b279de98d53a
|
[
"BSD-3-Clause"
] | null | null | null |
flask/sessions.py
|
bells/flask
|
1441e02256a169ded5e1bdeaa304b279de98d53a
|
[
"BSD-3-Clause"
] | 1
|
2019-08-13T08:08:53.000Z
|
2019-08-13T08:08:53.000Z
|
# -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import hashlib
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
    """Return the number of whole seconds in the timedelta *td*.

    Microseconds are intentionally ignored, matching the original helper.
    """
    seconds_per_day = 24 * 60 * 60
    return td.seconds + td.days * seconds_per_day
class SessionMixin(object):
    """Mixin that adds the attributes Flask and its extensions expect a
    session object to provide on top of a basic dictionary interface.
    """
    @property
    def permanent(self):
        """Reflects the ``'_permanent'`` key in the dict."""
        return self.get('_permanent', False)
    @permanent.setter
    def permanent(self, value):
        self['_permanent'] = bool(value)
    #: Some session backends can tell you if a session is new, but that is
    #: not necessarily guaranteed.  Use with caution.  This mixin simply
    #: hardcodes `False`.
    new = False
    #: Some backends default this to `False` and detect modifications of the
    #: dictionary, as long as no mutable structures inside the session are
    #: changed.  This mixin just hardcodes `True`.
    modified = True
class TaggedJSONSerializer(object):
    """A customized JSON serializer that supports a few extra types that
    we take for granted when serializing (tuples, markup objects, datetime).
    """
    # NOTE(review): this class uses Python 2-only constructs (``unicode``,
    # ``dict.iteritems``, ``iterator.next``) and will not run on Python 3.
    def dumps(self, value):
        # Recursively wrap non-JSON types in single-key dicts whose key is a
        # space-prefixed tag: ' t' = tuple, ' m' = markup (__html__),
        # ' d' = datetime.  The leading space avoids clashing with user keys.
        def _tag(value):
            if isinstance(value, tuple):
                return {' t': [_tag(x) for x in value]}
            elif callable(getattr(value, '__html__', None)):
                return {' m': unicode(value.__html__())}
            elif isinstance(value, list):
                return [_tag(x) for x in value]
            elif isinstance(value, datetime):
                return {' d': http_date(value)}
            elif isinstance(value, dict):
                return dict((k, _tag(v)) for k, v in value.iteritems())
            elif isinstance(value, str):
                try:
                    # Byte strings must be ASCII-decodable to be stored.
                    return unicode(value)
                except UnicodeError:
                    # UnexpectedUnicodeError is imported at the bottom of the
                    # module to avoid a circular import.
                    raise UnexpectedUnicodeError(u'A byte string with '
                        u'non-ASCII data was passed to the session system '
                        u'which can only store unicode strings. Consider '
                        u'base64 encoding your string (String was %r)' % value)
            return value
        # Compact separators keep the cookie payload small.
        return json.dumps(_tag(value), separators=(',', ':'))
    def loads(self, value):
        # Inverse of dumps(): unwrap single-key tag dicts back into the
        # original Python objects; anything else passes through unchanged.
        def object_hook(obj):
            if len(obj) != 1:
                return obj
            the_key, the_value = obj.iteritems().next()
            if the_key == ' t':
                return tuple(the_value)
            elif the_key == ' m':
                return Markup(the_value)
            elif the_key == ' d':
                return parse_date(the_value)
            return obj
        return json.loads(value, object_hook=object_hook)
#: Module-level serializer instance shared by the cookie session interface.
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSession(CallbackDict, SessionMixin):
    """Base class for sessions that are transported in signed cookies.

    Tracks mutation through :class:`CallbackDict` so ``modified`` becomes
    `True` the moment the dictionary is changed after construction.
    """
    def __init__(self, initial=None):
        def _mark_modified(instance):
            instance.modified = True
        CallbackDict.__init__(self, initial, _mark_modified)
        # A freshly constructed session starts out unmodified.
        self.modified = False
class NullSession(SecureCookieSession):
    """Session replacement used when real session support is unavailable.

    Read-only access to the (empty) session keeps working, but every
    attempt to modify it raises a helpful :exc:`RuntimeError`.
    """
    def _fail(self, *args, **kwargs):
        raise RuntimeError('the session is unavailable because no secret key was set.  Set the secret_key on the application to something unique and secret.')
    __setitem__ = _fail
    __delitem__ = _fail
    clear = _fail
    pop = _fail
    popitem = _fail
    update = _fail
    setdefault = _fail
    del _fail
class SessionInterface(object):
    """The basic interface you have to implement in order to replace the
    default session interface which uses werkzeug's securecookie
    implementation. The only methods you have to implement are
    :meth:`open_session` and :meth:`save_session`, the others have
    useful defaults which you don't need to change.
    The session object returned by the :meth:`open_session` method has to
    provide a dictionary like interface plus the properties and methods
    from the :class:`SessionMixin`. We recommend just subclassing a dict
    and adding that mixin::
        class Session(dict, SessionMixin):
            pass
    If :meth:`open_session` returns `None` Flask will call into
    :meth:`make_null_session` to create a session that acts as replacement
    if the session support cannot work because some requirement is not
    fulfilled. The default :class:`NullSession` class that is created
    will complain that the secret key was not set.
    To replace the session interface on an application all you have to do
    is to assign :attr:`flask.Flask.session_interface`::
        app = Flask(__name__)
        app.session_interface = MySessionInterface()
    .. versionadded:: 0.8
    """
    #: :meth:`make_null_session` will look here for the class that should
    #: be created when a null session is requested. Likewise the
    #: :meth:`is_null_session` method will perform a typecheck against
    #: this type.
    null_session_class = NullSession
    #: A flag that indicates if the session interface is pickle based.
    #: This can be used by flask extensions to make a decision in regards
    #: to how to deal with the session object.
    #:
    #: .. versionadded:: 0.10
    pickle_based = False
    def make_null_session(self, app):
        """Creates a null session which acts as a replacement object if the
        real session support could not be loaded due to a configuration
        error. This mainly aids the user experience because the job of the
        null session is to still support lookup without complaining but
        modifications are answered with a helpful error message of what
        failed.
        This creates an instance of :attr:`null_session_class` by default.
        """
        return self.null_session_class()
    def is_null_session(self, obj):
        """Checks if a given object is a null session. Null sessions are
        not asked to be saved.
        This checks if the object is an instance of :attr:`null_session_class`
        by default.
        """
        return isinstance(obj, self.null_session_class)
    def get_cookie_domain(self, app):
        """Helpful helper method that returns the cookie domain that should
        be used for the session cookie if session cookies are used.
        """
        # An explicit configuration always wins.
        if app.config['SESSION_COOKIE_DOMAIN'] is not None:
            return app.config['SESSION_COOKIE_DOMAIN']
        if app.config['SERVER_NAME'] is not None:
            # chop off the port which is usually not supported by browsers
            rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
            # Google chrome does not like cookies set to .localhost, so
            # we just go with no domain then. Flask documents anyways that
            # cross domain cookies need a fully qualified domain name
            if rv == '.localhost':
                rv = None
            # If we infer the cookie domain from the server name we need
            # to check if we are in a subpath. In that case we can't
            # set a cross domain cookie.
            if rv is not None:
                path = self.get_cookie_path(app)
                if path != '/':
                    rv = rv.lstrip('.')
            return rv
    def get_cookie_path(self, app):
        """Returns the path for which the cookie should be valid. The
        default implementation uses the value from the ``SESSION_COOKIE_PATH``
        config var if it's set, and falls back to ``APPLICATION_ROOT`` or
        uses ``/`` if it's `None`.
        """
        return app.config['SESSION_COOKIE_PATH'] or \
               app.config['APPLICATION_ROOT'] or '/'
    def get_cookie_httponly(self, app):
        """Returns True if the session cookie should be httponly. This
        currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
        config var.
        """
        return app.config['SESSION_COOKIE_HTTPONLY']
    def get_cookie_secure(self, app):
        """Returns True if the cookie should be secure. This currently
        just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
        """
        return app.config['SESSION_COOKIE_SECURE']
    def get_expiration_time(self, app, session):
        """A helper method that returns an expiration date for the session
        or `None` if the session is linked to the browser session. The
        default implementation returns now + the permanent session
        lifetime configured on the application.
        """
        # Non-permanent sessions implicitly return None (browser-session cookie).
        if session.permanent:
            return datetime.utcnow() + app.permanent_session_lifetime
    def open_session(self, app, request):
        """This method has to be implemented and must either return `None`
        in case the loading failed because of a configuration error or an
        instance of a session object which implements a dictionary like
        interface + the methods and attributes on :class:`SessionMixin`.
        """
        raise NotImplementedError()
    def save_session(self, app, session, response):
        """This is called for actual sessions returned by :meth:`open_session`
        at the end of the request. This is still called during a request
        context so if you absolutely need access to the request you can do
        that.
        """
        raise NotImplementedError()
class SecureCookieSessionInterface(SessionInterface):
    """The default session interface that stores sessions in signed cookies
    through the :mod:`itsdangerous` module.
    """
    #: the salt that should be applied on top of the secret key for the
    #: signing of cookie based sessions.
    salt = 'cookie-session'
    #: the hash function to use for the signature. The default is sha1
    digest_method = staticmethod(hashlib.sha1)
    #: the name of the itsdangerous supported key derivation. The default
    #: is hmac.
    key_derivation = 'hmac'
    #: A python serializer for the payload. The default is a compact
    #: JSON derived serializer with support for some extra Python types
    #: such as datetime objects or tuples.
    serializer = session_json_serializer
    session_class = SecureCookieSession
    def get_signing_serializer(self, app):
        """Build the itsdangerous serializer used to sign the session cookie,
        or return `None` when no ``secret_key`` is configured."""
        if not app.secret_key:
            return None
        signer_kwargs = dict(
            key_derivation=self.key_derivation,
            digest_method=self.digest_method
        )
        return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
                                      serializer=self.serializer,
                                      signer_kwargs=signer_kwargs)
    def open_session(self, app, request):
        """Load the session from the request cookie; an invalid or expired
        signature silently yields a fresh empty session."""
        s = self.get_signing_serializer(app)
        if s is None:
            # No secret key configured -> caller falls back to a NullSession.
            return None
        val = request.cookies.get(app.session_cookie_name)
        if not val:
            return self.session_class()
        # The permanent session lifetime doubles as the signature max age.
        max_age = total_seconds(app.permanent_session_lifetime)
        try:
            data = s.loads(val, max_age=max_age)
            return self.session_class(data)
        except BadSignature:
            return self.session_class()
    def save_session(self, app, session, response):
        """Serialize the session into a signed cookie on *response*."""
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        if not session:
            # Session was emptied during this request: delete the cookie,
            # but only if it was actually modified (avoids useless headers).
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain, path=path)
            return
        httponly = self.get_cookie_httponly(app)
        secure = self.get_cookie_secure(app)
        expires = self.get_expiration_time(app, session)
        val = self.get_signing_serializer(app).dumps(dict(session))
        response.set_cookie(app.session_cookie_name, val,
                            expires=expires, httponly=httponly,
                            domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError
| 39.319876
| 79
| 0.642603
|
4a156d7b6401c4d9af1ca038ae1063534a0a05d2
| 12,423
|
py
|
Python
|
tests/sentry/api/endpoints/test_sentry_apps.py
|
mikiec84/sentry
|
4594f479db9a079d7f1ed41a9e07d8f36953319f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/sentry/api/endpoints/test_sentry_apps.py
|
mikiec84/sentry
|
4594f479db9a079d7f1ed41a9e07d8f36953319f
|
[
"BSD-3-Clause"
] | 1
|
2022-01-15T02:36:18.000Z
|
2022-01-15T02:36:18.000Z
|
tests/sentry/api/endpoints/test_sentry_apps.py
|
gaybro8777/sentry
|
4594f479db9a079d7f1ed41a9e07d8f36953319f
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
import six
from django.core.urlresolvers import reverse
from sentry.constants import SentryAppStatus
from sentry.utils import json
from sentry.testutils import APITestCase
from sentry.testutils.helpers import with_feature
class SentryAppsTest(APITestCase):
    # Shared fixtures for the sentry-apps index endpoint tests: a superuser
    # and a regular org owner, plus three SentryApps covering the relevant
    # visibility cases (published/owned, unpublished/owned, unpublished/unowned).
    def setUp(self):
        self.superuser = self.create_user(email='a@example.com', is_superuser=True)
        self.user = self.create_user(email='boop@example.com')
        self.org = self.create_organization(owner=self.user)
        self.super_org = self.create_organization(owner=self.superuser)
        # Published app owned by self.user's org -- visible to everyone.
        self.published_app = self.create_sentry_app(
            name='Test',
            organization=self.org,
            published=True,
        )
        # Unpublished app owned by self.user's org.
        self.unpublished_app = self.create_sentry_app(
            name='Testin',
            organization=self.org,
        )
        # Unpublished app owned by an unrelated organization.
        self.unowned_unpublished_app = self.create_sentry_app(
            name='Nosee',
            organization=self.create_organization(),
            scopes=(),
            webhook_url='https://example.com',
        )
        self.url = reverse('sentry-api-0-sentry-apps')
class GetSentryAppsTest(SentryAppsTest):
    """Coverage for ``GET /sentry-apps/``: visibility rules per user role
    and the ``?status=`` filter.

    Fixes a vacuous assertion in ``test_user_filter_on_published`` that
    compared a model instance (instead of its uuid) against a set of uuid
    strings, which was always true.  The repeated expected-payload dicts
    are consolidated into the ``_app_payload`` helper.
    """

    def _app_payload(self, app):
        """Serialized representation the API is expected to return for *app*
        (all fixture apps are owned by ``self.org`` and carry empty
        scopes/events)."""
        return {
            'name': app.name,
            'author': app.author,
            'slug': app.slug,
            'scopes': [],
            'events': [],
            'status': app.get_status_display(),
            'uuid': app.uuid,
            'webhookUrl': app.webhook_url,
            'redirectUrl': app.redirect_url,
            'isAlertable': app.is_alertable,
            'clientId': app.application.client_id,
            'clientSecret': app.application.client_secret,
            'overview': app.overview,
            'schema': {},
            'owner': {
                'id': self.org.id,
                'slug': self.org.slug,
            }
        }

    def test_superuser_sees_all_apps(self):
        """Superusers see published and unpublished apps from every org."""
        self.login_as(user=self.superuser, superuser=True)
        response = self.client.get(self.url, format='json')
        response_uuids = set(o['uuid'] for o in response.data)
        assert response.status_code == 200
        assert self.published_app.uuid in response_uuids
        assert self.unpublished_app.uuid in response_uuids
        assert self.unowned_unpublished_app.uuid in response_uuids

    def test_users_see_published_apps(self):
        """Regular users see the full payload of published apps."""
        self.login_as(user=self.user)
        response = self.client.get(self.url, format='json')
        assert response.status_code == 200
        assert self._app_payload(self.published_app) in json.loads(response.content)

    def test_superuser_filter_on_published(self):
        """``?status=published`` for a superuser returns only published apps."""
        self.login_as(user=self.superuser, superuser=True)
        url = u'{}?status=published'.format(self.url)
        response = self.client.get(url, format='json')
        assert response.status_code == 200
        assert self._app_payload(self.published_app) in json.loads(response.content)
        response_uuids = set(o['uuid'] for o in response.data)
        assert self.unpublished_app.uuid not in response_uuids
        assert self.unowned_unpublished_app.uuid not in response_uuids

    def test_superuser_filter_on_unpublished(self):
        """``?status=unpublished`` for a superuser returns every unpublished app."""
        self.login_as(user=self.superuser, superuser=True)
        url = u'{}?status=unpublished'.format(self.url)
        response = self.client.get(url, format='json')
        assert response.status_code == 200
        response_uuids = set(o['uuid'] for o in response.data)
        assert self.unpublished_app.uuid in response_uuids
        assert self.unowned_unpublished_app.uuid in response_uuids
        assert self.published_app.uuid not in response_uuids

    def test_user_filter_on_unpublished(self):
        """``?status=unpublished`` for a user returns only their org's apps."""
        self.login_as(user=self.user)
        url = u'{}?status=unpublished'.format(self.url)
        response = self.client.get(url, format='json')
        assert response.status_code == 200
        assert self._app_payload(self.unpublished_app) in json.loads(response.content)
        response_uuids = set(o['uuid'] for o in response.data)
        assert self.published_app.uuid not in response_uuids
        assert self.unowned_unpublished_app.uuid not in response_uuids

    def test_user_filter_on_published(self):
        """``?status=published`` for a user returns only published apps."""
        self.login_as(user=self.user)
        url = u'{}?status=published'.format(self.url)
        response = self.client.get(url, format='json')
        assert response.status_code == 200
        response_uuids = set(o['uuid'] for o in response.data)
        assert self.published_app.uuid in response_uuids
        # Bug fix: compare the uuid string (not the model instance) against
        # the set of uuid strings; the old form was vacuously true.
        assert self.unpublished_app.uuid not in response_uuids
        assert self.unowned_unpublished_app.uuid not in response_uuids

    def test_users_dont_see_unpublished_apps_their_org_owns(self):
        """Unpublished apps are hidden in the unfiltered list, even if owned."""
        self.login_as(user=self.user)
        response = self.client.get(self.url, format='json')
        assert response.status_code == 200
        assert self.unpublished_app.uuid not in [
            a['uuid'] for a in response.data
        ]

    def test_users_dont_see_unpublished_apps_outside_their_orgs(self):
        """Unpublished apps of other orgs never appear in the unfiltered list."""
        self.login_as(user=self.user)
        response = self.client.get(self.url, format='json')
        assert response.status_code == 200
        assert self.unowned_unpublished_app.uuid not in [
            a['uuid'] for a in response.data
        ]
class PostSentryAppsTest(SentryAppsTest):
    """POST /sentry-apps/ behavior: creation, validation errors, and permissions.

    All requests go through ``_post``, which supplies a valid default body that
    individual tests override per-field.
    """

    @with_feature('organizations:sentry-apps')
    def test_creates_sentry_app(self):
        self.login_as(user=self.user)
        response = self._post()
        expected = {
            'name': 'MyApp',
            'scopes': ['project:read', 'event:read'],
            'events': ['issue'],
            'webhookUrl': 'https://example.com',
        }
        assert response.status_code == 201, response.content
        # Subset check: every expected key/value pair must appear in the response.
        assert six.viewitems(expected) <= six.viewitems(json.loads(response.content))

    @with_feature('organizations:sentry-apps')
    def test_non_unique_app_slug(self):
        from sentry.mediators import sentry_apps
        self.login_as(user=self.user)
        sentry_app = self.create_sentry_app(
            name='Foo Bar',
            organization=self.org,
        )
        # Even after the app is destroyed, its name/slug stays reserved.
        sentry_apps.Destroyer.run(sentry_app=sentry_app, user=self.user)
        response = self._post(**{'name': sentry_app.name})
        assert response.status_code == 400
        assert response.data == \
            {"name": ["Name Foo Bar is already taken, please use another."]}

    @with_feature('organizations:sentry-apps')
    def test_invalid_with_missing_webhook_url_scheme(self):
        # FIX: renamed from "webhool" typo; covers webhook URLs with no scheme.
        self.login_as(user=self.user)
        kwargs = {'webhookUrl': 'example.com'}
        response = self._post(**kwargs)
        assert response.status_code == 400
        assert response.data == \
            {'webhookUrl': ['URL must start with http[s]://']}

    @with_feature('organizations:sentry-apps')
    def test_cannot_create_app_without_correct_permissions(self):
        self.login_as(user=self.user)
        # Requesting 'issue' events (the _post default) without event:read scope.
        kwargs = {'scopes': ('project:read',)}
        response = self._post(**kwargs)
        assert response.status_code == 400
        assert response.data == \
            {'events': ['issue webhooks require the event:read permission.']}

    @with_feature('organizations:sentry-apps')
    def test_wrong_schema_format(self):
        self.login_as(user=self.user)
        kwargs = {'schema': {
            'elements': [
                {
                    'type': 'alert-rule-action',
                    'required_fields': [
                        {
                            'type': 'select',
                            'label': 'Channel',
                            'name': 'channel',
                            'options': [
                                # option items should have 2 elements
                                # i.e. ['channel_id', '#general']
                                ['#general'],
                            ]
                        },
                    ],
                }
            ],
        }}
        response = self._post(**kwargs)
        assert response.status_code == 400
        assert response.data == \
            {'schema': ["['#general'] is too short"]}

    @with_feature('organizations:sentry-apps')
    def test_allows_empty_schema(self):
        self.login_as(self.user)
        response = self._post(schema={})
        assert response.status_code == 201, response.content

    @with_feature('organizations:sentry-apps')
    def test_missing_name(self):
        self.login_as(self.user)
        response = self._post(name=None)
        assert response.status_code == 400, response.content
        assert 'name' in response.data

    @with_feature('organizations:sentry-apps')
    def test_invalid_events(self):
        self.login_as(self.user)
        response = self._post(events=['project'])
        assert response.status_code == 400, response.content
        assert 'events' in response.data

    @with_feature('organizations:sentry-apps')
    def test_invalid_scope(self):
        self.login_as(self.user)
        response = self._post(scopes=('not:ascope', ))
        assert response.status_code == 400, response.content
        assert 'scopes' in response.data

    @with_feature('organizations:sentry-apps')
    def test_missing_webhook_url(self):
        self.login_as(self.user)
        response = self._post(webhookUrl=None)
        assert response.status_code == 400, response.content
        assert 'webhookUrl' in response.data

    @with_feature('organizations:sentry-apps')
    def test_allows_empty_permissions(self):
        self.login_as(self.user)
        response = self._post(scopes=None)
        assert response.status_code == 201, response.content
        assert response.data['scopes'] == []

    @with_feature('organizations:sentry-apps')
    def test_creates_internal_integration(self):
        self.create_project(organization=self.org)
        self.login_as(self.user)
        response = self._post(isInternal=True)
        assert response.data['slug'] == 'myapp'
        assert response.data['status'] == SentryAppStatus.as_str(SentryAppStatus.INTERNAL)

    def _post(self, **kwargs):
        """POST a sentry-app payload; *kwargs* override the valid defaults below."""
        body = {
            'name': 'MyApp',
            'organization': self.org.slug,
            'author': 'Sentry',
            'schema': None,
            'scopes': ('project:read', 'event:read'),
            'events': ('issue',),
            'webhookUrl': 'https://example.com',
            'redirectUrl': '',
            'isAlertable': False,
        }
        body.update(**kwargs)
        return self.client.post(
            self.url,
            body,
            headers={'Content-Type': 'application/json'},
        )
| 36.754438
| 90
| 0.606214
|
4a156eec493bb5b9ebd21598eaca40741549a007
| 353
|
py
|
Python
|
tests/miscellaneous/test_utils.py
|
lucasvieirasilva/aws-ssm-secrets-cli
|
861779099dd5a77c7166d69cad90bba5a1145001
|
[
"MIT"
] | 4
|
2021-05-18T16:11:26.000Z
|
2022-01-10T14:23:11.000Z
|
tests/miscellaneous/test_utils.py
|
lucasvieirasilva/aws-ssm-secrets-cli
|
861779099dd5a77c7166d69cad90bba5a1145001
|
[
"MIT"
] | 45
|
2020-09-14T08:57:53.000Z
|
2021-09-16T16:13:58.000Z
|
tests/miscellaneous/test_utils.py
|
lucasvieirasilva/aws-ssm-secrets-cli
|
861779099dd5a77c7166d69cad90bba5a1145001
|
[
"MIT"
] | null | null | null |
import pytest
from aws_secrets.miscellaneous import utils
@pytest.mark.parametrize("value,size,expected", [
    ('#', 5, "#####"),
    ('=', 10, "=========="),
    ('=', 1, "=")
])
def test_repeat_to_length(value, size, expected):
    """
    repeat_to_length should tile *value* until the result is *size* characters long.
    """
    assert utils.repeat_to_length(value, size) == expected
| 23.533333
| 58
| 0.594901
|
4a156f868db1cc61902c372eab6ef4f13bf98db3
| 910
|
py
|
Python
|
app/migrations/0003_admin.py
|
kuwii/some-information-system
|
82ecb6d3ee07294ce80bf22b9bbfcf71ce957c00
|
[
"PSF-2.0",
"MIT"
] | null | null | null |
app/migrations/0003_admin.py
|
kuwii/some-information-system
|
82ecb6d3ee07294ce80bf22b9bbfcf71ce957c00
|
[
"PSF-2.0",
"MIT"
] | null | null | null |
app/migrations/0003_admin.py
|
kuwii/some-information-system
|
82ecb6d3ee07294ce80bf22b9bbfcf71ce957c00
|
[
"PSF-2.0",
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-06 03:11
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Admin model, one-to-one with the auth user."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('app', '0002_auto_20170106_1109'),
    ]
    operations = [
        migrations.CreateModel(
            name='Admin',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32)),
                ('phone', models.CharField(max_length=32)),
                # Deleting the user cascades to its Admin profile.
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='admin', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 32.5
| 143
| 0.640659
|
4a156fcfa914616a8b18e0b265cf47828413acb8
| 4,381
|
py
|
Python
|
www/python/server.py
|
amenoyoya/tus-upload-form
|
ed0fc6da58ccb50a8911205ca07d012eec7f7009
|
[
"MIT"
] | null | null | null |
www/python/server.py
|
amenoyoya/tus-upload-form
|
ed0fc6da58ccb50a8911205ca07d012eec7f7009
|
[
"MIT"
] | 2
|
2019-08-25T12:40:29.000Z
|
2019-08-31T03:49:00.000Z
|
www/python/server.py
|
amenoyoya/tus-upload-form
|
ed0fc6da58ccb50a8911205ca07d012eec7f7009
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template, request, make_response
from flask_cors import CORS
from datetime import datetime, timedelta
import os, uuid, base64
# metadata -> dict
def metadata2dict(metadata):
    """Parse a tus ``Upload-Metadata`` header into a dict.

    The header is a comma-separated list of ``key base64(value)`` pairs;
    a key with no value maps to the empty string.
    """
    parsed = {}
    for entry in metadata.split(','):
        parts = entry.split(' ')
        if len(parts) > 1:
            parsed[parts[0]] = base64.b64decode(parts[1]).decode('utf-8')
        else:
            parsed[parts[0]] = ''
    return parsed
# get saved file size
## return false if not exists
def get_saved_file_size(file_id):
    """Return the size in bytes of a previously uploaded file, or False if absent."""
    target = f'./static/uploaded/{file_id}'
    return os.path.getsize(target) if os.path.exists(target) else False
# save file, resumable
def save_file(file_id, content):
    """Append *content* to the upload file for *file_id* and return its total size.

    Files are placed under ./static/uploaded so they are downloadable via the
    static route.  Returns the on-disk size after the write (used as the tus
    Upload-Offset).
    """
    path = f'./static/uploaded/{file_id}'
    # BUG FIX: os.mkdir fails with FileNotFoundError when ./static itself does
    # not exist; makedirs creates intermediate directories and is idempotent.
    os.makedirs('./static/uploaded', exist_ok=True)
    # Append for resumed uploads, create on the first chunk.
    mode = 'ab' if os.path.isfile(path) else 'wb'
    with open(path, mode) as f:
        f.write(content)
    # The file exists now, so its size can be read directly.
    return os.path.getsize(path)
# ---
# Base-URL helper for routing.
## Base URL: read from the uWSGI environment, with a LAN default for dev.
url_for = lambda url: request.environ.get('ROOT_URL', 'http://192.168.11.237:3333/') + url
app = Flask(__name__)
CORS(app)
# Override Jinja's url_for with the helper above.
app.jinja_env.globals.update(url_for = url_for)
files = {}  # in-memory map: file_id -> expected total upload size (bytes)
# home
@app.route('/', methods=['GET'])
def home():
    """Render the upload form page."""
    return render_template('home.jinja')
# create file upload
@app.route('/api/files/', methods=['POST'])
def upload():
    """tus creation endpoint: allocate a file id and return its upload Location."""
    data = {
        'content_length': request.headers.get('Content-Length'),
        'upload_length': request.headers.get('Upload-Length'),
        'tus_resumable': request.headers.get('Tus-Resumable'),
        'upload_metadata': metadata2dict(request.headers.get('Upload-Metadata')),
        'id': str(uuid.uuid4())  # generate an arbitrary file id
    }
    if data['upload_metadata']['fileext'] != '':
        # Preserve the original file extension when the client supplied one.
        data['id'] += '.' + data['upload_metadata']['fileext']
    # 201 Created with the Location the client will PATCH chunks to.
    res = make_response('', 201)
    res.headers['Location'] = '/api/files/' + data['id']
    res.headers['Tus-Resumable'] = data['tus_resumable']
    #res.headers['Access-Control-Allow-Origin'] = request.environ['HTTP_ORIGIN']
    #res.headers['Access-Control-Allow-Headers'] = 'access-control-allow-origin,content-type'
    res.headers['Access-Control-Expose-Headers'] = 'Upload-Offset, Location, Upload-Length, Tus-Version, Tus-Resumable, Tus-Max-Size, Tus-Extension, Upload-Metadata'
    files[data['id']] = int(data['upload_length'])  # remember the expected total size
    return res
# resume file upload
@app.route('/api/files/<string:file_id>', methods=['PATCH'])
def resume(file_id):
    """tus PATCH endpoint: append one chunk to the file and report the new offset."""
    data = {
        'content_type': request.headers.get('Content-Type'),
        'content_length': request.headers.get('Content-Length'),  # size of this chunk
        'upload_offset': request.headers.get('Upload-Offset'),  # bytes already uploaded
        'tus_resumable': request.headers.get('Tus-Resumable')
    }
    # Persist the chunk; returns the total saved size.
    saved_size = save_file(file_id, request.get_data())
    # 204 No Content per the tus protocol.
    res = make_response('', 204)
    # NOTE(review): a datetime object is assigned here, so the header is
    # serialized via str() rather than RFC 7231 date format — confirm clients accept it.
    res.headers['Upload-Expires'] = datetime.now() + timedelta(hours=1)  # resume deadline = 1 hour
    res.headers['Upload-Offset'] = 0 if saved_size == False else saved_size  # bytes saved so far
    res.headers['Tus-Resumable'] = data['tus_resumable']
    #res.headers['Access-Control-Allow-Origin'] = request.environ['HTTP_ORIGIN']
    #res.headers['Access-Control-Allow-Headers'] = 'access-control-allow-origin,content-type'
    res.headers['Access-Control-Expose-Headers'] = 'Upload-Offset, Location, Upload-Length, Tus-Version, Tus-Resumable, Tus-Max-Size, Tus-Extension, Upload-Metadata'
    return res
# confirm uploaded file
@app.route('/api/files/<string:file_id>', methods=['HEAD'])
def confirm(file_id):
    """tus HEAD endpoint: report the current offset, or 404 if nothing was saved."""
    saved_size = get_saved_file_size(file_id)  # bytes saved so far, or False
    res = make_response('', 404 if saved_size == False else 200)
    if isinstance(saved_size, int):
        res.headers['Upload-Offset'] = saved_size
    res.headers['Tus-Resumable'] = request.headers.get('Tus-Resumable')
    return res
if __name__ == "__main__":
    # run server: http://localhost:3333
    # NOTE(review): debug=True with host 0.0.0.0 is unsafe outside development.
    app.run(port=3333, debug=True, host="0.0.0.0")
| 40.192661
| 205
| 0.642319
|
4a1570637989fa2210517a813b7f71217c27d6aa
| 9,491
|
py
|
Python
|
haikuza_main.py
|
justinmklam/project-haikuza
|
dda9e75977a043a8eaa818746764c0685b6d6172
|
[
"MIT"
] | null | null | null |
haikuza_main.py
|
justinmklam/project-haikuza
|
dda9e75977a043a8eaa818746764c0685b6d6172
|
[
"MIT"
] | null | null | null |
haikuza_main.py
|
justinmklam/project-haikuza
|
dda9e75977a043a8eaa818746764c0685b6d6172
|
[
"MIT"
] | null | null | null |
__author__ = 'Justin'
# from numpy import random
from syllable_counter import sylco
# from haiku_training import LearnHaiku
from twitter import Twitter
# import nltk
import time
import logging
import lyrics as lyricwiki
from markov import Markov
from gsheets import GSheets
from time import strftime
class Haiku(object):
    """Generate a 5-7-5 haiku from a song's lyrics (Python 2).

    Lyrics are fetched from LyricsWikia, tokenized, and fed to a Markov chain;
    lines are assembled word-by-word until the syllable targets are met.
    """
    class Line(object):
        """Mutable state for a single haiku line under construction."""
        def __init__(self):
            self.words = []           # words accepted so far
            self.string = ''          # space-joined rendering of self.words
            self.syls = 0             # syllable count of the current line
            self.syls_remaining = 0   # syllables still needed to hit the target
    def __init__(self, artist, title):
        wordlist = self.get_song(artist, title)
        self.markov = Markov(wordlist)
    def get_song(self, artist, title):
        """Fetch lyrics and return them as a punctuation-stripped word list.

        Exits the process on lookup or formatting failures.
        """
        try:
            song = lyricwiki.getlyrics(artist, title)
            song = song.encode('ascii', 'ignore')
            # Strip punctuation and flatten newlines into spaces before splitting.
            wordlist = song.translate(None, '`.,-";:!?@#$%^&*()[]{}/').replace('\n', ' ').split(' ')
            return wordlist
        except IOError:
            errorstr = "Couldn't find %s by %s in LyricsWikia. Check spelling?" %(title, artist)
            print errorstr
            exit()
        except TypeError:
            print "Something might be fucky with the song formatting..."
            exit()
    def write_haiku(self):
        """Build a full haiku: three lines of 5, 7, and 5 syllables."""
        meter = [5, 7, 5]
        haiku = ''
        numlines = len(meter)
        for i in range(numlines):
            # Newline between lines, period after the last one.
            if i != numlines-1:
                endchar = '\n'
            else:
                endchar = '.'
            haiku += self.make_line(meter[i]) + endchar
        return haiku
    def make_line(self, syl_max):
        """Assemble one line with exactly *syl_max* syllables.

        Repeatedly appends Markov-generated words; overshoots are trimmed by
        deleting trailing words, with the deletion count escalating after
        repeated failures.
        """
        line = self.Line()
        line.syls_remaining = syl_max
        num_delete = 1
        num_tries = 0
        thresh_tries = 5
        i = 0
        alwayscaps = ['i', "i'm", "i'll"]
        phrase, numwords = self.new_markov_line()
        # print phrase
        while True:
            word, i = self.get_word(phrase, numwords, i)
            line.syls = sylco(word)
            if word in alwayscaps:
                word = word.capitalize()
            # print phrase_struct[i], word, syls
            line.words.append(word)
            # print line.words
            if len(line.words) == 1:
                # Capitalize the first word of the line.
                try:
                    line.words[0] = line.words[0].capitalize()
                except IndexError:
                    print line.words
                    print "Oops couldn't capitalize %s, %d!" %(word, i)
            line.syls_remaining -= line.syls
            line.string = ' '.join(line.words)
            line.syls = self.sylco_line(line.words)
            # print "Max: %d, Curr: %d, Remaining: %d" %(syl_max, line.syls, line.syls_remaining)
            # case1: exact syllable match; case2: overshot the target.
            case1 = line.syls == syl_max and line.syls_remaining == 0
            case2 = line.syls > syl_max or line.syls_remaining < 0
            if case1:
                approved, line = self.check_last_word(line)
                if approved:
                    break
                else:
                    num_tries += 1
            elif case2:
                line = self.delete_word(num_delete, line)
                num_tries += 1
            breakloop, num_tries, num_delete = self.check_num_tries(num_tries, thresh_tries, num_delete, syl_max)
            if breakloop:
                break
            # Exhausted the current Markov phrase: fetch a new one.
            if i >= numwords:
                i = 0
                phrase, numwords = self.new_markov_line()
            else:
                i += 1
        return line.string
    def check_num_tries(self, num_tries, thresh_tries, num_delete, syl_max):
        """Escalate the per-failure deletion count; give up past *syl_max* deletions."""
        breakloop = False
        if num_tries > thresh_tries:
            # phrase, numwords = self.new_markov_line()
            num_delete += 1
            num_tries = 0
        if num_delete > syl_max:
            print "FUCKING DAMNIT."
            breakloop = True
        return breakloop, num_tries, num_delete
    def check_last_word(self, line):
        """Reject lines ending in articles/conjunctions; trim the bad last word."""
        exceptions = ['the', 'they', 'a', 'for', 'I', "I'll", "I'm", 'my',
        'your', "you're", 'and', 'to', 'we', 'were', "we're", "it's", "who",
        'so', 'he', 'she', 'him', 'her', 'but', 'is', 'of']
        if line.words[-1] in exceptions:
            # print "Switching last word for better haiku-ness..."
            approved = False
            # print "PRE: %d" %line.syls_remaining
            # print line.string
            line_post = self.delete_word(1, line)
            # print "POST: %d" %line_post.syls_remaining
            # print line_post.string
        else:
            # print "Last word is good to go!"
            approved = True
            line_post = line
        return approved, line_post
    def get_word(self, phrase, numwords, j):
        """Return the next non-empty word of *phrase* starting at index *j*."""
        while phrase[j] == '':
            j += 1
            if j >= numwords:
                break
        return phrase[j], j
    def delete_word(self, num_delete, line):
        """Remove *num_delete* trailing words, refunding their syllables."""
        for n in range(num_delete):
            if line.words != []:
                syls = sylco(line.words[-1])
                # print "Del: %s, %d" %(line.words[-1], syls)
                del line.words[-1]
                line.syls_remaining += syls
                # print "Remaining: %d" %syls
        line.string = line.string.strip(' ')
        return line
    def sylco_line(self, line_words):
        """Total syllable count of a word list."""
        line_syls = 0
        for k in range(len(line_words)):
            line_syls += sylco(line_words[k])
        return line_syls
    def new_markov_line(self):
        """Generate a fresh lowercase Markov phrase; returns (words, last_index)."""
        phrase = self.markov.generate_markov_text(10)
        phrase = phrase.lower().strip(' ').split(' ')
        numwords = len(phrase) - 1
        return phrase, numwords
    def save_haiku(self, haiku_str):
        """Append the haiku with a timestamp to the diary file."""
        fname = "_Diary of a Haikuza.txt"
        ts = time.strftime("%Y/%m/%d, %H:%M:%S")
        to_write = "%s\n\n%s\n" %(ts, haiku_str)
        with open(fname, 'a') as inputfile:
            inputfile.write(to_write)
        print "%s - Haiku saved to '%s'." % (ts, fname)
def flatten(x):
    """Recursively flatten nested iterables into one flat list.

    Strings are treated as atoms even though they are iterable.
    """
    flat = []
    for item in x:
        if hasattr(item, "__iter__") and not isinstance(item, str):
            flat.extend(flatten(item))
        else:
            flat.append(item)
    return flat
def haiku_from_twitter(tw):
    """Process pending Twitter song requests and tweet a haiku for each."""
    new_tweets = tw.get_new_requests()
    if new_tweets:
        for i in range(len(new_tweets)):
            try:
                user, artist, title = tw.parse_request(new_tweets[i])
            except IndexError:
                # Malformed request: skip it and keep processing the rest.
                print "Incorrectly formatted tweet string: '%s'" %(new_tweets[i])
                continue
            haiku = Haiku(artist, title).write_haiku()
            # Self-requests credit the artist; otherwise credit the requester.
            if user == 'thehaikuza':
                tweet_str = format_haiku_tweet(haiku, artist, title)
            else:
                tweet_str = format_haiku_tweet(haiku, user, title)
            # song_str = construct_song_label(title, artist)
            # tweet_str = "%s%s %s" %(user, haiku, song_str)
            # print "%s - %d chars." %(tweet_str, len(tweet_str))
            tw.tweet(tweet_str)
    else:
        print "No new song requests :("
def haiku_from_local(artist, title, iftweet):
    """Generate a haiku for a locally chosen song, save it, and optionally tweet it.

    Returns the formatted tweet string either way.
    """
    generator = Haiku(artist, title)
    poem = generator.write_haiku()
    generator.save_haiku(poem)
    tweet_str = format_haiku_tweet(poem, artist, title)
    if iftweet:
        Twitter().tweet(tweet_str)
    return tweet_str
def haiku_from_gsheet(tw):
    """Pull the next song from the Google Sheet queue and tweet a haiku for it."""
    gs = GSheets()
    title, artist = gs.get_gsheet_song()
    if not title:
        # Empty queue: nothing to do.
        print "Nothing to tweet in gsheets... yet"
        return
    else:
        hk = Haiku(artist, title)
        haiku = hk.write_haiku()
        # song_str = construct_song_label(title, artist)
        tweet_str = format_haiku_tweet(haiku, artist, title)
        try:
            tw.tweet(tweet_str)
            # print "%s - %d chars." %(tweet_str, len(tweet_str))
        except AttributeError:
            # tw was None or not a Twitter client: abort the run.
            print "Couldn't tweet from gsheets."
            exit()
def construct_song_label(raw_title, raw_artist):
    """Build the "#Title by @Artist" suffix appended to haiku tweets."""
    hashtag = raw_title.replace("'", '').title().replace(' ', '')
    handle = raw_artist.replace("'", '').title().replace(' ', '')
    # Taylor Swift's real Twitter handle carries a trailing '13'.
    if raw_artist.lower() == 'taylor swift':
        handle += '13'
    return "\n\n#%s \nby @%s" %(hashtag, handle)
def format_haiku_tweet(haiku, raw_artist, raw_title):
    """Format the full tweet: 'Dear @Artist:' + haiku + '#Title' hashtag."""
    title = raw_title.replace("'", '').title().replace(' ', '')
    artist = raw_artist.replace("'", '').title().replace(' ', '')
    # Taylor Swift's real Twitter handle carries a trailing '13'.
    if raw_artist.lower() == 'taylor swift':
        artist += '13'
    tweet_str = "Dear @%s:\n\n%s\n\n#%s" %(artist, haiku, title)
    print "%s - %d chars." %(tweet_str, len(tweet_str))
    return tweet_str
def haiku_autogen():
    """Automated run: log to file, then service the gsheet queue and Twitter requests."""
    logging.basicConfig(level=logging.DEBUG, filename='_haikuza.log')
    twitter_client = Twitter()
    haiku_from_gsheet(twitter_client)
    haiku_from_twitter(twitter_client)
if __name__ == '__main__':
    # Log the start time, then run the automated generation pipeline.
    timenow = strftime("%a, %d %b %Y %H:%M:%S")
    print "\n%s" % timenow
    # artist = "bruce springsteen"
    # title = "blinded by the light"
    # hk = haiku_from_local(artist, title, False)
    #
    haiku_autogen()
| 29.566978
| 118
| 0.521125
|
4a15717ffdfa1b8c4a497ccb445ebb3faba423b4
| 386
|
py
|
Python
|
objectModel/Python/cdm/objectmodel/spew_catcher.py
|
rt112000/CDM
|
34bd34f9260140a8f8aa02bd87c23033f3daad4c
|
[
"CC-BY-4.0",
"MIT"
] | 884
|
2019-05-10T02:09:10.000Z
|
2022-03-31T14:02:00.000Z
|
objectModel/Python/cdm/objectmodel/spew_catcher.py
|
spbast/CDM
|
bf97a3720c97ee4c9df3625084cf8b3bc65ff9c7
|
[
"CC-BY-4.0",
"MIT"
] | 171
|
2019-06-10T11:34:37.000Z
|
2022-03-31T22:50:12.000Z
|
objectModel/Python/cdm/objectmodel/spew_catcher.py
|
spbast/CDM
|
bf97a3720c97ee4c9df3625084cf8b3bc65ff9c7
|
[
"CC-BY-4.0",
"MIT"
] | 340
|
2019-05-07T18:00:16.000Z
|
2022-03-31T12:00:15.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
from abc import ABC, abstractmethod
class SpewCatcher(ABC):
    """Abstract sink for line-oriented "spew" output."""
    @abstractmethod
    def clear(self) -> None:
        """Discard any accumulated output. Subclasses must implement."""
        raise NotImplementedError()
    def spew_line(self, spew: str) -> None:
        """Consume one line of output.

        NOTE(review): raises unconditionally but is not marked @abstractmethod,
        so the ABC machinery does not force subclasses to override it — confirm
        whether that omission is intentional.
        """
        raise NotImplementedError()
| 25.733333
| 94
| 0.717617
|
4a1573071bb4d72eb4cc27677343aba0e2e639c6
| 2,234
|
py
|
Python
|
app_model/src/model/train.py
|
DavidBky/dsti-2021-ml-pipeline
|
61deb0b134c8359044ac64ee1dcacd04e493a7e8
|
[
"MIT"
] | 1
|
2020-10-06T14:27:22.000Z
|
2020-10-06T14:27:22.000Z
|
app_model/src/model/train.py
|
DavidBky/dsti-2021-ml-pipeline
|
61deb0b134c8359044ac64ee1dcacd04e493a7e8
|
[
"MIT"
] | null | null | null |
app_model/src/model/train.py
|
DavidBky/dsti-2021-ml-pipeline
|
61deb0b134c8359044ac64ee1dcacd04e493a7e8
|
[
"MIT"
] | 3
|
2020-10-06T13:01:02.000Z
|
2020-10-06T13:33:37.000Z
|
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from sklearn.ensemble import RandomForestClassifier
import mlflow
import mlflow.sklearn
try:
import cPickle as pickle
except ImportError:
import pickle
def eval_metrics(actual, pred):
    """Return (rmse, mae, r2) regression metrics for *pred* against *actual*."""
    mse = mean_squared_error(actual, pred)
    return np.sqrt(mse), mean_absolute_error(actual, pred), r2_score(actual, pred)
# Destination for the pickled model (absolute path: expects a mounted volume).
output = "/model_data/train_output"
df = pd.read_csv('/data/googleplaystore.csv')  # load data set
df.info()
# Keep only numeric-ish feature columns; everything else is dropped.
df = df.drop([
    "App",
    "Reviews",
    "Size",
    "Installs",
    "Type",
    "Content Rating",
    "Genres",
    "Last Updated",
    "Current Ver",
    "Android Ver",
    "Price"
], axis=1)
df.info()
# Target: the app Category, encoded as pandas category codes below.
labels = df[["Category"]]
# NOTE(review): assigning into a slice of df may raise SettingWithCopyWarning.
labels["Category"] = labels["Category"].astype("category")
labels.info()
df = df.drop("Category", axis=1)
df = df.fillna(0)
df.info()
# Build training and testing set
X_train, X_test, y_train, y_test = train_test_split(
    df, labels["Category"].cat.codes, test_size=0.33
)
print("Labels")
print(pd.get_dummies(labels))
print(y_train)
# Set hyperparameters
estimators = 100
jobs = 3
with mlflow.start_run():
    # Fit, predict, and log params/metrics/model to the active MLflow run.
    clf = RandomForestClassifier(n_estimators=estimators,
                                 n_jobs=jobs)
    clf.fit(X_train, y_train)
    predicted = clf.predict(X_test)
    # NOTE(review): RMSE/MAE/R2 are regression metrics applied to category
    # codes here — classification metrics (accuracy/F1) may be more apt.
    (rmse, mae, r2) = eval_metrics(y_test, predicted)
    mlflow.log_param("estimators", estimators)
    mlflow.log_param("jobs", jobs)
    mlflow.log_metric("rmse", rmse)
    mlflow.log_metric("r2", r2)
    mlflow.log_metric("mae", mae)
    mlflow.sklearn.log_model(clf, "clf")
    # Also persist the fitted model as a pickle for downstream consumers.
    with open(output, 'wb') as fd:
        pickle.dump(clf, fd)
# X = data.iloc[:, 0].values.reshape(-1, 1) # values converts it into a numpy array
# Y = data.iloc[:, 1].values.reshape(-1, 1) # -1 means that calculate the dimension of rows, but have 1 column
# linear_regressor = LinearRegression() # create object for the class
# linear_regressor.fit(X, Y) # perform linear regression
# Y_pred = linear_regressor.predict(X) # make predictions
| 23.765957
| 111
| 0.671889
|
4a157340443cbe1f9ecc1d8c1d81aeccfbee704a
| 1,027
|
py
|
Python
|
setup.py
|
LFLab/aiotodoist
|
3eff1927053445ea72aae04f1bf0d851e5f1f8d6
|
[
"MIT"
] | 1
|
2021-03-03T09:17:32.000Z
|
2021-03-03T09:17:32.000Z
|
setup.py
|
LFLab/aiotodoist
|
3eff1927053445ea72aae04f1bf0d851e5f1f8d6
|
[
"MIT"
] | 1
|
2021-12-23T03:59:35.000Z
|
2021-12-23T04:09:34.000Z
|
setup.py
|
LFLab/aiotodoist
|
3eff1927053445ea72aae04f1bf0d851e5f1f8d6
|
[
"MIT"
] | null | null | null |
import re
from pathlib import Path
from setuptools import setup, find_packages
# Long description for PyPI comes straight from the README.
with open("README.md", "r") as f:
    long_description = f.read()
# Extract __version__ from the package's __init__.py without importing it.
txt = (Path(__file__).parent / 'aiotodoist' / '__init__.py').read_text('utf-8')
version = re.findall(r"^__version__ = '([^']+)'\r?", txt, re.M)[0]
setup(
    name="aiotodoist",
    version=version,
    author="Lanfon",
    author_email="lanfon72@gmail.com",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/LFLab/aio-todoist",
    packages=find_packages(),
    # BUG FIX: '>=3.*' is not a valid PEP 440 specifier (the '.*' wildcard is
    # only legal with == / !=); modern pip/setuptools reject it.  Express the
    # same constraint with plain range comparisons.
    install_requires=["aiohttp>=3,<4", "todoist-python>=8"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9'
    ],
    python_requires='>=3.7',
)
| 30.205882
| 79
| 0.631938
|
4a15739a8030ce2b5d6c23b88137eb35dc7dbc72
| 6,847
|
py
|
Python
|
conf_media_driver.py
|
aukhina/product-configs
|
958db11ec5fe631b0c39f02d0fb34742ae206ade
|
[
"MIT"
] | null | null | null |
conf_media_driver.py
|
aukhina/product-configs
|
958db11ec5fe631b0c39f02d0fb34742ae206ade
|
[
"MIT"
] | null | null | null |
conf_media_driver.py
|
aukhina/product-configs
|
958db11ec5fe631b0c39f02d0fb34742ae206ade
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2019-2020 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pathlib import Path
DRIVER_REPO_NAME = 'media-driver'
DRIVER_REPO_DIR = options.get('REPOS_DIR') / DRIVER_REPO_NAME
BUILD_NUM = get_commit_number(DRIVER_REPO_DIR)
DRIVER_VERSION = manifest.get_component(DRIVER_REPO_NAME).version
DRIVER_PKG_VERSION = DRIVER_VERSION + f'.{BUILD_NUM}'
DEPENDENCIES = [
'libva',
'gmmlib'
]
ENABLE_DEVTOOLSET = 'source /opt/rh/devtoolset-6/enable'
# Workaround to run fpm tool on CentOS 6.9
ENABLE_RUBY24 = 'source /opt/rh/rh-ruby24/enable'
GCC_LATEST = '10'
CLANG_VERSION = '10'
options["STRIP_BINARIES"] = True
DRIVER_INSTALL_PREFIX = Path('/opt/intel/msdk_driver')
# Installation by default: /opt/intel/msdk_driver/lib64
DRIVER_LIB_DIR = 'lib64'
def set_env(gcc_latest, clang_version):
    """Point CC/CXX at the requested non-default compiler.

    Only acts when the requested compiler/version pair matches the "latest"
    toolchain this config knows about; older versions keep the system default.
    """
    compiler_version = args.get('compiler_version')
    if args.get('compiler') == "gcc" and compiler_version == gcc_latest:
        # FIX (was TODO): derive the path from the requested version instead of
        # hard-coding gcc-10 — identical today (gcc_latest == '10') and mirrors
        # the clang branch below.
        options["ENV"]['CC'] = f'/usr/bin/gcc-{compiler_version}'
        options["ENV"]['CXX'] = f'/usr/bin/g++-{compiler_version}'
    elif args.get('compiler') == "clang" and compiler_version == clang_version:
        options["ENV"]['CC'] = f'/usr/bin/clang-{compiler_version}'
        options["ENV"]['CXX'] = f'/usr/bin/clang++-{compiler_version}'
    # TODO: add more smart logic or warnings?! (potential danger zone)
# TODO: add more smart logic or warnings?! (potential danger zone)
def get_building_cmd(command, gcc_latest, enable_devtoolset):
    """Wrap *command* with the devtoolset activation where the default toolchain is too old.

    Ubuntu builds (clang, or the latest gcc) run the command as-is; CentOS
    builds first source the devtoolset environment.
    """
    compiler = args.get('compiler')
    has_modern_toolchain = compiler == "clang" or (
        compiler == "gcc" and args.get('compiler_version') == gcc_latest)
    if has_modern_toolchain:
        return command
    return f'{enable_devtoolset} && {command}'  # enable new compiler on CentOS
action('set CC and CXX environment variables',
callfunc=(set_env, [GCC_LATEST, CLANG_VERSION], {}))
cmake_command = [
'cmake3',
f'-DMEDIA_VERSION="{DRIVER_VERSION}"',
f'-DCMAKE_INSTALL_PREFIX={DRIVER_INSTALL_PREFIX}',
# By default install driver to /opt/intel/msdk_driver
f'-DCMAKE_INSTALL_LIBDIR={DRIVER_INSTALL_PREFIX / DRIVER_LIB_DIR}',
f'-DINSTALL_DRIVER_SYSCONF=OFF',
# Path contains iHD_drv_video.so
f'-DLIBVA_DRIVERS_PATH={DRIVER_INSTALL_PREFIX / DRIVER_LIB_DIR}',
f'-DBUILD_TYPE={options["BUILD_TYPE"]}',
]
if product_type == 'public_linux_driver_kernels_off':
cmake_command.append('-DENABLE_KERNELS=OFF')
elif product_type == 'public_linux_driver_nonfree_kernels_off':
cmake_command.append('-DENABLE_NONFREE_KERNELS=OFF')
cmake_command.append(str(DRIVER_REPO_DIR))
cmake = ' '.join(cmake_command)
# Prepare dependencies
# Libva
LIBVA_PATH = options['DEPENDENCIES_DIR'] / 'libva' / 'usr' / 'local'
LIBVA_PKG_CONFIG_PATH = LIBVA_PATH / 'lib64' / 'pkgconfig'
LIBVA_PKG_CONFIG_RPM_PATTERN = {
'^prefix=.+': f'prefix={LIBVA_PATH}',
}
action('LibVA: change pkgconfigs',
stage=stage.EXTRACT,
callfunc=(update_config, [LIBVA_PKG_CONFIG_PATH, LIBVA_PKG_CONFIG_RPM_PATTERN], {}))
# Gmmlib
GMMLIB_PATH = options['DEPENDENCIES_DIR'] / 'gmmlib' / 'usr' / 'local'
GMMLIB_PKG_CONFIG_PATH = GMMLIB_PATH / 'lib64' / 'pkgconfig'
GMMLIB_PKG_CONFIG_RPM_PATTERN = {
'^prefix=.+': f'prefix={GMMLIB_PATH}',
'^includedir=.+': f'includedir={GMMLIB_PATH}/include/igdgmm',
'^libdir=.+': f'libdir={GMMLIB_PATH}/lib64'
}
action('Gmmlib: change pkgconfigs',
stage=stage.EXTRACT,
callfunc=(update_config, [GMMLIB_PKG_CONFIG_PATH, GMMLIB_PKG_CONFIG_RPM_PATTERN], {}))
# Build Media Driver
action('media-driver: cmake',
work_dir=options['BUILD_DIR'],
cmd=get_building_cmd(cmake, GCC_LATEST, ENABLE_DEVTOOLSET),
env={'PKG_CONFIG_PATH': f'{LIBVA_PKG_CONFIG_PATH}:{GMMLIB_PKG_CONFIG_PATH}'})
action('media-driver: build',
cmd=get_building_cmd(f'make -j`nproc`', GCC_LATEST, ENABLE_DEVTOOLSET))
action('media-driver: list artifacts',
cmd=f'echo " " && ls ./media_driver',
verbose=True)
action('media-driver: make install',
stage=stage.INSTALL,
work_dir=options['BUILD_DIR'],
cmd=get_building_cmd(f'make DESTDIR={options["INSTALL_DIR"]} install', GCC_LATEST, ENABLE_DEVTOOLSET))
# Create configuration files
intel_mediasdk_file = options["INSTALL_DIR"] / 'intel-mediasdk.sh'
data = '# add libva driver path/name exporting for intel media solution\n'\
'export LIBVA_DRIVERS_PATH=/opt/intel/msdk_driver/lib64\n'\
'export LIBVA_DRIVER_NAME=iHD'
action('create intel-mediasdk.sh',
stage=stage.INSTALL,
callfunc=(create_file, [intel_mediasdk_file, data], {}))
# Get package installation dir for media-driver
pack_dir = options['INSTALL_DIR'] / DRIVER_INSTALL_PREFIX.relative_to(DRIVER_INSTALL_PREFIX.root)
DRIVER_PACK_DIRS = [
f'{pack_dir}/lib64/={DRIVER_INSTALL_PREFIX / DRIVER_LIB_DIR }',
f'{pack_dir}/include/={DRIVER_INSTALL_PREFIX}/include',
f'{options["INSTALL_DIR"]}/intel-mediasdk.sh=/etc/profile.d/',
]
action('media-driver: create rpm pkg',
stage=stage.PACK,
work_dir=options['PACK_DIR'],
cmd=get_packing_cmd('rpm', DRIVER_PACK_DIRS, ENABLE_RUBY24, DRIVER_PKG_VERSION, DRIVER_REPO_NAME.lower()))
action('media-driver: create deb pkg',
stage=stage.PACK,
work_dir=options['PACK_DIR'],
cmd=get_packing_cmd('deb', DRIVER_PACK_DIRS, ENABLE_RUBY24, DRIVER_PKG_VERSION, DRIVER_REPO_NAME.lower()))
# TODO: Define where to copy
INSTALL_PKG_DATA_TO_ARCHIVE.extend([
{
'from_path': options['INSTALL_DIR'],
'relative': [
{
'path': 'opt',
}
]
},
])
# TODO: Define where to copy
DEV_PKG_DATA_TO_ARCHIVE.extend([
{
'from_path': options['BUILD_DIR'],
'relative': [
{
'path': '',
'pack_as': ''
},
]
}
])
| 35.661458
| 121
| 0.706003
|
4a1573b015ad31d3bb312735a1e752daba15ee33
| 3,294
|
py
|
Python
|
statick_tool/plugins/tool/lizard_tool_plugin.py
|
xydesa/statick
|
22d863306bfd6fe411758e5807ae036679a309c5
|
[
"CC0-1.0"
] | 1
|
2022-01-25T16:08:45.000Z
|
2022-01-25T16:08:45.000Z
|
statick_tool/plugins/tool/lizard_tool_plugin.py
|
xydesa/statick
|
22d863306bfd6fe411758e5807ae036679a309c5
|
[
"CC0-1.0"
] | 1
|
2020-05-06T01:41:35.000Z
|
2020-05-06T01:41:35.000Z
|
statick_tool/plugins/tool/lizard_tool_plugin.py
|
jhdcs/statick
|
121ab511f206967c587d70d88217ae23db84726d
|
[
"CC0-1.0"
] | null | null | null |
"""Apply lizard tool and gather results."""
import io
import logging
import re
from contextlib import redirect_stdout
from typing import List, Match, Optional, Pattern
import lizard
from statick_tool.issue import Issue
from statick_tool.package import Package
from statick_tool.tool_plugin import ToolPlugin
class LizardToolPlugin(ToolPlugin):
    """Apply Lizard tool and gather results.

    Note: The `-f/--input_file`, `-o/--output_file`, and `-Edumpcomments`
    options are unsupported.
    """

    def get_name(self) -> str:
        """Get name of tool."""
        return "lizard"

    def scan(self, package: Package, level: str) -> Optional[List[Issue]]:
        """Run tool and gather output.

        Args:
            package: The package to scan.
            level: The level at which to run the tool.

        Returns:
            List of issues found by lizard (possibly empty).
        """
        if not package.path:
            return []
        # The following is a modification of lizard.py's main().
        raw_user_flags = (
            [lizard.__file__] + [package.path] + self.get_user_flags(level)
        )  # Leading lizard file name is required.
        # Make sure we log warnings.
        if "-w" not in raw_user_flags:
            raw_user_flags += ["-w"]
        # Make sure unsupported arguments are not included.
        user_flags = self.remove_invalid_flags(raw_user_flags)
        options = lizard.parse_args(user_flags)
        printer = options.printer or lizard.print_result
        schema = lizard.OutputScheme(options.extensions)
        schema.patch_for_extensions()
        result = lizard.analyze(
            options.paths,
            options.exclude,
            options.working_threads,
            options.extensions,
            options.languages,
        )
        # Lizard writes its warnings to stdout; capture them so they can be
        # parsed into Issue objects below.
        lizard_output = io.StringIO()
        with redirect_stdout(lizard_output):
            printer(result, options, schema, lizard.AllResult)
        output = lizard_output.getvalue()
        lizard.print_extension_results(options.extensions)
        logging.debug("%s", output)
        if self.plugin_context and self.plugin_context.args.output_directory:
            # Explicit encoding so the log file is written consistently
            # regardless of platform default.
            with open(self.get_name() + ".log", "w", encoding="utf-8") as fid:
                fid.write(output)
        issues: List[Issue] = self.parse_output(output)
        return issues

    def parse_output(self, output: str) -> List[Issue]:
        """Parse tool output and report issues.

        Lizard warning lines look like: `path:line: type: message`.
        Duplicate issues are dropped.
        """
        lizard_re = r"(.+):(\d+):\s(.+):\s(.+)"
        parse: Pattern[str] = re.compile(lizard_re)
        matches = []
        for line in output.splitlines():
            match: Optional[Match[str]] = parse.match(line)
            if match:
                matches.append(match.groups())
        issues: List[Issue] = []
        for item in matches:
            issue = Issue(
                item[0], item[1], self.get_name(), item[2], "5", item[3], None
            )
            if issue not in issues:
                issues.append(issue)
        return issues

    def remove_invalid_flags(self, flag_list: List[str]) -> List[str]:
        """Filter out all disabled flags."""
        return [x for x in flag_list if self.valid_flag(x)]

    @classmethod
    def valid_flag(cls, flag: str) -> bool:
        """Indicate whether the passed flag is supported (True means valid)."""
        disabled_flags = ["-f", "--input_file", "-o", "--output_file", "-Edumpcomments"]
        return flag not in disabled_flags
| 32.613861
| 88
| 0.606861
|
4a157409806230927348fb8ddf9fda2a3f668bad
| 1,366
|
py
|
Python
|
mathgenerator/funcs/misc/geometric_progression.py
|
Sankari-K/mathgenerator
|
712c74fbe34fe594c4c0f7e3b3057b01d85112ba
|
[
"MIT"
] | 40
|
2020-11-17T19:45:20.000Z
|
2022-03-22T18:16:43.000Z
|
mathgenerator/funcs/misc/geometric_progression.py
|
Sankari-K/mathgenerator
|
712c74fbe34fe594c4c0f7e3b3057b01d85112ba
|
[
"MIT"
] | 209
|
2020-10-14T15:32:08.000Z
|
2020-11-03T19:08:19.000Z
|
mathgenerator/funcs/misc/geometric_progression.py
|
Sankari-K/mathgenerator
|
712c74fbe34fe594c4c0f7e3b3057b01d85112ba
|
[
"MIT"
] | 179
|
2020-10-14T15:36:55.000Z
|
2020-10-29T19:26:16.000Z
|
from .__init__ import *
def gen_func(number_values=6,
             min_value=2,
             max_value=12,
             n_term=7,
             sum_term=5,
             format='string'):
    """Generate a geometric-progression problem/solution pair.

    Note: `n_term` and `sum_term` arguments are re-drawn at random below,
    mirroring the generator's original behavior.
    """
    # Draw the common ratio, the first term, and the two query indices.
    # The order of these calls is significant for reproducible seeding.
    r = random.randint(min_value, max_value)
    a = random.randint(min_value, max_value)
    n_term = random.randint(number_values, number_values + 5)
    sum_term = random.randint(number_values, number_values + 5)
    # First `number_values` terms of the GP: a, a*r, a*r^2, ...
    GP = [a * (r ** i) for i in range(number_values)]
    value_nth_term = a * (r ** (n_term - 1))
    # Closed-form partial sum a*(r^k - 1)/(r - 1); float by design.
    sum_till_nth_term = a * ((r ** sum_term - 1) / (r - 1))
    if format == 'latex':
        return "Latex unavailable"
    if format != 'string':
        # Raw tuple form for programmatic consumers.
        return GP, n_term, sum_term, a, r, n_term, value_nth_term, sum_term, sum_till_nth_term
    problem = "For the given GP " + str(
        GP) + " ,Find the value of a,common ratio," + str(
            n_term) + "th term value, sum upto " + str(
                sum_term) + "th term"
    solution = "The value of a is {}, common ratio is {} , {}th term is {} , sum upto {}th term is {}".format(
        a, r, n_term, value_nth_term, sum_term, sum_till_nth_term)
    return problem, solution
# Register this problem type with the framework; 66 is its generator id and
# the strings list the tunable keyword arguments exposed to callers.
geometric_progression = Generator("Geometric Progression", 66, gen_func, [
    "number_values=6", "min_value=2", "max_value=12", "n_term=7", "sum_term=5"
])
| 36.918919
| 114
| 0.586384
|
4a1575ea3d6a0aa6a2f468d02c2cad8d9b904e53
| 29,049
|
py
|
Python
|
platform/gsutil/gslib/wildcard_iterator.py
|
IsaacHuang/google-cloud-sdk
|
52afa5d1a75dff08f4f5380c5cccc015bf796ca5
|
[
"Apache-2.0"
] | null | null | null |
platform/gsutil/gslib/wildcard_iterator.py
|
IsaacHuang/google-cloud-sdk
|
52afa5d1a75dff08f4f5380c5cccc015bf796ca5
|
[
"Apache-2.0"
] | null | null | null |
platform/gsutil/gslib/wildcard_iterator.py
|
IsaacHuang/google-cloud-sdk
|
52afa5d1a75dff08f4f5380c5cccc015bf796ca5
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wildcard iterator class and supporting functions."""
from __future__ import absolute_import
import fnmatch
import glob
import os
import re
import sys
import textwrap
from gslib.bucket_listing_ref import BucketListingBucket
from gslib.bucket_listing_ref import BucketListingObject
from gslib.bucket_listing_ref import BucketListingPrefix
from gslib.cloud_api import AccessDeniedException
from gslib.cloud_api import CloudApi
from gslib.cloud_api import NotFoundException
from gslib.exception import CommandException
from gslib.storage_url import ContainsWildcard
from gslib.storage_url import StorageUrlFromString
from gslib.storage_url import StripOneSlash
from gslib.storage_url import WILDCARD_REGEX
from gslib.translation_helper import GenerationFromUrlAndString
from gslib.util import UTF8
# Matches a wildcard containing a recursive component ('**'), capturing the
# text before and after it for separate handling.
FLAT_LIST_REGEX = re.compile(r'(?P<before>.*?)\*\*(?P<after>.*)')
class WildcardIterator(object):
  """Abstract base for iterating over storage URL strings with wildcards.

  Do not instantiate this class directly; use the wildcard_iterator()
  static factory method, which picks the appropriate subclass for the
  given base string.
  """

  # TODO: Standardize on __str__ and __repr__ here and elsewhere. Define both
  # and make one return the other.

  def __repr__(self):
    """Returns string representation of WildcardIterator."""
    url_string = self.wildcard_url.url_string
    return 'WildcardIterator(%s)' % url_string
class CloudWildcardIterator(WildcardIterator):
  """WildcardIterator subclass for buckets, bucket subdirs and objects.
  Iterates over BucketListingRef matching the Url string wildcard. It's
  much more efficient to first get metadata that's available in the Bucket
  (for example to get the name and size of each object), because that
  information is available in the object list results.
  """
  def __init__(self, wildcard_url, gsutil_api, all_versions=False,
               debug=0, project_id=None):
    """Instantiates an iterator that matches the wildcard URL.
    Args:
      wildcard_url: CloudUrl that contains the wildcard to iterate.
      gsutil_api: Cloud storage interface.  Passed in for thread safety, also
                  settable for testing/mocking.
      all_versions: If true, the iterator yields all versions of objects
                    matching the wildcard.  If false, yields just the live
                    object version.
      debug: Debug level to control debug output for iterator.
      project_id: Project ID to use for bucket listings.
    """
    self.wildcard_url = wildcard_url
    self.all_versions = all_versions
    self.debug = debug
    self.gsutil_api = gsutil_api
    self.project_id = project_id
  def __iter__(self, bucket_listing_fields=None,
               expand_top_level_buckets=False):
    """Iterator that gets called when iterating over the cloud wildcard.
    In the case where no wildcard is present, returns a single matching object,
    single matching prefix, or one of each if both exist.
    Args:
      bucket_listing_fields: Iterable fields to include in bucket listings.
                             Ex. ['name', 'acl'].  Iterator is
                             responsible for converting these to list-style
                             format ['items/name', 'items/acl'] as well as
                             adding any fields necessary for listing such as
                             prefixes.  API implemenation is responsible for
                             adding pagination fields.  If this is None,
                             all fields are returned.
      expand_top_level_buckets: If true, yield no BUCKET references.  Instead,
                                expand buckets into top-level objects and
                                prefixes.
    Yields:
      BucketListingRef of type BUCKET, OBJECT or PREFIX.
    """
    single_version_request = self.wildcard_url.HasGeneration()
    # For wildcard expansion purposes, we need at a minimum the name of
    # each object and prefix.  If we're not using the default of requesting
    # all fields, make sure at least these are requested.  The Cloud API
    # tolerates specifying the same field twice.
    get_fields = None
    if bucket_listing_fields:
      get_fields = set()
      for field in bucket_listing_fields:
        get_fields.add(field)
      bucket_listing_fields = self._GetToListFields(
          get_fields=bucket_listing_fields)
      bucket_listing_fields.update(['items/name', 'prefixes'])
      get_fields.update(['name'])
      # If we're making versioned requests, ensure generation and
      # metageneration are also included.
      if single_version_request or self.all_versions:
        bucket_listing_fields.update(['items/generation',
                                      'items/metageneration'])
        get_fields.update(['generation', 'metageneration'])
    # Handle bucket wildcarding, if any, in _ExpandBucketWildcards. Then
    # iterate over the expanded bucket strings and handle any object
    # wildcarding.
    for bucket_listing_ref in self._ExpandBucketWildcards(bucket_fields=['id']):
      bucket_url_string = bucket_listing_ref.url_string
      if self.wildcard_url.IsBucket():
        # IsBucket() guarantees there are no prefix or object wildcards, and
        # thus this is a top-level listing of buckets.
        if expand_top_level_buckets:
          url = StorageUrlFromString(bucket_url_string)
          for obj_or_prefix in self.gsutil_api.ListObjects(
              url.bucket_name, delimiter='/', all_versions=self.all_versions,
              provider=self.wildcard_url.scheme,
              fields=bucket_listing_fields):
            if obj_or_prefix.datatype == CloudApi.CsObjectOrPrefixType.OBJECT:
              yield self._GetObjectRef(bucket_url_string, obj_or_prefix.data,
                                       with_version=self.all_versions)
            else:  # CloudApi.CsObjectOrPrefixType.PREFIX:
              yield self._GetPrefixRef(bucket_url_string, obj_or_prefix.data)
        else:
          yield bucket_listing_ref
      else:
        # By default, assume a non-wildcarded URL is an object, not a prefix.
        # This prevents unnecessary listings (which are slower, more expensive,
        # and also subject to eventual consistency).
        if (not ContainsWildcard(self.wildcard_url.url_string) and
            self.wildcard_url.IsObject() and not self.all_versions):
          try:
            get_object = self.gsutil_api.GetObjectMetadata(
                self.wildcard_url.bucket_name,
                self.wildcard_url.object_name,
                generation=self.wildcard_url.generation,
                provider=self.wildcard_url.scheme,
                fields=get_fields)
            yield self._GetObjectRef(
                self.wildcard_url.bucket_url_string, get_object,
                with_version=(self.all_versions or single_version_request))
            return
          except (NotFoundException, AccessDeniedException):
            # It's possible this is a prefix - try to list instead.
            pass
        # Expand iteratively by building prefix/delimiter bucket listing
        # request, filtering the results per the current level's wildcard
        # (if present), and continuing with the next component of the
        # wildcard. See _BuildBucketFilterStrings() documentation for details.
        if single_version_request:
          url_string = '%s%s#%s' % (bucket_url_string,
                                    self.wildcard_url.object_name,
                                    self.wildcard_url.generation)
        else:
          # Rstrip any prefixes to correspond with rstripped prefix wildcard
          # from _BuildBucketFilterStrings().
          url_string = '%s%s' % (bucket_url_string,
                                 StripOneSlash(self.wildcard_url.object_name)
                                 or '/')  # Cover root object named '/' case.
        # Worklist of URL strings still containing unexpanded wildcard
        # components; each iteration expands one level.
        urls_needing_expansion = [url_string]
        while urls_needing_expansion:
          url = StorageUrlFromString(urls_needing_expansion.pop(0))
          (prefix, delimiter, prefix_wildcard, suffix_wildcard) = (
              self._BuildBucketFilterStrings(url.object_name))
          prog = re.compile(fnmatch.translate(prefix_wildcard))
          # List bucket for objects matching prefix up to delimiter.
          for obj_or_prefix in self.gsutil_api.ListObjects(
              url.bucket_name, prefix=prefix, delimiter=delimiter,
              all_versions=self.all_versions or single_version_request,
              provider=self.wildcard_url.scheme,
              fields=bucket_listing_fields):
            if obj_or_prefix.datatype == CloudApi.CsObjectOrPrefixType.OBJECT:
              gcs_object = obj_or_prefix.data
              if prog.match(gcs_object.name):
                if not suffix_wildcard or (
                    StripOneSlash(gcs_object.name) == suffix_wildcard):
                  if not single_version_request or (
                      self._SingleVersionMatches(gcs_object.generation)):
                    yield self._GetObjectRef(
                        bucket_url_string, gcs_object, with_version=(
                            self.all_versions or single_version_request))
            else:  # CloudApi.CsObjectOrPrefixType.PREFIX
              prefix = obj_or_prefix.data
              # If the prefix ends with a slash, remove it.  Note that we only
              # remove one slash so that we can successfully enumerate dirs
              # containing multiple slashes.
              rstripped_prefix = StripOneSlash(prefix)
              if prog.match(rstripped_prefix):
                if suffix_wildcard and rstripped_prefix != suffix_wildcard:
                  # There's more wildcard left to expand.
                  url_append_string = '%s%s' % (
                      bucket_url_string, rstripped_prefix + '/' +
                      suffix_wildcard)
                  urls_needing_expansion.append(url_append_string)
                else:
                  # No wildcard to expand, just yield the prefix
                  yield self._GetPrefixRef(bucket_url_string, prefix)
  def _BuildBucketFilterStrings(self, wildcard):
    """Builds strings needed for querying a bucket and filtering results.
    This implements wildcard object name matching.
    Args:
      wildcard: The wildcard string to match to objects.
    Returns:
      (prefix, delimiter, prefix_wildcard, suffix_wildcard)
      where:
        prefix is the prefix to be sent in bucket GET request.
        delimiter is the delimiter to be sent in bucket GET request.
        prefix_wildcard is the wildcard to be used to filter bucket GET results.
        suffix_wildcard is wildcard to be appended to filtered bucket GET
          results for next wildcard expansion iteration.
      For example, given the wildcard gs://bucket/abc/d*e/f*.txt we
      would build prefix= abc/d, delimiter=/, prefix_wildcard=d*e, and
      suffix_wildcard=f*.txt. Using this prefix and delimiter for a bucket
      listing request will then produce a listing result set that can be
      filtered using this prefix_wildcard; and we'd use this suffix_wildcard
      to feed into the next call(s) to _BuildBucketFilterStrings(), for the
      next iteration of listing/filtering.
    Raises:
      AssertionError if wildcard doesn't contain any wildcard chars.
    """
    # Generate a request prefix if the object name part of the wildcard starts
    # with a non-wildcard string (e.g., that's true for 'gs://bucket/abc*xyz').
    match = WILDCARD_REGEX.search(wildcard)
    if not match:
      # Input "wildcard" has no wildcard chars, so just return tuple that will
      # cause a bucket listing to match the given input wildcard. Example: if
      # previous iteration yielded gs://bucket/dir/ with suffix_wildcard abc,
      # the next iteration will call _BuildBucketFilterStrings() with
      # gs://bucket/dir/abc, and we will return prefix ='dir/abc',
      # delimiter='/', prefix_wildcard='dir/abc', and suffix_wildcard=''.
      prefix = wildcard
      delimiter = '/'
      prefix_wildcard = wildcard
      suffix_wildcard = ''
    else:
      if match.start() > 0:
        # Wildcard does not occur at beginning of object name, so construct a
        # prefix string to send to server.
        prefix = wildcard[:match.start()]
        wildcard_part = wildcard[match.start():]
      else:
        prefix = None
        wildcard_part = wildcard
      end = wildcard_part.find('/')
      if end != -1:
        wildcard_part = wildcard_part[:end+1]
      # Remove trailing '/' so we will match gs://bucket/abc* as well as
      # gs://bucket/abc*/ with the same wildcard regex.
      prefix_wildcard = StripOneSlash((prefix or '') + wildcard_part)
      suffix_wildcard = wildcard[match.end():]
      end = suffix_wildcard.find('/')
      if end == -1:
        suffix_wildcard = ''
      else:
        suffix_wildcard = suffix_wildcard[end+1:]
      # To implement recursive (**) wildcarding, if prefix_wildcard
      # suffix_wildcard starts with '**' don't send a delimiter, and combine
      # suffix_wildcard at end of prefix_wildcard.
      if prefix_wildcard.find('**') != -1:
        delimiter = None
        prefix_wildcard += suffix_wildcard
        suffix_wildcard = ''
      else:
        delimiter = '/'
    # The following debug output is useful for tracing how the algorithm
    # walks through a multi-part wildcard like gs://bucket/abc/d*e/f*.txt
    if self.debug > 1:
      sys.stderr.write(
          'DEBUG: wildcard=%s, prefix=%s, delimiter=%s, '
          'prefix_wildcard=%s, suffix_wildcard=%s\n' %
          (wildcard, prefix, delimiter, prefix_wildcard, suffix_wildcard))
    return (prefix, delimiter, prefix_wildcard, suffix_wildcard)
  def _SingleVersionMatches(self, listed_generation):
    """Returns True if the listed generation matches this URL's generation."""
    decoded_generation = GenerationFromUrlAndString(self.wildcard_url,
                                                    listed_generation)
    return str(self.wildcard_url.generation) == str(decoded_generation)
  def _ExpandBucketWildcards(self, bucket_fields=None):
    """Expands bucket and provider wildcards.
    Builds a list of bucket url strings that can be iterated on.
    Args:
      bucket_fields: If present, populate only these metadata fields for
                     buckets.  Example value: ['acl', 'defaultObjectAcl']
    Yields:
      BucketListingRefereneces of type BUCKET.
    """
    bucket_url = StorageUrlFromString(self.wildcard_url.bucket_url_string)
    if (bucket_fields and set(bucket_fields) == set(['id']) and
        not ContainsWildcard(self.wildcard_url.bucket_name)):
      # If we just want the name of a non-wildcarded bucket URL,
      # don't make an RPC.
      yield BucketListingBucket(bucket_url)
    elif(self.wildcard_url.IsBucket() and
         not ContainsWildcard(self.wildcard_url.bucket_name)):
      # If we have a non-wildcarded bucket URL, get just that bucket.
      yield BucketListingBucket(
          bucket_url, root_object=self.gsutil_api.GetBucket(
              self.wildcard_url.bucket_name, provider=self.wildcard_url.scheme,
              fields=bucket_fields))
    else:
      # Wildcarded bucket name: list all buckets in the project and filter
      # them client-side with an fnmatch-derived regex.
      regex = fnmatch.translate(self.wildcard_url.bucket_name)
      prog = re.compile(regex)
      fields = self._GetToListFields(bucket_fields)
      if fields:
        fields.add('items/id')
      for bucket in self.gsutil_api.ListBuckets(
          fields=fields, project_id=self.project_id,
          provider=self.wildcard_url.scheme):
        if prog.match(bucket.id):
          url = StorageUrlFromString(
              '%s://%s/' % (self.wildcard_url.scheme, bucket.id))
          yield BucketListingBucket(url, root_object=bucket)
  def _GetToListFields(self, get_fields=None):
    """Prepends 'items/' to the input fields and converts it to a set.
    This way field sets requested for GetBucket can be used in ListBucket calls.
    Note that the input set must contain only bucket or object fields; listing
    fields such as prefixes or nextPageToken should be added after calling
    this function.
    Args:
      get_fields: Iterable fields usable in GetBucket/GetObject calls.
    Returns:
      Set of fields usable in ListBuckets/ListObjects calls.
    """
    # Implicitly returns None when get_fields is falsy, which callers treat
    # as "request all fields".
    if get_fields:
      list_fields = set()
      for field in get_fields:
        list_fields.add('items/' + field)
      return list_fields
  def _GetObjectRef(self, bucket_url_string, gcs_object, with_version=False):
    """Creates a BucketListingRef of type OBJECT from the arguments.
    Args:
      bucket_url_string: Wildcardless string describing the containing bucket.
      gcs_object: gsutil_api root Object for populating the BucketListingRef.
      with_version: If true, return a reference with a versioned string.
    Returns:
      BucketListingRef of type OBJECT.
    """
    # Generation can be None in test mocks, so just return the
    # live object for simplicity.
    if with_version and gcs_object.generation is not None:
      generation_str = GenerationFromUrlAndString(self.wildcard_url,
                                                  gcs_object.generation)
      object_string = '%s%s#%s' % (bucket_url_string, gcs_object.name,
                                   generation_str)
    else:
      object_string = '%s%s' % (bucket_url_string, gcs_object.name)
    object_url = StorageUrlFromString(object_string)
    return BucketListingObject(object_url, root_object=gcs_object)
  def _GetPrefixRef(self, bucket_url_string, prefix):
    """Creates a BucketListingRef of type PREFIX from the arguments.
    Args:
      bucket_url_string: Wildcardless string describing the containing bucket.
      prefix: gsutil_api Prefix for populating the BucketListingRef
    Returns:
      BucketListingRef of type PREFIX.
    """
    prefix_url = StorageUrlFromString('%s%s' % (bucket_url_string, prefix))
    return BucketListingPrefix(prefix_url, root_object=prefix)
  def IterBuckets(self, bucket_fields=None):
    """Iterates over the wildcard, returning refs for each expanded bucket.
    This ignores the object part of the URL entirely and expands only the
    the bucket portion.  It will yield BucketListingRefs of type BUCKET only.
    Args:
      bucket_fields: Iterable fields to include in bucket listings.
                     Ex. ['defaultObjectAcl', 'logging'].  This function is
                     responsible for converting these to listing-style
                     format ['items/defaultObjectAcl', 'items/logging'], as
                     well as adding any fields necessary for listing such as
                     'items/id'.  API implemenation is responsible for
                     adding pagination fields.  If this is None, all fields are
                     returned.
    Yields:
      BucketListingRef of type BUCKET, or empty iterator if no matches.
    """
    for blr in self._ExpandBucketWildcards(bucket_fields=bucket_fields):
      yield blr
  def IterAll(self, bucket_listing_fields=None, expand_top_level_buckets=False):
    """Iterates over the wildcard, yielding bucket, prefix or object refs.
    Args:
      bucket_listing_fields: If present, populate only these metadata
                             fields for listed objects.
      expand_top_level_buckets: If true and the wildcard expands only to
                                Bucket(s), yields the expansion of each bucket
                                into a top-level listing of prefixes and objects
                                in that bucket instead of a BucketListingRef
                                to that bucket.
    Yields:
      BucketListingRef, or empty iterator if no matches.
    """
    for blr in self. __iter__(
        bucket_listing_fields=bucket_listing_fields,
        expand_top_level_buckets=expand_top_level_buckets):
      yield blr
  def IterObjects(self, bucket_listing_fields=None):
    """Iterates over the wildcard, yielding only object BucketListingRefs.
    Args:
      bucket_listing_fields: If present, populate only these metadata
                             fields for listed objects.
    Yields:
      BucketListingRefs of type OBJECT or empty iterator if no matches.
    """
    for blr in self. __iter__(bucket_listing_fields=bucket_listing_fields,
                              expand_top_level_buckets=True):
      if blr.IsObject():
        yield blr
class FileWildcardIterator(WildcardIterator):
  """WildcardIterator subclass for files and directories.
  If you use recursive wildcards ('**') only a single such wildcard is
  supported. For example you could use the wildcard '**/*.txt' to list all .txt
  files in any subdirectory of the current directory, but you couldn't use a
  wildcard like '**/abc/**/*.txt' (which would, if supported, let you find .txt
  files in any subdirectory named 'abc').
  """
  def __init__(self, wildcard_url, debug=0):
    """Instantiates an iterator over BucketListingRefs matching wildcard URL.
    Args:
      wildcard_url: FileUrl that contains the wildcard to iterate.
      debug: Debug level (range 0..3).
    """
    self.wildcard_url = wildcard_url
    self.debug = debug
  def __iter__(self):
    """Iterator that gets called when iterating over the file wildcard.
    In the case where no wildcard is present, returns a single matching file
    or directory.
    Raises:
      WildcardException: if invalid wildcard found.
    Yields:
      BucketListingRef of type OBJECT (for files) or PREFIX (for directories)
    """
    wildcard = self.wildcard_url.object_name
    match = FLAT_LIST_REGEX.match(wildcard)
    if match:
      # Recursive wildcarding request ('.../**/...').
      # Example input: wildcard = '/tmp/tmp2pQJAX/**/*'
      base_dir = match.group('before')[:-1]
      remaining_wildcard = match.group('after')
      # At this point for the above example base_dir = '/tmp/tmp2pQJAX' and
      # remaining_wildcard = '/*'
      if remaining_wildcard.startswith('*'):
        raise WildcardException('Invalid wildcard with more than 2 consecutive '
                                '*s (%s)' % wildcard)
      # If there was no remaining wildcard past the recursive wildcard,
      # treat it as if it were a '*'. For example, file://tmp/** is equivalent
      # to file://tmp/**/*
      if not remaining_wildcard:
        remaining_wildcard = '*'
      # Skip slash(es).
      remaining_wildcard = remaining_wildcard.lstrip(os.sep)
      filepaths = self._IterDir(base_dir, remaining_wildcard)
    else:
      # Not a recursive wildcarding request.
      filepaths = glob.iglob(wildcard)
    # Classify each match: directories become PREFIX refs, files OBJECT refs.
    for filepath in filepaths:
      expanded_url = StorageUrlFromString(filepath)
      if os.path.isdir(filepath):
        yield BucketListingPrefix(expanded_url)
      else:
        yield BucketListingObject(expanded_url)
  def _IterDir(self, directory, wildcard):
    """An iterator over the specified dir and wildcard."""
    # NOTE(review): the encode/decode pair below relies on Python 2 byte-string
    # semantics (str.decode does not exist on Python 3 str) — this module
    # appears to target Python 2 only.
    # UTF8-encode directory before passing it to os.walk() so if there are
    # non-valid UTF8 chars in the file name (e.g., that can happen if the file
    # originated on Windows) os.walk() will not attempt to decode and then die
    # with a "codec can't decode byte" error, and instead we can catch the error
    # at yield time and print a more informative error message.
    for dirpath, unused_dirnames, filenames in os.walk(directory.encode(UTF8)):
      for f in fnmatch.filter(filenames, wildcard):
        try:
          yield os.path.join(dirpath, f).decode(UTF8)
        except UnicodeDecodeError:
          # Note: We considered several ways to deal with this, but each had
          # problems:
          # 1. Raise an exception and try to catch in a higher layer (the
          #    gsutil cp command), so we can properly support the gsutil cp -c
          #    option. That doesn't work because raising an exception during
          #    iteration terminates the generator.
          # 2. Accumulate a list of bad filenames and skip processing each
          #    during iteration, then raise at the end, with exception text
          #    printing the bad paths. That doesn't work because iteration is
          #    wrapped in PluralityCheckableIterator, so it's possible there
          #    are not-yet-performed copy operations at the time we reach the
          #    end of the iteration and raise the exception - which would cause
          #    us to skip copying validly named files. Moreover, the gsutil
          #    cp command loops over argv, so if you run the command gsutil cp
          #    -rc dir1 dir2 gs://bucket, an invalid unicode name inside dir1
          #    would cause dir2 never to be visited.
          # 3. Print the invalid pathname and skip it during iteration. That
          #    would work but would mean gsutil cp could exit with status 0
          #    even though some files weren't copied.
          # 4. Change the WildcardIterator to include an error status along with
          #    the result. That would solve the problem but would be a
          #    substantial change (WildcardIterator is used in many parts of
          #    gsutil), and we didn't feel that magnitude of change was
          #    warranted by this relatively uncommon corner case.
          # Instead we chose to abort when one such file is encountered, and
          # require the user to remove or rename the files and try again.
          raise CommandException('\n'.join(textwrap.wrap(
              'Invalid Unicode path encountered (%s). gsutil cannot proceed '
              'with such files present. Please remove or rename this file and '
              'try again.' % repr(os.path.join(dirpath, f)))))
  # pylint: disable=unused-argument
  def IterObjects(self, bucket_listing_fields=None):
    """Iterates over the wildcard, yielding only object (file) refs.
    Args:
      bucket_listing_fields: Ignored as filesystems don't have buckets.
    Yields:
      BucketListingRefs of type OBJECT or empty iterator if no matches.
    """
    for bucket_listing_ref in self.IterAll():
      if bucket_listing_ref.IsObject():
        yield bucket_listing_ref
  # pylint: disable=unused-argument
  def IterAll(self, bucket_listing_fields=None, expand_top_level_buckets=False):
    """Iterates over the wildcard, yielding BucketListingRefs.
    Args:
      bucket_listing_fields: Ignored; filesystems don't have buckets.
      expand_top_level_buckets: Ignored; filesystems don't have buckets.
    Yields:
      BucketListingRefs of type OBJECT (file) or PREFIX (directory),
      or empty iterator if no matches.
    """
    for bucket_listing_ref in self.__iter__():
      yield bucket_listing_ref
  def IterBuckets(self, unused_bucket_fields=None):
    """Placeholder to allow polymorphic use of WildcardIterator.
    Args:
      unused_bucket_fields: Ignored; filesystems don't have buckets.
    Raises:
      WildcardException: in all cases.
    """
    raise WildcardException(
        'Iterating over Buckets not possible for file wildcards')
class WildcardException(StandardError):
  """Exception raised for invalid wildcard URLs."""

  def __init__(self, reason):
    StandardError.__init__(self)
    self.reason = reason

  def __str__(self):
    return 'WildcardException: %s' % self.reason

  # repr and str render identically for this exception.
  __repr__ = __str__
def CreateWildcardIterator(url_str, gsutil_api, all_versions=False, debug=0,
                           project_id=None):
  """Instantiate a WildcardIterator for the given URL string.
  Args:
    url_str: URL string naming wildcard object(s) to iterate.
    gsutil_api: Cloud storage interface.  Passed in for thread safety, also
                settable for testing/mocking.
    all_versions: If true, the iterator yields all versions of objects
                  matching the wildcard.  If false, yields just the live
                  object version.
    debug: Debug level to control debug output for iterator.
    project_id: Project id to use for bucket listings.
  Returns:
    A WildcardIterator that handles the requested iteration.
  """
  url = StorageUrlFromString(url_str)
  # Local file URLs get the filesystem iterator; everything else is a cloud
  # URL and goes through the bucket/object iterator.
  if url.IsFileUrl():
    return FileWildcardIterator(url, debug=debug)
  return CloudWildcardIterator(url, gsutil_api, all_versions=all_versions,
                               debug=debug, project_id=project_id)
| 44.417431
| 80
| 0.671314
|
4a15769d5f4d4439c17ce2b1dde5270a398094e0
| 479
|
py
|
Python
|
tools/gen_random_csv.py
|
arjun-menon/vcalc
|
899c890b100ce33fb4bf2f94653c57d1a1997bc7
|
[
"Apache-2.0"
] | 1
|
2019-07-16T08:25:52.000Z
|
2019-07-16T08:25:52.000Z
|
tools/gen_random_csv.py
|
arjun-menon/vcalc
|
899c890b100ce33fb4bf2f94653c57d1a1997bc7
|
[
"Apache-2.0"
] | null | null | null |
tools/gen_random_csv.py
|
arjun-menon/vcalc
|
899c890b100ce33fb4bf2f94653c57d1a1997bc7
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""Emit a CSV of unique random integers: col_count columns by row_count rows."""
import sys, random

# Validate the two positional arguments; any parse failure or non-positive
# value prints usage and exits non-zero.
try:
    n_cols = int(sys.argv[1])
    n_rows = int(sys.argv[2])
    if n_cols <= 0 or n_rows <= 0:
        raise ValueError()
except (IndexError, ValueError):
    print("Usage: %s col_count row_count" % sys.argv[0], file=sys.stderr)
    sys.exit(1)

# One list of distinct random values per column, drawn from [0, 10*row_count).
columns = [random.sample(range(10 * n_rows), n_rows) for _ in range(n_cols)]

# Emit row-major: pick element `row` from every column list.
for row in range(n_rows):
    print(', '.join(str(column[row]) for column in columns))
| 22.809524
| 87
| 0.688935
|
4a1576ba8d8404c19e2c0d893a9499af77344395
| 3,878
|
py
|
Python
|
app_name/steps/generic_steps.py
|
radu-sover/naaf
|
a9bbccdb83454c223d86ca83abe00212871ad697
|
[
"MIT"
] | null | null | null |
app_name/steps/generic_steps.py
|
radu-sover/naaf
|
a9bbccdb83454c223d86ca83abe00212871ad697
|
[
"MIT"
] | null | null | null |
app_name/steps/generic_steps.py
|
radu-sover/naaf
|
a9bbccdb83454c223d86ca83abe00212871ad697
|
[
"MIT"
] | null | null | null |
"""
This would be a generic steps module - common actions to Pages and Elements.
This steps were suposed to be implemented in the framework: naaf.page_steps,
but I had some issues with "reflection" and for now is here.
Should I continue with this generalization?
Feature files will be more conventions and will eliminate the need of separate
steps, meaning somebody will need to write more on a feature Given, When, Then.
"""
from behave import given, when, then, step
# from naaf.page_steps import * # NOQA
from naaf.base import CurrentPage
from app_name.pages.calendar import * # NOQA
from app_name.pages.trainings import * # NOQA
from app_name.pages.navigation import * # NOQA
@given('I am on "{page_class_name}" page')
def navigate_to_page(context, page_class_name):
    """Open the named page and assert we arrived there."""
    # Equivalent to: page_class_name(context).navigate()
    page = _str_to_class(page_class_name)(context).navigate()
    assert page.at()
@when('I click on "{button_name}" from "{page_class_name}"')
def button_click(context, button_name, page_class_name):
    """Press the named button on the given page object."""
    # Equivalent to: page_class_name(context).button_name()
    page = _str_to_class(page_class_name)(context)
    getattr(page, button_name)()
@when('I type "{text}" in "{form_element_name}" from "{page_class_name}"')
def type_text(context, text, form_element_name, page_class_name):
    """Enter text into the named form element of the given page."""
    # Equivalent to: page_class_name(context).form_name(text)
    page = _str_to_class(page_class_name)(context)
    getattr(page, form_element_name)(text)
@when(
    'I select "{option_text}" in "{select_element_name}" from "{page_class_name}"')
def select_option(context, option_text, select_element_name, page_class_name):
    """Choose an option from the named select element of the given page."""
    # Equivalent to: page_class_name(context).select_name(option)
    page = _str_to_class(page_class_name)(context)
    getattr(page, select_element_name)(option_text)
@when(
    'I click on "{button_name}" for "{text}" training from "{page_class_name}"')
def button_click_in_list(context, button_name, text, page_class_name):
    """Click the button of the list row identified by *text*."""
    # Equivalent to: page_class_name(context).btn_edit_for_item(text)
    page = _str_to_class(page_class_name)(context)
    getattr(page, button_name + '_for_item')(text)
@when('I wait for {seconds:Number} seconds')
def wait_for_seconds(context, seconds):
    """Pause the scenario for the given number of seconds."""
    current = CurrentPage(context)
    current.wait(seconds)
@then('I see in page "{search_text}"')
def search_in_page(context, search_text):
    """
    Strange but does not find text in page_source.
    The content of page_source is correct.
    """
    # Relies on CurrentPage implementing __contains__.
    page = CurrentPage(context)
    assert search_text in page
@then('I am at "{page_class_name}" page')
def at_page(context, page_class_name):
    """Assert the browser is currently on the named page."""
    # Equivalent to: page_class_name(context).at()
    assert _str_to_class(page_class_name)(context).at()
@step('I see the "{text}" in "{list_name}" from "{page_class_name}"')
def search_in_list(context, text, list_name, page_class_name):
    """Assert that the named list on the page contains *text*."""
    # Equivalent to: page_class_name(context).list_name_contains(text)
    page = _str_to_class(page_class_name)(context)
    assert getattr(page, list_name + '_contains')(text)
@then('I do not see the "{text}" in "{list_name}" from "{page_class_name}"')
def search_in_list_not_finding(context, text, list_name, page_class_name):
    """Assert that the named list on the page does NOT contain *text*."""
    # Equivalent to: not page_class_name(context).list_name_contains(text)
    page = _str_to_class(page_class_name)(context)
    assert not getattr(page, list_name + '_contains')(text)
def _str_to_class(s):
return globals().get(s)
| 35.907407
| 83
| 0.746003
|
4a1577fb7ff51205a1baead9dc25783d604953cd
| 2,596
|
py
|
Python
|
mmdet/models/losses/giou_loss.py
|
zhangxiaosong18/LTM
|
31676e24aabc7d15ba0ac833dea55c03a22b8e5d
|
[
"Apache-2.0"
] | 3
|
2021-03-06T09:18:03.000Z
|
2021-12-04T13:01:03.000Z
|
mmdet/models/losses/giou_loss.py
|
zhangxiaosong18/LTM
|
31676e24aabc7d15ba0ac833dea55c03a22b8e5d
|
[
"Apache-2.0"
] | null | null | null |
mmdet/models/losses/giou_loss.py
|
zhangxiaosong18/LTM
|
31676e24aabc7d15ba0ac833dea55c03a22b8e5d
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn as nn
from ..registry import LOSSES
from .utils import weighted_loss
@weighted_loss
def giou_loss(pred, target):
    """IoU loss.
    Computing the IoU loss between a set of predicted bboxes and target bboxes.
    The loss is calculated as negative log of IoU.
    Args:
        pred (Tensor): Predicted boundaries (left, top, right, bottom),
            shape (..., 4).
        target (Tensor): Corresponding gt bboxes, shape (..., 4).
    Return:
        Tensor: Loss tensor.
    """
    # Unpack the four per-side distances for both boxes.
    p_left, p_top, p_right, p_bottom = (
        pred[..., 0], pred[..., 1], pred[..., 2], pred[..., 3])
    t_left, t_top, t_right, t_bottom = (
        target[..., 0], target[..., 1], target[..., 2], target[..., 3])
    # Box areas (the +1.0 terms follow the integer-pixel convention).
    area_target = (t_left + t_right + 1.0) * (t_top + t_bottom + 1.0)
    area_pred = (p_left + p_right + 1.0) * (p_top + p_bottom + 1.0)
    # Intersection extents: overlap of the distance intervals, clamped at 0.
    inter_w = (torch.min(p_left, t_left) +
               torch.min(p_right, t_right)).clamp(min=0.0) + 1.0
    inter_h = (torch.min(p_bottom, t_bottom) +
               torch.min(p_top, t_top)).clamp(min=0.0) + 1.0
    inter = inter_w * inter_h
    union = area_target + area_pred - inter
    # Smallest enclosing (convex hull) box extents.
    hull_w = (torch.max(p_left, t_left) +
              torch.max(p_right, t_right)).clamp(min=0.0) + 1.0
    hull_h = (torch.max(p_bottom, t_bottom) +
              torch.max(p_top, t_top)).clamp(min=0.0) + 1.0
    hull = hull_w * hull_h
    ious = inter / union
    # GIoU = IoU - (hull - union) / hull
    gious = ious - (hull - union) / hull
    return 1 - gious
@LOSSES.register_module
class GIoULoss(nn.Module):
    """Generalized IoU loss module wrapping :func:`giou_loss`."""

    def __init__(self, reduction='mean', loss_weight=1.0):
        super(GIoULoss, self).__init__()
        self.reduction = reduction
        self.loss_weight = loss_weight

    def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None,
                **kwargs):
        """Compute the (optionally reweighted and reduced) GIoU loss."""
        if weight is not None and not torch.any(weight > 0):
            # No positive sample: keep the autograd graph connected while
            # contributing exactly zero.
            return (pred * weight).sum()  # 0
        assert reduction_override in (None, 'none', 'mean', 'sum')
        if reduction_override:
            reduction = reduction_override
        else:
            reduction = self.reduction
        return self.loss_weight * giou_loss(
            pred,
            target,
            weight,
            reduction=reduction,
            avg_factor=avg_factor,
            **kwargs)
| 32.45
| 114
| 0.617103
|
4a15790f42b2c129030b63b9b193217df52f9921
| 3,641
|
py
|
Python
|
mux_python/models/upload_response.py
|
moaazsidat/mux-python
|
3f03b9dd0761fa1a0cd5bdbeac85ccf4f326508c
|
[
"MIT"
] | 36
|
2019-02-28T21:18:39.000Z
|
2022-03-04T19:58:45.000Z
|
mux_python/models/upload_response.py
|
moaazsidat/mux-python
|
3f03b9dd0761fa1a0cd5bdbeac85ccf4f326508c
|
[
"MIT"
] | 7
|
2019-04-01T14:48:34.000Z
|
2022-03-04T16:31:34.000Z
|
mux_python/models/upload_response.py
|
moaazsidat/mux-python
|
3f03b9dd0761fa1a0cd5bdbeac85ccf4f326508c
|
[
"MIT"
] | 9
|
2019-11-29T03:57:58.000Z
|
2022-03-02T17:29:25.000Z
|
# coding: utf-8
"""
Mux API
Mux is how developers build online video. This API encompasses both Mux Video and Mux Data functionality to help you build your video-related projects better and faster than ever before. # noqa: E501
The version of the OpenAPI document: v1
Contact: devex@mux.com
Generated by: https://openapi-generator.tech
"""
import inspect
import pprint
import re # noqa: F401
import six
from mux_python.configuration import Configuration
class UploadResponse(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'data': 'Upload'
    }

    attribute_map = {
        'data': 'data'
    }

    def __init__(self, data=None, local_vars_configuration=None):  # noqa: E501
        """UploadResponse - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration.get_default_copy()
        self.local_vars_configuration = local_vars_configuration

        self._data = None
        self.discriminator = None

        if data is not None:
            self.data = data

    @property
    def data(self):
        """Gets the data of this UploadResponse.  # noqa: E501

        :return: The data of this UploadResponse.  # noqa: E501
        :rtype: Upload
        """
        return self._data

    @data.setter
    def data(self, data):
        """Sets the data of this UploadResponse.

        :param data: The data of this UploadResponse.  # noqa: E501
        :type data: Upload
        """
        self._data = data

    def to_dict(self, serialize=False):
        """Returns the model properties as a dict"""
        result = {}

        def convert(x):
            if hasattr(x, "to_dict"):
                # Fix: inspect.getargspec was deprecated since Python 3.0 and
                # removed in 3.11; getfullargspec is the drop-in replacement
                # for this positional-args inspection.
                args = inspect.getfullargspec(x.to_dict).args
                if len(args) == 1:
                    return x.to_dict()
                else:
                    return x.to_dict(serialize)
            else:
                return x

        # dict.items() replaces six.iteritems: identical behavior on
        # Python 3 without the third-party dependency in this method.
        for attr, _ in self.openapi_types.items():
            value = getattr(self, attr)
            attr = self.attribute_map.get(attr, attr) if serialize else attr
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: convert(x),
                    value
                ))
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], convert(item[1])),
                    value.items()
                ))
            else:
                result[attr] = convert(value)

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, UploadResponse):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, UploadResponse):
            return True

        return self.to_dict() != other.to_dict()
| 28.007692
| 204
| 0.570722
|
4a1579212946a709b889476706dfb6d6a51d9f8d
| 1,665
|
py
|
Python
|
{{cookiecutter.project_slug}}/scripts/test.py
|
blueskyideas/cookiecutter-python
|
754c1c7f457e3cd13918308a4b946eaeac5b2529
|
[
"MIT"
] | 1
|
2018-01-18T12:54:03.000Z
|
2018-01-18T12:54:03.000Z
|
{{cookiecutter.project_slug}}/scripts/test.py
|
blueskyideas/cookiecutter-python
|
754c1c7f457e3cd13918308a4b946eaeac5b2529
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/scripts/test.py
|
blueskyideas/cookiecutter-python
|
754c1c7f457e3cd13918308a4b946eaeac5b2529
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.6
'''
Run the test suite.
Part of the Scripts to Rule them All suite of scripts to provide a consistent
developer experience for working on our repos
https://githubengineering.com/scripts-to-rule-them-all/
'''
import argparse
import logging
import os
import subprocess
import common
import bootstrap
_LOGGER = logging.getLogger("script.test")
def run(*, verbose=False, sqlite_echo=False, skip_bootstrap=False, pytest_options=None):
    """Run the test suite via ``pipenv run pytest``.

    :param verbose: enable verbose logging
    :param sqlite_echo: export SQLITE_ECHO=y so sqlite debug messages show
    :param skip_bootstrap: skip the bootstrap step before running tests
    :param pytest_options: extra options string appended to the pytest command
    """
    common.run(_LOGGER, verbose=verbose)
    os.environ['PIPENV_VENV_IN_PROJECT'] = "1"
    os.environ['SQLITE_ECHO'] = "y" if sqlite_echo else "n"
    os.environ['PYTEST_ADDOPTS'] = "--color=yes"
    if not skip_bootstrap:
        bootstrap.run(verbose=verbose)
    cmd = ["pipenv", "run", "pytest"]
    if pytest_options:
        cmd.append(pytest_options)
    common.run_cmd(_LOGGER, cmd, raw_log=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser = argparse.ArgumentParser(
description="Run the test suite"
)
parser.add_argument("-v",action="store_true", help="Verbose output")
parser.add_argument("--sqlite_echo",action="store_true", help="Show sqlite debug messages")
parser.add_argument("--skip_bootstrap",action="store_true", help="skip bootstrapping before running tests")
parser.add_argument("--pytest",help="extra pytest options")
args = parser.parse_args()
print(args)
common.setup_parent_loggers()
run(
verbose=args.v,
sqlite_echo=args.sqlite_echo,
skip_bootstrap=args.skip_bootstrap,
pytest_options=args.pytest
)
| 25.227273
| 111
| 0.695495
|
4a157926929b77e9951fcdc9ee4ca958a7b0102d
| 649
|
py
|
Python
|
examples/web/jsonserializer.py
|
spaceone/circuits
|
ed6d5464f1f83034109ed3d23d126c715450cfd2
|
[
"MIT"
] | null | null | null |
examples/web/jsonserializer.py
|
spaceone/circuits
|
ed6d5464f1f83034109ed3d23d126c715450cfd2
|
[
"MIT"
] | null | null | null |
examples/web/jsonserializer.py
|
spaceone/circuits
|
ed6d5464f1f83034109ed3d23d126c715450cfd2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from json import dumps
from circuits import Component, handler
from circuits.web import Controller, Logger, Server
class JSONSerializer(Component):
    """Serialize every web response body as JSON before it is sent."""

    channel = "web"

    # 1 higher than the default response handler
    @handler("response", priority=1.0)
    def serialize_response_body(self, response):
        # Advertise JSON, then encode whatever the controller returned.
        response.headers["Content-Type"] = "application/json"
        encoded = dumps(response.body)
        response.body = encoded
class Root(Controller):
    """Web root controller."""

    def index(self):
        """Return the index payload as a dict; JSONSerializer encodes it."""
        payload = {"message": "Hello World!"}
        return payload
app = Server(("0.0.0.0", 9000))
JSONSerializer().register(app)
Logger().register(app)
Root().register(app)
app.run()
| 21.633333
| 61
| 0.696456
|
4a15799390341963532a100a7765bf73b854da61
| 1,247
|
py
|
Python
|
webscaff/commands/sys/apt.py
|
idlesign/webscaff
|
b839e6c9bbdf491c3bd0f392f80d39762b8549ee
|
[
"BSD-3-Clause"
] | 3
|
2017-03-22T15:07:20.000Z
|
2021-01-16T19:23:14.000Z
|
webscaff/commands/sys/apt.py
|
idlesign/webscaff
|
b839e6c9bbdf491c3bd0f392f80d39762b8549ee
|
[
"BSD-3-Clause"
] | null | null | null |
webscaff/commands/sys/apt.py
|
idlesign/webscaff
|
b839e6c9bbdf491c3bd0f392f80d39762b8549ee
|
[
"BSD-3-Clause"
] | null | null | null |
from invoke import task
# Packages installed by bootstrap() on a fresh host.
BOOTSTRAP_SYSTEM_PACKAGES = [
    'python3-dev',
    'python3-pip',
    'python3-venv',
    'python3-wheel',
    # For source builds.
    'build-essential',
    'libjpeg-dev',  # for Pillow
    'libxml2-dev', 'libxslt1-dev',  # for lxml
    'libpcre3-dev', 'libssl-dev',  # for uWSGI with SSL and routing support
    'git',
    'postgresql', 'libpq-dev',
    'certbot',
    # Utils.
    'acl',
    'mc',
    'htop',
    'net-tools',
    'ncdu',
]
@task
def configure(ctx):
    """Continues configuring using dpkg"""
    noninteractive = {'DEBIAN_FRONTEND': 'noninteractive'}
    ctx.sudo('dpkg --configure -a', env=noninteractive)
def upgrade(ctx):
    """Initiates remote OS upgrade procedure (apt update, then upgrade).

    :param ctx: invoke/fabric context used to run remote commands
    """
    # Bug fix: update() requires the context argument; it was previously
    # called as ``update()`` which raised TypeError at runtime.
    update(ctx)
    ctx.sudo('apt-get upgrade')
@task
def update(ctx):
    """Initiates apt cache update."""
    refresh_cmd = 'apt-get update'
    ctx.sudo(refresh_cmd)
@task
def install(ctx, packages):
    """Installs packages using apt.

    :param packages: a single package name or a list of package names
    """
    # Normalise to a list so a single package name is accepted too.
    packages = packages if isinstance(packages, list) else [packages]
    update(ctx)
    ctx.sudo(f"apt install -y {' '.join(packages)}", env={'DEBIAN_FRONTEND': 'noninteractive'})
def bootstrap(ctx):
    """Bootstraps system by installing required packages."""
    # Delegates to install(), which also refreshes the apt cache first.
    install(ctx, BOOTSTRAP_SYSTEM_PACKAGES)
| 19.184615
| 95
| 0.619086
|
4a157a687fae0e5d69f4e41bbd32a101741da65b
| 8,844
|
py
|
Python
|
neutron/db/dvr_mac_db.py
|
congnt95/neutron
|
6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1
|
[
"Apache-2.0"
] | 1,080
|
2015-01-04T08:35:00.000Z
|
2022-03-27T09:15:52.000Z
|
neutron/db/dvr_mac_db.py
|
congnt95/neutron
|
6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1
|
[
"Apache-2.0"
] | 24
|
2015-02-21T01:48:28.000Z
|
2021-11-26T02:38:56.000Z
|
neutron/db/dvr_mac_db.py
|
congnt95/neutron
|
6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1
|
[
"Apache-2.0"
] | 1,241
|
2015-01-02T10:47:10.000Z
|
2022-03-27T09:42:23.000Z
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants
from neutron_lib.db import api as db_api
from neutron_lib import exceptions as n_exc
from neutron_lib.exceptions import dvr as dvr_exc
from neutron_lib.objects import exceptions
from neutron_lib.plugins import directory
from neutron_lib.utils import net
from oslo_config import cfg
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
from sqlalchemy import or_
from neutron.common import utils
from neutron.conf.db import dvr_mac_db
from neutron.conf.db import l3_dvr_db
from neutron.db import models_v2
from neutron.extensions import dvr as ext_dvr
from neutron.objects import router
from neutron.plugins.ml2 import models as ml2_models
LOG = logging.getLogger(__name__)
dvr_mac_db.register_db_dvr_mac_opts()
l3_dvr_db.register_db_l3_dvr_opts()
def get_ports_query_by_subnet_and_ip(context, subnet, ip_addresses=None):
    """Build a query for ports with an IP allocation on *subnet*.

    :param ip_addresses: optional list of IPs to further restrict the match.
    """
    query = (
        context.session.query(models_v2.Port)
        .join(models_v2.IPAllocation)
        .filter(
            models_v2.Port.id == models_v2.IPAllocation.port_id,
            models_v2.IPAllocation.subnet_id == subnet))
    if ip_addresses:
        query = query.filter(
            models_v2.IPAllocation.ip_address.in_(ip_addresses))
    return query
@registry.has_registry_receivers
class DVRDbMixin(ext_dvr.DVRMacAddressPluginBase):
    """Mixin class to add dvr mac address to db_plugin_base_v2."""
    @property
    def plugin(self):
        # Lazily resolve and cache the core plugin; on the very first access
        # the cache attribute does not exist yet, hence the AttributeError
        # guard instead of a hasattr check.
        try:
            if self._plugin is not None:
                return self._plugin
        except AttributeError:
            pass
        self._plugin = directory.get_plugin()
        return self._plugin
    @staticmethod
    @db_api.retry_if_session_inactive()
    def _db_delete_mac_associated_with_agent(context, agent):
        # Remove the per-host DVR MAC once the LAST agent on the host is
        # being deleted, then fan the updated MAC list out to other agents.
        host = agent['host']
        plugin = directory.get_plugin()
        if [a for a in plugin.get_agents(context, filters={'host': [host]})
           if a['id'] != agent['id']]:
            # there are still agents on this host, don't mess with the mac
            # entry until they are all deleted.
            return
        if not router.DVRMacAddress.delete_objects(context, host=host):
            return
        # notify remaining agents so they cleanup flows
        dvr_macs = plugin.get_dvr_mac_address_list(context)
        plugin.notifier.dvr_mac_address_update(context, dvr_macs)
    @staticmethod
    @registry.receives(resources.AGENT, [events.BEFORE_DELETE])
    def _delete_mac_associated_with_agent(resource, event,
                                          trigger, payload=None):
        # Registry callback wired to agent deletion; delegates to the
        # retry-wrapped DB worker above.
        DVRDbMixin._db_delete_mac_associated_with_agent(
            payload.context, payload.latest_state)
    @db_api.CONTEXT_READER
    def _get_dvr_mac_address_by_host(self, context, host):
        # Raises DVRMacAddressNotFound when no MAC is registered for host.
        dvr_obj = router.DVRMacAddress.get_object(context, host=host)
        if not dvr_obj:
            raise dvr_exc.DVRMacAddressNotFound(host=host)
        return self._make_dvr_mac_address_dict(dvr_obj)
    @utils.transaction_guard
    @db_api.retry_if_session_inactive()
    def _create_dvr_mac_address_retry(self, context, host, base_mac):
        # One attempt at generating and persisting a random MAC for host; a
        # duplicate-entry collision is surfaced to the caller, which retries.
        with db_api.CONTEXT_WRITER.using(context):
            mac_address = net.get_random_mac(base_mac)
            dvr_mac_binding = router.DVRMacAddress(
                context, host=host, mac_address=netaddr.EUI(mac_address))
            dvr_mac_binding.create()
            LOG.debug("Generated DVR mac for host %(host)s "
                      "is %(mac_address)s",
                      {'host': host, 'mac_address': mac_address})
            dvr_macs = self.get_dvr_mac_address_list(context)
            # TODO(vivek): improve scalability of this fanout by
            # sending a single mac address rather than the entire set
            self.notifier.dvr_mac_address_update(context, dvr_macs)
            return self._make_dvr_mac_address_dict(dvr_mac_binding)
    def _create_dvr_mac_address(self, context, host):
        """Create DVR mac address for a given host."""
        base_mac = cfg.CONF.dvr_base_mac.split(':')
        try:
            return self._create_dvr_mac_address_retry(context, host, base_mac)
        except exceptions.NeutronDbObjectDuplicateEntry:
            # All retries collided with existing MACs.
            LOG.error("MAC generation error after %s attempts",
                      db_api.MAX_RETRIES)
            raise n_exc.HostMacAddressGenerationFailure(host=host)
    @db_api.CONTEXT_READER
    def get_dvr_mac_address_list(self, context):
        # Returns all host->MAC bindings as plain dicts.
        return [
            dvr_mac.to_dict()
            for dvr_mac in router.DVRMacAddress.get_objects(context)
        ]
    def get_dvr_mac_address_by_host(self, context, host):
        """Determine the MAC for the DVR port associated to host."""
        # Get-or-create semantics: a missing binding is created on demand.
        if not host:
            return
        try:
            return self._get_dvr_mac_address_by_host(context, host)
        except dvr_exc.DVRMacAddressNotFound:
            return self._create_dvr_mac_address(context, host)
    def _make_dvr_mac_address_dict(self, dvr_mac_entry, fields=None):
        # Normalise the EUI object to its string form for RPC/API consumers.
        return {'host': dvr_mac_entry['host'],
                'mac_address': str(dvr_mac_entry['mac_address'])}
    @log_helpers.log_method_call
    @db_api.retry_if_session_inactive()
    def get_ports_on_host_by_subnet(self, context, host, subnet):
        """Returns DVR serviced ports on a given subnet in the input host
        This method returns ports that need to be serviced by DVR.
        :param context: rpc request context
        :param host: host id to match and extract ports of interest
        :param subnet: subnet id to match and extract ports of interest
        :returns: list -- Ports on the given subnet in the input host
        """
        host_dvr_for_dhcp = cfg.CONF.host_dvr_for_dhcp
        query = context.session.query(models_v2.Port)
        query = query.join(ml2_models.PortBinding)
        query = query.join(models_v2.IPAllocation)
        query = query.filter(
            models_v2.Port.id == ml2_models.PortBinding.port_id,
            models_v2.Port.id == models_v2.IPAllocation.port_id,
            ml2_models.PortBinding.host == host,
            models_v2.IPAllocation.subnet_id == subnet)
        # DVR services compute ports plus the configured set of other
        # serviceable device owners (optionally including DHCP).
        owner_filter = or_(
            models_v2.Port.device_owner.startswith(
                constants.DEVICE_OWNER_COMPUTE_PREFIX),
            models_v2.Port.device_owner.in_(
                utils.get_other_dvr_serviced_device_owners(host_dvr_for_dhcp)))
        ports_query = query.filter(owner_filter)
        ports = [
            self.plugin._make_port_dict(port, process_extensions=False,
                                        with_fixed_ips=False)
            for port in ports_query.all()
        ]
        LOG.debug("Returning list of dvr serviced ports on host %(host)s"
                  " for subnet %(subnet)s ports %(ports)s",
                  {'host': host, 'subnet': subnet,
                   'ports': ports})
        return ports
    @log_helpers.log_method_call
    @db_api.retry_if_session_inactive()
    def get_subnet_for_dvr(self, context, subnet, fixed_ips=None):
        # Returns the subnet dict augmented with the gateway port's MAC, or
        # an empty dict when the subnet or gateway port cannot be found.
        if fixed_ips:
            subnet_data = fixed_ips[0]['subnet_id']
        else:
            subnet_data = subnet
        try:
            subnet_info = self.plugin.get_subnet(
                context, subnet_data)
        except n_exc.SubnetNotFound:
            return {}
        else:
            # retrieve the gateway port on this subnet
            if fixed_ips:
                ip_address = fixed_ips[0]['ip_address']
            else:
                ip_address = subnet_info['gateway_ip']
            query = get_ports_query_by_subnet_and_ip(
                context, subnet, [ip_address])
            internal_gateway_ports = query.all()
            if not internal_gateway_ports:
                LOG.error("Could not retrieve gateway port "
                          "for subnet %s", subnet_info)
                return {}
            internal_port = internal_gateway_ports[0]
            subnet_info['gateway_mac'] = internal_port['mac_address']
            return subnet_info
| 38.789474
| 79
| 0.668137
|
4a157a82091931dbe7ffc1289ac736172f4e6b40
| 15,863
|
py
|
Python
|
scripts/primes_scratch.py
|
rcasero/cytometer
|
d76e58fa37f83f6a666d556ba061530d787fcfb2
|
[
"Apache-2.0"
] | 1
|
2021-06-09T10:18:26.000Z
|
2021-06-09T10:18:26.000Z
|
scripts/primes_scratch.py
|
rcasero/cytometer
|
d76e58fa37f83f6a666d556ba061530d787fcfb2
|
[
"Apache-2.0"
] | null | null | null |
scripts/primes_scratch.py
|
rcasero/cytometer
|
d76e58fa37f83f6a666d556ba061530d787fcfb2
|
[
"Apache-2.0"
] | null | null | null |
"""
This file is part of Cytometer
Copyright 2021 Medical Research Council
SPDX-License-Identifier: Apache-2.0
Author: Ramon Casero <rcasero@gmail.com>
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as plticker
from primesieve import *
import pandas as pd
import time
DEBUG = False
def fibonacci(x=0, y=1, n_fibo=10):
    """Return the first *n_fibo* terms of the Fibonacci-like sequence seeded
    with *x* and *y*.

    Bug fix: the original always returned [x, y] even when n_fibo was 0 or 1;
    it now returns exactly n_fibo terms for every non-negative n_fibo (and an
    empty list for n_fibo <= 0). Behavior for n_fibo >= 2 is unchanged.
    """
    if n_fibo <= 0:
        return []
    if n_fibo == 1:
        return [x]
    out = [x, y]
    for _ in range(n_fibo - 2):
        x, y = y, x + y
        out.append(y)
    return out
def pixel_connectivity(is_prime_square, x_square):
    """
    Count the number of pixels adjacent to each labelled pixel, split into two types of connectivity:
    diagonally and 4-neighbourhood (laterally).
    :param is_prime_square: boolean 2D array marking labelled (prime) pixels
    :param x_square: 2D array of the numbers at each pixel
    :return:
    * pandas.DataFrame with columns 'num', 'neigh_d', 'neigh_4'
    """
    nrow, ncol = is_prime_square.shape

    def neigh_val(i, j):
        # Out-of-bounds neighbours count as 0.
        if 0 <= i < nrow and 0 <= j < ncol:
            return int(is_prime_square[i, j])
        return 0

    nums = []
    diag_counts = []
    lat_counts = []
    # loop pixels that correspond to prime numbers
    for i, j in zip(*np.where(is_prime_square)):
        # diagonal neighbours: the four corner-touching pixels
        n_diag = sum(neigh_val(i + di, j + dj)
                     for di in (-1, 1) for dj in (-1, 1))
        # lateral (4-connectivity) neighbours: up, down, left, right
        n_lat = (neigh_val(i - 1, j) + neigh_val(i + 1, j)
                 + neigh_val(i, j - 1) + neigh_val(i, j + 1))
        nums.append(x_square[i, j])
        diag_counts.append(n_diag)
        lat_counts.append(n_lat)

    # put outputs into structured array
    return pd.DataFrame(
        data={'num': nums, 'neigh_d': diag_counts, 'neigh_4': lat_counts})
def diagonal_zigzag_square(x):
    """
    Fill square matrix in diagonal order.
    x = [0, ..., 8]
    y = diagonal_zigzag_square(x)
    y = [[0 2 5]
         [1 4 7]
         [3 6 8]]
    :param x: vector of length n**2.
    :return: y
    """
    x = np.asarray(x)
    # the input must contain a perfect-square number of elements
    side = int(np.sqrt(len(x)))
    if side * side != len(x):
        raise ValueError('Input vector needs to have n**2 elements')
    y = np.zeros((side, side), dtype=x.dtype)
    # number of elements on each antidiagonal: 1, 2, ..., n, ..., 2, 1
    diag_lens = list(range(1, side)) + list(range(side, 0, -1))
    # start offset of each antidiagonal's segment within x
    starts = np.cumsum([0] + diag_lens)
    # fill antidiagonal k top-down (rows descending, columns ascending)
    for k, length in enumerate(diag_lens):
        top_row = min(k, side - 1)
        rows = np.arange(top_row, top_row - length, -1)
        cols = k - rows
        y[rows, cols] = x[starts[k]:starts[k] + length]
    return y
def prop_primes(x):
    """
    Compute proportion of prime numbers in each row/column (even length squares) or diagonal/antidiagonal (odd length
    squares).
    :param x: boolean square array marking prime positions
    :return: (prop_fw, prop_bk) arrays of per-line prime proportions
    """
    # length of square length
    n = x.shape[0]
    if n % 2 == 1: # odd length square
        # forward direction: main diagonals from top-right (k=n-1) to
        # bottom-left (k=-(n-1))
        prop_fw = []
        for k in range(n - 1, -n, -1):
            diag = np.diagonal(x, k)
            prop_fw.append(np.count_nonzero(diag) / len(diag))
        # backward direction: antidiagonals, obtained by flipping columns
        x = np.fliplr(x)
        prop_bk = []
        for k in range(n - 1, -n, -1):
            diag = np.diagonal(x, k)
            prop_bk.append(np.count_nonzero(diag) / len(diag))
    else: # even length square
        prop_fw = np.sum(x, axis=1) / n # rows
        prop_bk = np.sum(x, axis=0) / n # columns
    # NOTE(review): this DEBUG plotting block duplicates the plotting done by
    # the calling script loops below -- candidate for removal; verify no
    # caller relies on the figure side effect.
    if DEBUG:
        if n % 2 == 1: # odd length square
            plt.clf()
            (markers, stemlines, baseline) = plt.stem(range(n - 1, -n, -1), prop_fw, label='Diagonals')
            plt.setp(markers, marker='D', markersize=6, markeredgecolor="C0", markeredgewidth=2)
            plt.stem(range(n - 1, -n, -1), prop_bk, linefmt='C1', markerfmt='C1o', label='Antidiagonals')
            plt.xlabel('Diagonal index')
            plt.ylabel('Primes proportion')
            plt.legend()
        else:
            plt.clf()
            (markers, stemlines, baseline) = plt.stem(prop_fw, label='Rows')
            plt.setp(markers, marker='D', markersize=6, markeredgecolor="C0", markeredgewidth=2)
            plt.stem(prop_bk, linefmt='C1', markerfmt='C1o', label='Columns')
            plt.xlabel('Row/column index')
            plt.ylabel('Primes proportion')
            plt.legend()
    return np.array(prop_fw), np.array(prop_bk)
# list of precomputed primes
primes_list = np.array(primes(650e3))
############################################################################################
# example of number square
#
# array([[ 1, 2, 3, 4, 5, 6, 7, 8],
# [ 9, 10, 11, 12, 13, 14, 15, 16],
# [17, 18, 19, 20, 21, 22, 23, 24],
# [25, 26, 27, 28, 29, 30, 31, 32],
# [33, 34, 35, 36, 37, 38, 39, 40],
# [41, 42, 43, 44, 45, 46, 47, 48],
# [49, 50, 51, 52, 53, 54, 55, 56],
# [57, 58, 59, 60, 61, 62, 63, 64]])
############################################################################################
n = 13
first_number = 105
last_number = first_number + n**2 # one past the last number, to make use of range() easier
# sequence of numbers contained in the Fibonacci square
x_list = np.array(range(first_number, last_number))
# check whether each number is a prime number
# NOTE(review): `x in primes_list` is a linear scan per element; a set of
# primes would make this O(1) per lookup.
is_prime = np.array([x in primes_list for x in x_list])
# number square with sequence of numbers
x_square = x_list.reshape((n, n))
is_prime_square = is_prime.reshape((n, n))
# visualise which cells of the square hold primes
plt.clf()
plt.imshow(is_prime_square)
loc = plticker.MultipleLocator(base=1)
plt.gca().xaxis.set_major_locator(loc)
plt.gca().yaxis.set_major_locator(loc)
plt.grid(True, which='both')
# compute pixel connectivity
df = pixel_connectivity(is_prime_square, x_square)
############################################################################################
# Loop of Fibonacci number squares. Fill all the squares row by row
############################################################################################
# number of Fibonacci squares (without counting the initial 0 size)
n_fibo = 16
# side length of each Fibonacci square
fibo_len = fibonacci(0, 1, n_fibo + 1)
# init dataframe with results
df_total = pd.DataFrame([], columns=['i_fibo', 'n', 'rank', 'primes_neigh_d', 'primes_neigh_4',
                                     'prop_fw', 'prop_bk', 'time'])
# for debugging purposes, this can be used to directly get the initial parameters for any matrix, without having to
# check for primes, etc.
if DEBUG:
    n_fibo_to_stop = 15
    last_number = 1
    for i_fibo in range(1, n_fibo_to_stop + 1):
        # length of current Fibonacci square
        n = fibo_len[i_fibo]
        # first and last numbers in the square
        first_number = last_number
        last_number = first_number + n ** 2 # one past the last number, to make use of range() easier
# loop Fibonacci squares
last_number = 1
for i_fibo in range(1, n_fibo + 1):
    # to calculate how long it takes to compute each iteration
    t0 = time.time()
    # i_fibo += 1 ## for manual debugging
    # length of current Fibonacci square
    n = fibo_len[i_fibo]
    # first and last numbers in the square
    first_number = last_number
    last_number = first_number + n**2 # one past the last number, to make use of range() easier
    # list of precomputed primes
    primes_list = np.array(primes(last_number))
    # sequence of numbers contained in the Fibonacci square
    x_list = np.array(range(first_number, last_number))
    # skip the first empty square
    if len(x_list) == 0:
        continue
    # check whether each number is a prime number
    is_prime = np.array([x in primes_list for x in x_list])
    # Fibonacci square filled with the sequence of numbers
    x_square = x_list.reshape((n, n))
    is_prime_square = is_prime.reshape((n, n))
    # plot Fibonacci square
    if DEBUG:
        plt.clf()
        plt.imshow(is_prime_square)
        loc = plticker.MultipleLocator(base=1)
        plt.gca().xaxis.set_major_locator(loc)
        plt.gca().yaxis.set_major_locator(loc)
        plt.grid(which='major', axis='both', linestyle='-')
        for i in range(n):
            for j in range(n):
                plt.text(j, i, '{:d}'.format(x_square[i, j]), color='w', ha='center', va='center', fontsize=int(72/n))
    # compute proportion of primes in each diagonal or lateral line
    prop_fw, prop_bk = prop_primes(is_prime_square)
    if DEBUG:
        if n % 2 == 1: # odd length square
            plt.clf()
            (markers, stemlines, baseline) = plt.stem(range(n - 1, -n, -1), prop_fw, label='Diagonals')
            plt.setp(markers, marker='D', markersize=6, markeredgecolor="C0", markeredgewidth=2)
            plt.stem(range(n - 1, -n, -1), prop_bk, linefmt='C1', markerfmt='C1o', label='Antidiagonals')
            plt.xlabel('Diagonal index')
            plt.ylabel('Primes proportion')
            plt.legend()
        else:
            plt.clf()
            (markers, stemlines, baseline) = plt.stem(prop_fw, label='Rows')
            plt.setp(markers, marker='D', markersize=6, markeredgecolor="C0", markeredgewidth=2)
            plt.stem(prop_bk, linefmt='C1', markerfmt='C1o', label='Columns')
            plt.xlabel('Row/column index')
            plt.ylabel('Primes proportion')
            plt.legend()
    # compute neighbourhood connectivity
    df = pixel_connectivity(is_prime_square, x_square)
    # skip second square, that has a single non-prime number, so df is empty
    if df.shape[0] == 0:
        continue
    # count how many primes have connectivity 0, 1, 2, 3, 4
    idx, counts = np.unique(df['neigh_d'], return_counts=True)
    counts_d = np.array([0, 0, 0, 0])
    counts_d[idx] = counts
    idx, counts = np.unique(df['neigh_4'], return_counts=True)
    counts_4 = np.array([0, 0, 0, 0])
    counts_4[idx] = counts
    # NOTE(review): DataFrame.append was deprecated in pandas 1.4 and removed
    # in 2.0 -- migrate to pd.concat when this script is next touched.
    df_total = df_total.append({'i_fibo': i_fibo, 'n': n, 'rank': np.linalg.matrix_rank(is_prime_square),
                                'primes_neigh_d': counts_d, 'primes_neigh_4': counts_4,
                                'prop_fw': prop_fw, 'prop_bk': prop_bk,
                                'time': (time.time() - t0)},
                               ignore_index=True)
with pd.option_context('display.max_rows', None, 'display.max_columns', None):
    print(df_total)
############################################################################################
# Loop of Fibonacci number squares. Fill the squares with odd length in diagonal order.
#
# This experiment fails. Odd-Fibonacci squares now have primes touching both laterally and
# diagonally.
############################################################################################
# number of Fibonacci squares (without counting the initial 0 size)
n_fibo = 16
# side length of each Fibonacci square
fibo_len = fibonacci(0, 1, n_fibo + 1)
# init dataframe with results
df_total = pd.DataFrame([], columns=['i_fibo', 'n', 'rank', 'primes_neigh_d', 'primes_neigh_4',
                                     'time'])
# for debugging purposes, this can be used to directly get the initial parameters for any matrix, without having to
# check for primes, etc.
if DEBUG:
    n_fibo_to_stop = 9
    last_number = 1
    for i_fibo in range(1, n_fibo_to_stop + 1):
        # length of current Fibonacci square
        n = fibo_len[i_fibo]
        # first and last numbers in the square
        first_number = last_number
        last_number = first_number + n ** 2 # one past the last number, to make use of range() easier
# loop Fibonacci squares
last_number = 1
for i_fibo in range(1, n_fibo + 1):
    # to calculate how long it takes to compute each iteration
    t0 = time.time()
    # i_fibo += 1 ## for manual debugging
    # length of current Fibonacci square
    n = fibo_len[i_fibo]
    # first and last numbers in the square
    first_number = last_number
    last_number = first_number + n**2 # one past the last number, to make use of range() easier
    # list of precomputed primes
    primes_list = np.array(primes(last_number))
    # sequence of numbers contained in the Fibonacci square
    x_list = np.array(range(first_number, last_number))
    # skip the first empty square
    if len(x_list) == 0:
        continue
    # check whether each number is a prime number
    is_prime = np.array([x in primes_list for x in x_list])
    # Fibonacci square filled with the sequence of numbers
    if n % 2 == 1: # Fibonacci square's length is odd
        # odd squares are filled antidiagonal by antidiagonal
        x_square = diagonal_zigzag_square(x_list)
        is_prime_square = diagonal_zigzag_square(is_prime)
    else: # Fibonacci square's length is even
        x_square = x_list.reshape((n, n))
        is_prime_square = is_prime.reshape((n, n))
    # plot Fibonacci square
    if DEBUG:
        plt.clf()
        plt.imshow(is_prime_square)
        loc = plticker.MultipleLocator(base=1)
        plt.gca().xaxis.set_major_locator(loc)
        plt.gca().yaxis.set_major_locator(loc)
        plt.grid(which='major', axis='both', linestyle='-')
        for i in range(n):
            for j in range(n):
                plt.text(j, i, '{:d}'.format(x_square[i, j]), color='w', ha='center', va='center', fontsize=int(72/n))
    # compute neighbourhood connectivity
    df = pixel_connectivity(is_prime_square, x_square)
    # skip second square, that has a single non-prime number, so df is empty
    if df.shape[0] == 0:
        continue
    # count how many primes have connectivity 0, 1, 2, 3, 4
    idx, counts = np.unique(df['neigh_d'], return_counts=True)
    counts_d = np.array([0, 0, 0, 0])
    counts_d[idx] = counts
    idx, counts = np.unique(df['neigh_4'], return_counts=True)
    counts_4 = np.array([0, 0, 0, 0])
    counts_4[idx] = counts
    # NOTE(review): DataFrame.append was removed in pandas 2.0 -- see note in
    # the row-wise loop above; migrate to pd.concat.
    df_total = df_total.append({'i_fibo': i_fibo, 'n': n, 'rank': np.linalg.matrix_rank(is_prime_square),
                                'primes_neigh_d': counts_d, 'primes_neigh_4': counts_4,
                                'time': (time.time() - t0)},
                               ignore_index=True)
with pd.option_context('display.max_rows', None, 'display.max_columns', None):
    print(df_total)
############################################################################################
# Position of prime numbers within fibonacci intervals
############################################################################################
def cart2pol(x, y):
    """Convert Cartesian coordinates to polar form.

    :param x: x component (scalar or numpy array)
    :param y: y component (scalar or numpy array)
    :return: tuple (rho, phi) — radius and angle in radians
    """
    squared_norm = x ** 2 + y ** 2
    radius = np.sqrt(squared_norm)
    angle = np.arctan2(y, x)
    return (radius, angle)
def pol2cart(rho, phi):
    """Convert polar coordinates back to Cartesian form.

    :param rho: radius (scalar or numpy array)
    :param phi: angle in radians (scalar or numpy array)
    :return: tuple (x, y) of Cartesian components
    """
    cos_phi = np.cos(phi)
    sin_phi = np.sin(phi)
    return (rho * cos_phi, rho * sin_phi)
# Locate each prime within its enclosing Fibonacci interval [F_i, F_{i+1})
# and visualize the normalized positions, both as a histogram and in a
# polar-style scatter (interval index as radius, position as angle).
n_fibo = 30
fibo = fibonacci(0, 1, n_fibo + 1)
primes_list = np.array(primes(fibo[-1]))
# Boolean lookup table: primes_lot[k] is True iff k is prime.
# FIX: np.bool was removed in NumPy >= 1.24; use the builtin bool dtype.
primes_lot = np.zeros(shape=(fibo[-1],), dtype=bool)
primes_lot[primes_list] = True
p_out = []
i_out = []
for i in range(len(fibo) - 1):
    fibo_from = fibo[i]
    fibo_to = fibo[i+1]
    # normalize primes to the interval, so p is in [0, 1)
    p = np.where(primes_lot[fibo_from:fibo_to])[0] / (fibo_to - fibo_from)
    # append to output vector (one interval index per prime found)
    i_out = i_out + [i, ] * len(p)
    p_out = np.concatenate((p_out, p))
plt.clf()
plt.hist(p_out, bins=21)
# interpret as polar numbers: radius = interval index, angle = position
x, y = pol2cart(i_out, p_out * 2 * np.pi)
xmin = np.min(x)
xmax = np.max(x)
ymin = np.min(y)
ymax = np.max(y)
plt.clf()
plt.plot([0, 0], [ymin, ymax], 'k')
plt.plot([xmin, xmax], [0, 0], 'k')
plt.scatter(x, y, s=20, color='C1')
| 32.774793
| 118
| 0.583937
|
4a157af317b9f2b1c5d1f99e7b0cd9056db3df49
| 2,865
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/datafactory/v20180601/get_exposure_control_feature_value.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/datafactory/v20180601/get_exposure_control_feature_value.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/datafactory/v20180601/get_exposure_control_feature_value.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetExposureControlFeatureValueResult',
'AwaitableGetExposureControlFeatureValueResult',
'get_exposure_control_feature_value',
]
@pulumi.output_type
class GetExposureControlFeatureValueResult:
    """
    The exposure control response.
    """
    # NOTE: generated by the Pulumi SDK Generator; values are stored via
    # pulumi.set/pulumi.get rather than plain instance attributes.
    def __init__(__self__, feature_name=None, value=None):
        if feature_name and not isinstance(feature_name, str):
            raise TypeError("Expected argument 'feature_name' to be a str")
        pulumi.set(__self__, "feature_name", feature_name)
        if value and not isinstance(value, str):
            raise TypeError("Expected argument 'value' to be a str")
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter(name="featureName")
    def feature_name(self) -> str:
        """
        The feature name.
        """
        return pulumi.get(self, "feature_name")
    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The feature value.
        """
        return pulumi.get(self, "value")
class AwaitableGetExposureControlFeatureValueResult(GetExposureControlFeatureValueResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns this method into a generator, which
        # makes the object awaitable while resolving immediately (the values
        # are already present on self).
        if False:
            yield self
        return GetExposureControlFeatureValueResult(
            feature_name=self.feature_name,
            value=self.value)
def get_exposure_control_feature_value(feature_name: Optional[str] = None,
                                       feature_type: Optional[str] = None,
                                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExposureControlFeatureValueResult: # placeholder, corrected below
    """placeholder"""
| 35.37037
| 174
| 0.673298
|
4a157bc3f45ed90f81141ccdc9ef138629427633
| 18,580
|
py
|
Python
|
CHIP-Network-Model/model_fitting_utils.py
|
IdeasLabUT/WSDM-2022-Challenge
|
f4d0aa8dc3491c8ab77b86e376f37d04720389e3
|
[
"BSD-3-Clause"
] | 2
|
2022-01-22T21:53:36.000Z
|
2022-01-25T08:05:21.000Z
|
CHIP-Network-Model/model_fitting_utils.py
|
IdeasLabUT/WSDM-2022-Challenge
|
f4d0aa8dc3491c8ab77b86e376f37d04720389e3
|
[
"BSD-3-Clause"
] | null | null | null |
CHIP-Network-Model/model_fitting_utils.py
|
IdeasLabUT/WSDM-2022-Challenge
|
f4d0aa8dc3491c8ab77b86e376f37d04720389e3
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: Makan Arastuie
"""
import numpy as np
from scipy.stats import norm
import chip_local_search as cls
import matplotlib.pyplot as plt
from scipy.stats import multinomial
import generative_model_utils as utils
import parameter_estimation as estimate_utils
from spectral_clustering import spectral_cluster
from chip_generative_model import community_generative_model
def fit_community_model(event_dict, num_nodes, duration, num_classes, local_search_max_iter, local_search_n_cores,
                        verbose=False):
    """
    Fits CHIP model to a network.

    Pipeline: aggregate events into a count adjacency matrix, spectral-cluster
    it into `num_classes` blocks, then either refine the membership by local
    search (which also fits the Hawkes parameters) or estimate the block-pair
    Hawkes parameters directly from the clustering.

    :param event_dict: Edge dictionary of events between all node pair.
    :param num_nodes: (int) Total number of nodes
    :param duration: (int) duration of the network
    :param num_classes: (int) number of blocks / classes
    :param local_search_max_iter: Maximum number of local search to be performed. If 0, no local search is done
    :param local_search_n_cores: Number of cores to parallelize local search. Only applicable if
                                 `local_search_max_iter` > 0
    :param verbose: Prints fitted Block Hawkes parameters

    :return: node_membership, mu, alpha, beta, block_pair_events
    """
    agg_adj = utils.event_dict_to_aggregated_adjacency(num_nodes, event_dict)
    # adj = utils.event_dict_to_adjacency(num_nodes, event_dict)
    # Running spectral clustering
    node_membership = spectral_cluster(agg_adj, num_classes, verbose=False, plot_eigenvalues=False)
    if local_search_max_iter > 0 and num_classes > 1:
        # Local search refines the membership and returns the Hawkes
        # parameters fitted to the refined membership in one go.
        node_membership, bp_mu, bp_alpha, bp_beta = cls.chip_local_search(event_dict, num_classes, node_membership,
                                                                          duration,
                                                                          max_iter=local_search_max_iter,
                                                                          n_cores=local_search_n_cores,
                                                                          return_fitted_param=True, verbose=False)
        block_pair_events = utils.event_dict_to_block_pair_events(event_dict, node_membership, num_classes)
    else:
        # No local search: estimate Hawkes parameters from the spectral
        # clustering membership directly.
        (bp_mu,
         bp_alpha,
         bp_beta,
         bp_alpha_beta_ratio,
         block_pair_events) = estimate_bp_hawkes_params(event_dict, node_membership, duration, num_classes,
                                                        agg_adj=agg_adj, return_block_pair_events=True)
    # Printing information about the fit
    if verbose:
        _, block_count = np.unique(node_membership, return_counts=True)
        class_prob = block_count / sum(block_count)
        print(f"Membership percentage: ", class_prob)
        print("Mu:")
        print(bp_mu)
        print("\nAlpha:")
        print(bp_alpha)
        print("\nBeta:")
        print(bp_beta)
    return node_membership, bp_mu, bp_alpha, bp_beta, block_pair_events
def estimate_bp_hawkes_params(event_dict, node_membership, duration, num_classes,
                              agg_adj=None, return_block_pair_events=False):
    """
    Estimate CHIP Hawkes parameters.

    :param event_dict: Edge dictionary of events between all node pair.
    :param node_membership: (list) membership of every node to one of K classes.
    :param duration: (int) duration of the network
    :param num_classes: (int) number of blocks / classes
    :param agg_adj: (optional) np array (num_nodes x num_nodes) Adjacency matrix where element ij denotes the
                    number of events between nodes i an j. If None, this will be calculated.
    :param return_block_pair_events: (bool) If True, also returns block_pair_events as a fifth element.

    :return: bp_mu, bp_alpha, bp_beta, bp_alpha_beta_ratio (each KxK), plus
             block_pair_events when `return_block_pair_events` is True.
    """
    if agg_adj is None:
        num_nodes = len(node_membership)
        agg_adj = utils.event_dict_to_aggregated_adjacency(num_nodes, event_dict)
    # mu and m = alpha/beta have closed-form estimates from the event counts.
    bp_mu, bp_alpha_beta_ratio = estimate_utils.estimate_hawkes_from_counts(agg_adj, node_membership,
                                                                            duration,
                                                                            1e-10 / duration)
    # FIX: np.float was removed in NumPy >= 1.24; the builtin float is the
    # same dtype (float64).
    bp_beta = np.zeros((num_classes, num_classes), dtype=float)
    block_pair_events = utils.event_dict_to_block_pair_events(event_dict, node_membership, num_classes)
    bp_size = utils.calc_block_pair_size(node_membership, num_classes)
    # beta must be estimated numerically per block pair from the event times.
    for b_i in range(num_classes):
        for b_j in range(num_classes):
            bp_beta[b_i, b_j], _ = estimate_utils.estimate_beta_from_events(block_pair_events[b_i][b_j],
                                                                            bp_mu[b_i, b_j],
                                                                            bp_alpha_beta_ratio[b_i, b_j],
                                                                            duration, bp_size[b_i, b_j])
    bp_alpha = bp_alpha_beta_ratio * bp_beta
    if return_block_pair_events:
        return bp_mu, bp_alpha, bp_beta, bp_alpha_beta_ratio, block_pair_events
    return bp_mu, bp_alpha, bp_beta, bp_alpha_beta_ratio
def calc_full_log_likelihood(block_pair_events, node_membership,
                             bp_mu, bp_alpha, bp_beta,
                             duration, num_classes,
                             add_com_assig_log_prob=True):
    """
    Calculates the full log likelihood of the CHIP model.

    :param block_pair_events: (list) n_classes x n_classes where entry ij is a list of event lists between nodes in
                              block i to nodes in block j.
    :param node_membership: (list) membership of every node to one of K classes.
    :param bp_mu: n_classes x n_classes where entry ij is the mu of the block pair ij
    :param bp_alpha: n_classes x n_classes where entry ij is the alpha of the block pair ij
    :param bp_beta: n_classes x n_classes where entry ij is the beta of the block pair ij
    :param duration: (int) duration of the network
    :param num_classes: (int) number of blocks / classes
    :param add_com_assig_log_prob: if True, adds the likelihood the community assignment to the total log-likelihood.

    :return: log-likelihood of the CHIP model
    """
    log_likelihood = 0
    bp_size = utils.calc_block_pair_size(node_membership, num_classes)
    # Sum the Hawkes log-likelihood contribution of every block pair.
    for b_i in range(num_classes):
        for b_j in range(num_classes):
            log_likelihood += estimate_utils.block_pair_full_hawkes_log_likelihood(block_pair_events[b_i][b_j],
                                                                                   bp_mu[b_i, b_j], bp_alpha[b_i, b_j],
                                                                                   bp_beta[b_i, b_j], duration,
                                                                                   block_pair_size=bp_size[b_i, b_j])
    if add_com_assig_log_prob:
        # Adding the log probability of the community assignments to the full log likelihood,
        # modeled as a multinomial with MLE class probabilities.
        n_nodes = len(node_membership)
        _, block_count = np.unique(node_membership, return_counts=True)
        class_prob_mle = block_count / sum(block_count)
        rv_multi = multinomial(n_nodes, class_prob_mle)
        log_prob_community_assignment = rv_multi.logpmf(block_count)
        log_likelihood += log_prob_community_assignment
    return log_likelihood
def assign_node_membership_for_missing_nodes(node_membership, missing_nodes):
    """
    Assigns the missing nodes to the largest community.

    :param node_membership: (list) membership of every node (except missing ones) to one of K classes
    :param missing_nodes: (list) nodes to be assigned a community. Left unmodified.

    :return: np array of node_membership including missing nodes
    """
    class_idx, class_count = np.unique(node_membership, return_counts=True)
    largest_class_idx = class_idx[np.argmax(class_count)]
    combined_node_membership = np.copy(node_membership)
    # Insert in ascending order so earlier insertions do not shift the target
    # positions of the nodes still to be inserted.
    # FIX: use sorted() instead of in-place .sort() so the caller's
    # `missing_nodes` list is not mutated as a side effect.
    for n in sorted(missing_nodes):
        combined_node_membership = np.insert(combined_node_membership, n, largest_class_idx)
    return combined_node_membership
def calc_per_event_log_likelihood(combined_log_likelihood, train_log_likelihood, test_event_dict, test_num_nodes):
    """
    Per-test-event log-likelihood: the log-likelihood attributable to the test
    data (full minus train) divided by the number of test events.

    :param combined_log_likelihood: (float) log-likelihood of the entire data
    :param train_log_likelihood: (float) log-likelihood of the train data
    :param test_event_dict: event_dict of the test data
    :param test_num_nodes: Number of nodes in the test dataset

    :return: per test event log-likelihood
    """
    test_agg_adj = utils.event_dict_to_aggregated_adjacency(test_num_nodes, test_event_dict)
    num_test_events = np.sum(test_agg_adj)
    test_log_likelihood = combined_log_likelihood - train_log_likelihood
    return test_log_likelihood / num_test_events
def generate_fit_community_hawkes(event_dict, node_membership,
                                  bp_mu, bp_alpha, bp_beta,
                                  duration, plot_hist, n_cores=1,
                                  seed=None):
    """
    Generates a community model and plots its count histogram against the original event_dict. (if plot_hist is True)

    :param event_dict: Edge dictionary of events between all node pair.
    :param node_membership: (list) membership of every node to one of K classes.
    :param bp_mu, bp_alpha, bp_beta: Hawkes process parameters
    :param duration: duration of the network
    :param plot_hist: if True, plots a histogram of the weighted adjacency of real vs. generated model.
    :param n_cores: number of cores to parallelize the generative process
    :param seed: seed for CHIP generative process

    :return: generated_node_membership, generated_event_dict
    """
    # Generating a network with the same size and empirical class probabilities
    n_nodes = len(node_membership)
    _, block_count = np.unique(node_membership, return_counts=True)
    class_prob = block_count / sum(block_count)
    generated_node_membership, generated_event_dict = community_generative_model(n_nodes, class_prob,
                                                                                 bp_mu, bp_alpha, bp_beta,
                                                                                 end_time=duration, burnin=None,
                                                                                 n_cores=n_cores, seed=seed)
    if plot_hist:
        # FIX: np.int was removed in NumPy >= 1.24; the builtin int is the
        # equivalent dtype here.
        generated_agg_adj = utils.event_dict_to_aggregated_adjacency(n_nodes, generated_event_dict, dtype=int)
        generated_deg_count_flattened = np.reshape(generated_agg_adj, (n_nodes * n_nodes))
        agg_adj = utils.event_dict_to_aggregated_adjacency(n_nodes, event_dict, dtype=int)
        deg_count_flattened = np.reshape(agg_adj, (n_nodes * n_nodes))
        plt.hist(deg_count_flattened, bins=50, alpha=0.5, label='Real Data', color='blue', density=True)
        plt.hist(generated_deg_count_flattened, bins=50, alpha=0.5, label='Generated Data', color='red', density=True)
        plt.legend(loc='upper right')
        plt.xlabel('Event Count')
        plt.ylabel('Density')
        plt.title(f'Histogram of the Count Matrix Real Vs. Generated CHIP Model Data - K: {len(class_prob)}'
                  f'\n Mean Count - Real: {np.mean(agg_adj):.3f} - Generated: {np.mean(generated_agg_adj):.3f}')
        plt.yscale("log")
        plt.show()
    return generated_node_membership, generated_event_dict
def log_binning(counter, bin_count=35):
    """
    Log-bin a distribution given as a (keys, values) pair, returning the
    per-bin mean key and mean value.

    Based on https://stackoverflow.com/questions/16489655/plotting-log-binned-network-degree-distributions/16490678
    """
    keys, values = counter[0], counter[1]
    # Bin edges are log-spaced from the smallest key up to the larger of the
    # key and value maxima (in log10 space).
    log_max_key = np.log10(max(keys))
    log_max_val = np.log10(max(values))
    upper_exponent = max([log_max_key, log_max_val])
    lower_exponent = np.log10(min(keys))
    bins = np.logspace(lower_exponent, upper_exponent, num=bin_count)
    # Weighted histogram divided by the raw counts gives the per-bin mean;
    # empty bins yield NaN (0/0).
    counts_per_bin = np.histogram(keys, bins)[0]
    bin_means_y = np.histogram(keys, bins, weights=values)[0] / counts_per_bin
    bin_means_x = np.histogram(keys, bins, weights=keys)[0] / counts_per_bin
    return bin_means_x, bin_means_y
def compute_mu_and_m_confidence_interval(event_dict, node_membership, num_classes, z_alpha, duration):
    """
    Computes the confidence interval for mu and m (alpha to beta ratio)

    :param event_dict: Edge dictionary of events between all node pair.
    :param node_membership: (list) membership of every node to one of K classes.
    :param num_classes: (int) number of blocks / classes
    :param z_alpha: significance level (resulting in (1 - z_alpha) * 100 % CI)
    :param duration: the duration of the network

    :return: matrix of KxK confidence interval for mu and m
    """
    num_nodes = len(node_membership)
    agg_adj = utils.event_dict_to_aggregated_adjacency(num_nodes, event_dict)
    sample_mean, sample_var = estimate_utils.compute_sample_mean_and_variance(agg_adj, node_membership)
    bp_size = utils.calc_block_pair_size(node_membership, num_classes)
    # Significance level is divided across the K^2 simultaneous intervals
    # (Bonferroni-style correction) — NOTE(review): confirm intended scheme.
    z = 1 - (z_alpha / (2 * (num_classes ** 2)))
    ci_percentile = norm.ppf(1 - ((1 - z) / 2))
    # Half-widths of the intervals; constants presumably follow the CHIP
    # paper's asymptotic variance expressions — TODO confirm.
    mu_ci = ci_percentile * np.sqrt((9 * sample_mean) / (4 * bp_size))
    mu_ci /= duration
    m_ci = ci_percentile * np.sqrt(1 / (4 * bp_size * sample_mean))
    return mu_ci, m_ci
def compute_mu_pairwise_difference_confidence_interval(event_dict, node_membership, num_classes, mu, duration,
                                                       block_pair_tuple_list, z_alpha):
    """
    Computes the pairwise difference if mu along with its confidence interval

    :param event_dict: Edge dictionary of events between all node pair.
    :param node_membership: (list) membership of every node to one of K classes.
    :param num_classes: (int) number of blocks / classes
    :param mu: KxK matrix of mu values for each block pair
    :param duration: the duration of the network
    :param block_pair_tuple_list: (list) of tuples for pairwise difference [(1, 1, 1, 2), (1, 1, 2, 1)]
    :param z_alpha: significance level (resulting in (1 - z_alpha) * 100 % CI)

    :return: dict with passed tuples as keys and a tuple of (difference, CI) as value
    """
    num_nodes = len(node_membership)
    agg_adj = utils.event_dict_to_aggregated_adjacency(num_nodes, event_dict)
    sample_mean, sample_var = estimate_utils.compute_sample_mean_and_variance(agg_adj, node_membership)
    bp_size = utils.calc_block_pair_size(node_membership, num_classes)
    # Significance level spread over the number of distinct pairwise
    # comparisons — NOTE(review): confirm the intended correction factor.
    z = 1 - (z_alpha / (4 * (num_classes - 1) * num_classes))
    ci_percentile = norm.ppf(1 - ((1 - z) / 2))
    pairwise_res_dict = {}
    # Each tuple (a, b, x, y) requests the difference mu[a, b] - mu[x, y].
    for a, b, x, y in block_pair_tuple_list:
        diff = mu[a, b] - mu[x, y]
        sqroot = np.sqrt((9 / 4) * ((sample_mean[a, b] / bp_size[a, b]) + (sample_mean[x, y] / bp_size[x, y])))
        ci = ci_percentile * (1 / duration) * sqroot
        pairwise_res_dict[(a, b, x, y)] = (diff, ci)
    return pairwise_res_dict
def compute_block_pair_event_count_empirical_mean_and_variance(block_pair_events, node_membership, n_classes):
    """
    Computes the empirical mean and variance of per-node-pair event counts
    within each block pair.

    :param block_pair_events: (list) n_classes x n_classes where entry ij is a list of event lists between nodes in
                              block i to nodes in block j.
    :param node_membership: (list) membership of every node to one of K classes.
    :param n_classes: (int) number of blocks / classes

    :return: a tuple of two KxK matrices: mean and variance of block pair event counts
    """
    bp_size = utils.calc_block_pair_size(node_membership, n_classes).astype(int)
    counts_mean = np.zeros((n_classes, n_classes))
    counts_variance = np.zeros((n_classes, n_classes))
    for b_i in range(n_classes):
        for b_j in range(n_classes):
            # Actual per-node-pair counts for this block pair.
            pair_counts = [len(events) for events in block_pair_events[b_i][b_j]]
            # Node pairs with no events contribute a count of zero.
            pair_counts.extend([0] * (bp_size[b_i, b_j] - len(pair_counts)))
            counts_mean[b_i, b_j] = np.mean(pair_counts)
            counts_variance[b_i, b_j] = np.std(pair_counts) ** 2
    return counts_mean, counts_variance
def compute_block_pair_total_event_count(block_pair_events, n_classes):
    """
    Computes the total number of events in each block pair.

    :param block_pair_events: (list) n_classes x n_classes where entry ij is a list of event lists between nodes in
                              block i to nodes in block j.
    :param n_classes: (int) number of blocks / classes

    :return: n_classes x n_classes matrix where entry ij is the number of events in block pair ij
    """
    total_count = np.zeros((n_classes, n_classes))
    for b_i in range(n_classes):
        for b_j in range(n_classes):
            # Sum the lengths of all per-node-pair event lists in block pair (b_i, b_j).
            total_count[b_i, b_j] = sum(len(events) for events in block_pair_events[b_i][b_j])
    return total_count
def compute_prediction_mean_and_variance_for_block_pair_event_count(train_bp_mu, train_bp_alpha_beta_ratio,
                                                                    test_block_pair_events,
                                                                    train_node_membership, n_classes,
                                                                    train_duration, test_duration):
    """
    Computes sample mean and variance of block pair event counts

    :param train_bp_mu: KxK matrix of mu values fitted on the train dataset
    :param train_bp_alpha_beta_ratio: KxK matrix of the alpha-to-beta ratio (m) fitted on the train dataset
    :param test_block_pair_events: (list) n_classes x n_classes where entry ij is a list of event lists between nodes in
                                   block i to nodes in block j of the test dataset.
    :param train_node_membership: (list) membership of every node to one of K classes in the train dataset.
    :param n_classes: (int) number of blocks / classes
    :param train_duration: duration of the train dataset
    :param test_duration: duration of the test dataset

    :return: (sample_mean, sample_var, test_block_pair_event_count), each KxK
    """
    train_bp_size = utils.calc_block_pair_size(train_node_membership, n_classes)
    # Expected event count over the train duration: mu * T / (1 - m)
    # (presumably the Hawkes stationary mean — TODO confirm against the paper).
    sample_mean = (train_bp_mu * train_duration) / (1 - train_bp_alpha_beta_ratio)
    # Rescale from per-pair/train-duration to block-pair totals over the test duration.
    sample_mean = (sample_mean / train_duration) * test_duration * train_bp_size
    sample_var = (train_bp_mu * train_duration) / ((1 - train_bp_alpha_beta_ratio) ** 3)
    sample_var = (sample_var / train_duration) * test_duration * train_bp_size
    test_block_pair_event_count = compute_block_pair_total_event_count(test_block_pair_events, n_classes)
    return sample_mean, sample_var, test_block_pair_event_count
| 45.876543
| 120
| 0.662648
|
4a157c5cac14d06f43f8e7bb4f14087ede0633a0
| 19,868
|
py
|
Python
|
aiohttp/test_utils.py
|
shootkin/aiohttp
|
2d8621a62903a2db9d55b05485fa6895dd0fc59a
|
[
"Apache-2.0"
] | 2
|
2021-02-04T10:49:55.000Z
|
2021-02-04T10:50:31.000Z
|
aiohttp/test_utils.py
|
shootkin/aiohttp
|
2d8621a62903a2db9d55b05485fa6895dd0fc59a
|
[
"Apache-2.0"
] | 228
|
2020-10-17T22:31:34.000Z
|
2022-03-28T18:13:31.000Z
|
aiohttp/test_utils.py
|
shootkin/aiohttp
|
2d8621a62903a2db9d55b05485fa6895dd0fc59a
|
[
"Apache-2.0"
] | 1
|
2021-07-22T04:21:08.000Z
|
2021-07-22T04:21:08.000Z
|
"""Utilities shared by tests."""
import asyncio
import contextlib
import gc
import inspect
import ipaddress
import os
import socket
import sys
from abc import ABC, abstractmethod
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterator,
List,
Optional,
Type,
Union,
cast,
)
from unittest import mock
from aiosignal import Signal
from multidict import CIMultiDict, CIMultiDictProxy
from yarl import URL
import aiohttp
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
from . import ClientSession, hdrs
from .abc import AbstractCookieJar
from .client_reqrep import ClientResponse
from .client_ws import ClientWebSocketResponse
from .helpers import _SENTINEL, PY_38, sentinel
from .http import HttpVersion, RawRequestMessage
from .web import (
Application,
AppRunner,
BaseRunner,
Request,
Server,
ServerRunner,
SockSite,
UrlMappingMatchInfo,
)
from .web_protocol import _RequestHandler
if TYPE_CHECKING: # pragma: no cover
from ssl import SSLContext
else:
SSLContext = None
if PY_38:
from unittest import IsolatedAsyncioTestCase as TestCase
else:
from asynctest import TestCase # type: ignore[no-redef]
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
def get_unused_port_socket(
    host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
    """Bind a socket on *host* to an OS-chosen free port and return it."""
    # Requesting port 0 asks the OS to pick any unused port.
    wildcard_port = 0
    return get_port_socket(host, wildcard_port, family)
def get_port_socket(
    host: str, port: int, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
    """Create a TCP socket of the given family bound to (host, port)."""
    sock = socket.socket(family, socket.SOCK_STREAM)
    if REUSE_ADDRESS:
        # Windows has different semantics for SO_REUSEADDR,
        # so don't set it. Ref:
        # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
    return sock
def unused_port() -> int:
    """Return a port that is unused on the current host."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        # Binding to port 0 makes the OS assign a free ephemeral port.
        sock.bind(("127.0.0.1", 0))
        return sock.getsockname()[1]
class BaseTestServer(ABC):
    """Abstract base for aiohttp test servers.

    Owns a runner and a bound socket (created via ``socket_factory``) and
    exposes the server's root URL through :meth:`make_url`.
    """
    # Prevent pytest from collecting this class as a test case.
    __test__ = False
    def __init__(
        self,
        *,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self.runner = None  # type: Optional[BaseRunner]
        self._root = None  # type: Optional[URL]
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory
    async def start_server(self, **kwargs: Any) -> None:
        """Create the runner, bind a socket and start serving (idempotent)."""
        if self.runner:
            return
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            # Port 0 lets the OS choose a free port.
            self.port = 0
        absolute_host = self.host
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            # Not an IP literal (e.g. a hostname) — fall back to IPv4.
            version = 4
        if version == 6:
            # IPv6 literals must be bracketed when embedded in a URL.
            absolute_host = f"[{self.host}]"
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        # Re-read the actual port from the started server — the OS assigns
        # one when port 0 was requested.
        server = site._server
        assert server is not None
        sockets = server.sockets
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if self.scheme is sentinel:
            if self._ssl:
                scheme = "https"
            else:
                scheme = "http"
            self.scheme = scheme
        self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")
    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Create and return the runner that serves this test server."""
        pass
    def make_url(self, path: str) -> URL:
        """Return an absolute URL for *path* on this server."""
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)
    @property
    def started(self) -> bool:
        return self.runner is not None
    @property
    def closed(self) -> bool:
        return self._closed
    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server
    async def close(self) -> None:
        """Close all fixtures created by the test client.
        After that point, the TestClient is no longer usable.
        This is an idempotent function: running close multiple times
        will not have any additional effects.
        close is also run when the object is garbage collected, and on
        exit when used as a context manager.
        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True
    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server()
        return self
    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
class TestServer(BaseTestServer):
    """Test server that serves an :class:`aiohttp.web.Application`."""
    def __init__(
        self,
        app: Application,
        *,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Wrap the application in an AppRunner."""
        return AppRunner(self.app, **kwargs)
class RawTestServer(BaseTestServer):
    """Test server that serves a bare request handler (no Application)."""
    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)
    async def _make_runner(self, **kwargs: Any) -> ServerRunner:
        # NOTE(review): kwargs are forwarded to both Server and ServerRunner —
        # confirm both accept the same option set.
        srv = Server(self._handler, **kwargs)
        return ServerRunner(srv, **kwargs)
class TestClient:
    """
    A test client implementation.
    To write functional tests for aiohttp based servers.
    """
    # Prevent pytest from collecting this class as a test case.
    __test__ = False
    def __init__(
        self,
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None:
        if not isinstance(server, BaseTestServer):
            raise TypeError(
                "server must be TestServer " "instance, found type: %r" % type(server)
            )
        self._server = server
        if cookie_jar is None:
            # unsafe=True so cookies also work for bare-IP hosts (127.0.0.1).
            cookie_jar = aiohttp.CookieJar(unsafe=True)
        self._session = ClientSession(cookie_jar=cookie_jar, **kwargs)
        self._closed = False
        # Responses and websockets are recorded so close() can release them.
        self._responses = []  # type: List[ClientResponse]
        self._websockets = []  # type: List[ClientWebSocketResponse]
    async def start_server(self) -> None:
        await self._server.start_server()
    @property
    def scheme(self) -> Union[str, object]:
        return self._server.scheme
    @property
    def host(self) -> str:
        return self._server.host
    @property
    def port(self) -> Optional[int]:
        return self._server.port
    @property
    def server(self) -> BaseTestServer:
        return self._server
    @property
    def app(self) -> Optional[Application]:
        return cast(Optional[Application], getattr(self._server, "app", None))
    @property
    def session(self) -> ClientSession:
        """An internal aiohttp.ClientSession.
        Unlike the methods on the TestClient, client session requests
        do not automatically include the host in the url queried, and
        will require an absolute path to the resource.
        """
        return self._session
    def make_url(self, path: str) -> URL:
        """Return an absolute URL for *path* on the wrapped test server."""
        return self._server.make_url(path)
    async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
        resp = await self._session.request(method, self.make_url(path), **kwargs)
        # save it to close later
        self._responses.append(resp)
        return resp
    def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
        """Routes a request to tested http server.
        The interface is identical to aiohttp.ClientSession.request,
        except the loop kwarg is overridden by the instance used by the
        test server.
        """
        return _RequestContextManager(self._request(method, path, **kwargs))
    def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP GET request."""
        return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
    def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP POST request."""
        return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
    def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP OPTIONS request."""
        return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
    def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP HEAD request."""
        return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
    def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP PUT request."""
        return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
    def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP PATCH request."""
        return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
    def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
        """Perform an HTTP DELETE request."""
        return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
    def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
        """Initiate websocket connection.
        The api corresponds to aiohttp.ClientSession.ws_connect.
        """
        return _WSRequestContextManager(self._ws_connect(path, **kwargs))
    async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
        self._websockets.append(ws)
        return ws
    async def close(self) -> None:
        """Close all fixtures created by the test client.
        After that point, the TestClient is no longer usable.
        This is an idempotent function: running close multiple times
        will not have any additional effects.
        close is also run on exit when used as a(n) (asynchronous)
        context manager.
        """
        if not self._closed:
            for resp in self._responses:
                resp.close()
            for ws in self._websockets:
                await ws.close()
            await self._session.close()
            await self._server.close()
            self._closed = True
    async def __aenter__(self) -> "TestClient":
        await self.start_server()
        return self
    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self.close()
class AioHTTPTestCase(TestCase):
    """A base class to allow for unittest web applications using
    aiohttp.
    Provides the following:
    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
    application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
    self.get_application()
    Note that the TestClient's methods are asynchronous: you have to
    execute function on the test client using asynchronous methods.
    """
    async def get_application(self) -> Application:
        """
        This method should be overridden
        to return the aiohttp.web.Application
        object to test.
        """
        return self.get_app()
    def get_app(self) -> Application:
        """Obsolete method used to constructing web application.
        Use .get_application() coroutine instead
        """
        raise RuntimeError("Did you forget to define get_application()?")
    def setUp(self) -> None:
        try:
            # Reuse the running loop when invoked from an async test runner.
            self.loop = asyncio.get_running_loop()
        except RuntimeError:
            self.loop = asyncio.get_event_loop_policy().get_event_loop()
        self.loop.run_until_complete(self.setUpAsync())
    async def setUpAsync(self) -> None:
        # Build app -> server -> client, then start serving.
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)
        await self.client.start_server()
    def tearDown(self) -> None:
        self.loop.run_until_complete(self.tearDownAsync())
    async def tearDownAsync(self) -> None:
        # Closing the client also closes the underlying server.
        await self.client.close()
    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app)
    async def get_client(self, server: TestServer) -> TestClient:
        """Return a TestClient instance."""
        return TestClient(server)
# Factory signature used by the test-loop helpers below.
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.
    Handles the creation and cleanup of a test loop.
    """
    loop = setup_test_loop(loop_factory)
    yield loop
    # NOTE(review): no try/finally here, so teardown is skipped when the
    # body raises -- this matches the historical behavior.
    teardown_test_loop(loop, fast=fast)
def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create and return an asyncio.BaseEventLoop
    instance.
    The caller should also call teardown_test_loop,
    once they are done with the loop.
    """
    loop = loop_factory()
    try:
        module = loop.__class__.__module__
        # uvloop handles subprocess watching itself, so asyncio's child
        # watcher must not be attached to it.
        skip_watcher = "uvloop" in module
    except AttributeError:  # pragma: no cover
        # Just in case
        skip_watcher = True
    asyncio.set_event_loop(loop)
    # Child watchers only exist on POSIX platforms.
    if sys.platform != "win32" and not skip_watcher:
        policy = asyncio.get_event_loop_policy()
        watcher: asyncio.AbstractChildWatcher
        try:  # Python >= 3.8
            # Refs:
            # * https://github.com/pytest-dev/pytest-xdist/issues/620
            # * https://stackoverflow.com/a/58614689/595220
            # * https://bugs.python.org/issue35621
            # * https://github.com/python/cpython/pull/14344
            watcher = asyncio.ThreadedChildWatcher()
        except AttributeError:  # Python < 3.8
            watcher = asyncio.SafeChildWatcher()
            watcher.attach_loop(loop)
        with contextlib.suppress(NotImplementedError):
            policy.set_child_watcher(watcher)
    return loop
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Stop, close and detach an event loop created by setup_test_loop.

    :param loop: the loop to tear down.
    :param fast: when True, skip the garbage-collection pass.
    """
    if not loop.is_closed():
        # Schedule a stop, let the loop process it, then close.
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()
    if not fast:
        gc.collect()
    asyncio.set_event_loop(None)
def _create_app_mock() -> mock.MagicMock:
    """Build a MagicMock standing in for a web application.

    The mock supports ``app[key]`` item access backed by a private dict
    and carries a frozen ``on_response_prepare`` signal.
    """
    def _getitem(app: Any, key: str) -> Any:
        return app.__app_dict[key]
    def _setitem(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value
    app = mock.MagicMock()
    app.__app_dict = {}
    app.__getitem__ = _getitem
    app.__setitem__ = _setitem
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
transport = mock.Mock()
def get_extra_info(key: str) -> Optional[SSLContext]:
if key == "sslcontext":
return sslcontext
else:
return None
transport.get_extra_info.side_effect = get_extra_info
return transport
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: Any = sentinel,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024 ** 2,
    loop: Any = ...,
) -> Request:
    """Creates mocked web.Request testing purposes.
    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.
    Any argument left at its sentinel/default is replaced with a mock;
    pass real objects to exercise specific code paths.
    """
    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()
        # NOTE(review): futures created from this mock loop are plain
        # tuples -- adequate only for tests that never await them;
        # confirm before relying on create_future() here.
        loop.create_future.return_value = ()
    # HTTP/1.0 and older do not keep connections alive.
    if version < HttpVersion(1, 1):
        closing = True
    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        # Raw header bytes mirror the parsed multidict.
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()
    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()
    if transport is sentinel:
        transport = _create_transport(sslcontext)
    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport
    if writer is sentinel:
        # The writer's async methods are mocked coroutines so handlers
        # can await them without error.
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport
    protocol.transport = transport
    protocol.writer = writer
    if payload is sentinel:
        payload = mock.Mock()
    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )
    # Attach a match_info so routing-dependent code works on the mock.
    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info
    return req
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Create a mock whose calls behave like an awaitable coroutine.

    When *raise_exception* is given, awaiting the mock raises it.
    When *return_value* is itself awaitable it is awaited (its result
    is discarded); otherwise it is returned as-is.
    """
    async def coro_stub(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if inspect.isawaitable(return_value):
            # Await but deliberately discard the awaited result --
            # matches the historical behavior (yields None here).
            await return_value
            return None
        return return_value
    return mock.Mock(wraps=coro_stub)
| 29.787106
| 107
| 0.636451
|
4a157ca83f1569e902e2d8787625d514504365bd
| 2,087
|
py
|
Python
|
pypy/objspace/std/test/test_identitydict.py
|
olliemath/pypy
|
8b873bd0b8bf76075aba3d915c260789f26f5788
|
[
"Apache-2.0",
"OpenSSL"
] | 1
|
2021-06-02T23:02:09.000Z
|
2021-06-02T23:02:09.000Z
|
pypy/objspace/std/test/test_identitydict.py
|
olliemath/pypy
|
8b873bd0b8bf76075aba3d915c260789f26f5788
|
[
"Apache-2.0",
"OpenSSL"
] | 1
|
2021-03-30T18:08:41.000Z
|
2021-03-30T18:08:41.000Z
|
pypy/objspace/std/test/test_identitydict.py
|
olliemath/pypy
|
8b873bd0b8bf76075aba3d915c260789f26f5788
|
[
"Apache-2.0",
"OpenSSL"
] | 1
|
2022-03-30T11:42:37.000Z
|
2022-03-30T11:42:37.000Z
|
import py
from pypy.interpreter.gateway import interp2app
class AppTestIdentityDict(object):
    """App-level tests for the identity-dict strategy of dict objects."""
    def setup_class(cls):
        # interp2app-based helpers cannot run when tests execute
        # directly on the host interpreter.
        if cls.runappdirect:
            py.test.skip("interp2app doesn't work on appdirect")
    def w_uses_identity_strategy(self, obj):
        # NOTE(review): the w_ prefix presumably exposes this helper to
        # app-level code as self.uses_identity_strategy -- confirm
        # against pypy's AppTest machinery.
        import __pypy__
        return "IdentityDictStrategy" in __pypy__.internal_repr(obj)
    def test_use_strategy(self):
        # A dict keyed only by plain user-class instances should adopt
        # the identity strategy.
        class X(object):
            pass
        d = {}
        x = X()
        d[x] = 1
        assert self.uses_identity_strategy(d)
        assert d[x] == 1
    def test_bad_item(self):
        # Adding a value whose class customizes __hash__ must demote
        # the dict from the identity strategy.
        class X(object):
            pass
        class Y(object):
            def __hash__(self):
                return 32
        d = {}
        x = X()
        y = Y()
        d[x] = 1
        assert self.uses_identity_strategy(d)
        d[y] = 2
        assert not self.uses_identity_strategy(d)
        assert d[x] == 1
        assert d[y] == 2
    def test_bad_key(self):
        # Looking up with a custom-__eq__ key must also demote the dict.
        class X(object):
            pass
        d = {}
        x = X()
        class Y(object):
            def __hash__(self):
                return hash(x)  # to make sure we do x == y
            def __eq__(self, other):
                return True
        y = Y()
        d[x] = 1
        assert self.uses_identity_strategy(d)
        assert d[y] == 1
        assert not self.uses_identity_strategy(d)
    def test_iter(self):
        class X(object):
            pass
        x = X()
        d = {x: 1}
        assert self.uses_identity_strategy(d)
        assert list(iter(d)) == [x]
    def test_mutate_class_and_then_compare(self):
        # Mutating a class after insertion only affects dicts that are
        # subsequently looked up with the mutated type.
        class X(object):
            pass
        class Y(object):
            pass
        x = X()
        y = Y()
        d1 = {x: 1}
        d2 = {y: 1}
        assert self.uses_identity_strategy(d1)
        assert self.uses_identity_strategy(d2)
        #
        X.__hash__ = lambda self: hash(y)
        X.__eq__ = lambda self, other: True
        #
        assert d1 == d2
        assert self.uses_identity_strategy(d1)
        assert not self.uses_identity_strategy(d2)
| 24.267442
| 68
| 0.519885
|
4a157d77c5e645f8e17ca55591ceaf7b884347e3
| 5,273
|
py
|
Python
|
cride/users/migrations/0001_initial.py
|
mpita/cride
|
5e8afb710d3ac704c9c3819e0f7ff762e60948ff
|
[
"MIT"
] | 1
|
2019-04-09T07:15:46.000Z
|
2019-04-09T07:15:46.000Z
|
cride/users/migrations/0001_initial.py
|
mpita/cride
|
5e8afb710d3ac704c9c3819e0f7ff762e60948ff
|
[
"MIT"
] | 7
|
2020-06-05T19:54:39.000Z
|
2022-03-11T23:41:06.000Z
|
cride/users/migrations/0001_initial.py
|
mpita/cride
|
5e8afb710d3ac704c9c3819e0f7ff762e60948ff
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.10 on 2019-02-24 21:49
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Initial schema for the users app: a custom User model (AbstractUser
    # style fields plus unique email, phone_number, is_client and
    # is_verified flags) and a one-to-one Profile.
    initial = True
    dependencies = [
        ('auth', '0009_alter_user_last_name_max_length'),
    ]
    operations = [
        # Custom user model; reuses the stock UserManager.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('created', models.DateTimeField(auto_now_add=True, help_text='Date time on which the object was created.', verbose_name='created at')),
                ('modified', models.DateTimeField(auto_now=True, help_text='Date time on which the object was last modified.', verbose_name='modified at')),
                ('email', models.EmailField(error_messages={'unique': 'A user with that email already exists.'}, max_length=254, unique=True, verbose_name='email address')),
                ('phone_number', models.CharField(blank=True, max_length=17, validators=[django.core.validators.RegexValidator(message='Phone number must be entered in the format: +999999999. Up to 15 digits allowed.', regex='\\+?1?\\d{9,15}$')])),
                ('is_client', models.BooleanField(default=True, help_text='Help easily distinguish users and perform queries. Clients are the main type of user.', verbose_name='client')),
                ('is_verified', models.BooleanField(default=True, help_text='Set to true when the user have verified its email address.', verbose_name='verified')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'ordering': ['-created', '-modified'],
                'get_latest_by': 'created',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Per-user profile with ride statistics and reputation score.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True, help_text='Date time on which the object was created.', verbose_name='created at')),
                ('modified', models.DateTimeField(auto_now=True, help_text='Date time on which the object was last modified.', verbose_name='modified at')),
                ('picture', models.ImageField(blank=True, null=True, upload_to='users/pictures/', verbose_name='profile picture')),
                ('biography', models.TextField(blank=True, max_length=500)),
                ('rides_taken', models.PositiveIntegerField(default=0)),
                ('rides_offered', models.PositiveIntegerField(default=0)),
                ('reputation', models.FloatField(default=5.0, help_text="User's reputation based on the rides taken and offered.")),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created', '-modified'],
                'get_latest_by': 'created',
                'abstract': False,
            },
        ),
    ]
| 73.236111
| 329
| 0.658259
|
4a157e5fa5b27f99ea3ac507b06cb7cee3e10104
| 475
|
py
|
Python
|
src/ui/validate_text.py
|
KenyC/Shajara
|
ec5ab94e92cf154aef63ac2278fddff901f6328c
|
[
"MIT"
] | null | null | null |
src/ui/validate_text.py
|
KenyC/Shajara
|
ec5ab94e92cf154aef63ac2278fddff901f6328c
|
[
"MIT"
] | null | null | null |
src/ui/validate_text.py
|
KenyC/Shajara
|
ec5ab94e92cf154aef63ac2278fddff901f6328c
|
[
"MIT"
] | null | null | null |
from PyQt5.QtWidgets import QTextEdit, QApplication
from PyQt5.QtCore import Qt
class ValidateText(QTextEdit):
    """Text edit whose Return/Enter key copies its content to the clipboard.

    Subclasses may override treat_event() to intercept key events before
    the copy-on-Enter behavior runs.
    """
    def keyPressEvent(self, event):
        """Copy the plain text to the clipboard on Return/Enter presses.

        All other keys fall through to the default QTextEdit handling.
        """
        if self.treat_event(event):
            # Event was consumed by a subclass hook.
            return
        elif event.key() == Qt.Key_Return or event.key() == Qt.Key_Enter:
            # BUGFIX: the second comparison previously re-tested
            # Qt.Key_Return, so the keypad Enter key (Qt.Key_Enter)
            # fell through to the default handler.
            QApplication.clipboard().setText(self.toPlainText())
        else:
            super(ValidateText, self).keyPressEvent(event)
    def treat_event(self, event):
        """Hook for subclasses; return True to swallow the event."""
        return False
| 25
| 69
| 0.738947
|
4a157e95bde7b19a396e4751fa9932ca2e063327
| 4,364
|
py
|
Python
|
django_shop/apps/trade/migrations/0001_initial.py
|
XZH950926/mydjango
|
a6f7ecceda251f20778124a81b3e71e98bab2a08
|
[
"Apache-2.0"
] | null | null | null |
django_shop/apps/trade/migrations/0001_initial.py
|
XZH950926/mydjango
|
a6f7ecceda251f20778124a81b3e71e98bab2a08
|
[
"Apache-2.0"
] | null | null | null |
django_shop/apps/trade/migrations/0001_initial.py
|
XZH950926/mydjango
|
a6f7ecceda251f20778124a81b3e71e98bab2a08
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.0 on 2018-11-08 20:34
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the trade app: shipping addresses, order line
    # items (OrderGoods) and order headers (OrderInfo).
    initial = True
    dependencies = [
        ('goods', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Per-user shipping address book entry.
        migrations.CreateModel(
            name='addressModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('province', models.CharField(default='', max_length=30, verbose_name='省份')),
                ('city', models.CharField(default='', max_length=30, verbose_name='市')),
                ('district', models.CharField(default='', max_length=100, verbose_name='区域')),
                ('address', models.CharField(default='', max_length=100, verbose_name='详细地址')),
                ('signer_name', models.CharField(default='', max_length=100, verbose_name='签收人')),
                ('signer_mobile', models.CharField(default='', max_length=100, verbose_name='联系电话')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
            ],
            options={
                'verbose_name': '签收地址',
                'verbose_name_plural': '签收地址',
            },
        ),
        # Order line item; its FK to the order is added further below.
        migrations.CreateModel(
            name='OrderGoods',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('goods_num', models.IntegerField(default=0, verbose_name='商品数量')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('goods', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品')),
            ],
            options={
                'verbose_name': '订单商品',
                'verbose_name_plural': '订单商品',
            },
        ),
        # Order header carrying payment state and shipping snapshot.
        migrations.CreateModel(
            name='OrderInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order_sn', models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单编号')),
                ('nonce_str', models.CharField(blank=True, max_length=50, null=True, unique=True, verbose_name='随机加密串')),
                ('trade_no', models.CharField(blank=True, max_length=100, null=True, unique=True, verbose_name='交易号')),
                ('pay_status', models.CharField(choices=[('TRADE_SUCCESS', '成功'), ('TRADE_CLOSED', '超时关闭'), ('WAIT_BUYER_PAY', '交易创建'), ('TRADE_FINISHED', '交易结束'), ('paying', '待支付')], default='paying', max_length=30, verbose_name='订单状态')),
                ('pay_type', models.CharField(choices=[('alipay', '支付宝'), ('wechat', '微信')], default='alipay', max_length=10, verbose_name='支付类型')),
                ('post_script', models.CharField(max_length=200, verbose_name='订单留言')),
                ('order_mount', models.FloatField(default=0.0, verbose_name='订单金额')),
                ('pay_time', models.DateTimeField(blank=True, null=True, verbose_name='支付时间')),
                ('address', models.CharField(default='', max_length=100, verbose_name='收货地址')),
                ('signer_name', models.CharField(default='', max_length=20, verbose_name='签收人')),
                ('singer_mobile', models.CharField(max_length=11, verbose_name='联系电话')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
            ],
            options={
                'verbose_name': '订单信息',
                'verbose_name_plural': '订单信息',
            },
        ),
        # Link each OrderGoods row to its OrderInfo (added after both
        # models exist to break the creation-order dependency).
        migrations.AddField(
            model_name='ordergoods',
            name='order',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='goods', to='trade.OrderInfo', verbose_name='订单信息'),
        ),
    ]
| 55.240506
| 239
| 0.59945
|
4a157ed10f68c31a89ebf3cc0d961156cf9bd43d
| 20,806
|
py
|
Python
|
myems-api/core/costcenter.py
|
18600575648/myems
|
38ab7d509b5ab275a4df0333e6256c586abdfbf9
|
[
"MIT"
] | null | null | null |
myems-api/core/costcenter.py
|
18600575648/myems
|
38ab7d509b5ab275a4df0333e6256c586abdfbf9
|
[
"MIT"
] | null | null | null |
myems-api/core/costcenter.py
|
18600575648/myems
|
38ab7d509b5ab275a4df0333e6256c586abdfbf9
|
[
"MIT"
] | null | null | null |
import falcon
import simplejson as json
import mysql.connector
import config
import uuid
from core.useractivity import user_logger, access_control
class CostCenterCollection:
    """Falcon resource for the cost-center collection endpoint:
    list all cost centers (GET) and create a new one (POST)."""
    @staticmethod
    def __init__():
        """Initializes CostCenterCollection."""
        pass
    @staticmethod
    def on_options(req, resp):
        # Answer OPTIONS with a bare 200 (no body).
        resp.status = falcon.HTTP_200
    @staticmethod
    def on_get(req, resp):
        """Handles GET requests: return all cost centers ordered by id."""
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        query = (" SELECT id, name, uuid, external_id "
                 " FROM tbl_cost_centers "
                 " ORDER BY id")
        cursor.execute(query)
        rows = cursor.fetchall()
        cursor.close()
        cnx.close()
        result = list()
        if rows is not None and len(rows) > 0:
            for row in rows:
                meta_result = {"id": row[0], "name": row[1], "uuid": row[2], "external_id": row[3]}
                result.append(meta_result)
        resp.text = json.dumps(result)
    @staticmethod
    @user_logger
    def on_post(req, resp):
        """Handles POST requests: create a cost center.

        Requires a non-empty data.name; data.external_id is optional.
        Both must be unique; the uuid is generated server-side.
        """
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            # NOTE(review): positional args here pass 'API.ERROR' as the
            # title and the exception object (not a str) as the
            # description -- inconsistent with the keyword style used by
            # the other handlers in this module.
            raise falcon.HTTPError(falcon.HTTP_400, 'API.ERROR', ex)
        new_values = json.loads(raw_json)
        if 'name' not in new_values['data'].keys() or len(new_values['data']['name']) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_NAME_VALUE')
        name = str.strip(new_values['data']['name'])
        # external_id is optional; missing/blank values are stored as NULL.
        if 'external_id' in new_values['data'].keys() and \
                new_values['data']['external_id'] is not None and \
                len(str(new_values['data']['external_id'])) > 0:
            external_id = str.strip(new_values['data']['external_id'])
        else:
            external_id = None
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        # Enforce unique name.
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE name = %s ", (name, ))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.COST_CENTER_NAME_EXISTS')
        # Enforce unique external_id (only when one was supplied).
        if external_id is not None:
            cursor.execute(" SELECT name "
                           " FROM tbl_cost_centers "
                           " WHERE external_id = %s ", (external_id, ))
            if cursor.fetchone() is not None:
                cursor.close()
                cnx.close()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.COST_CENTER_EXTERNAL_ID_EXISTS')
        add_row = (" INSERT INTO tbl_cost_centers "
                   " (name, uuid, external_id) "
                   " VALUES (%s, %s, %s) ")
        cursor.execute(add_row, (name,
                                 str(uuid.uuid4()),
                                 external_id,))
        new_id = cursor.lastrowid
        cnx.commit()
        cursor.close()
        cnx.close()
        resp.status = falcon.HTTP_201
        resp.location = '/costcenters/' + str(new_id)
class CostCenterItem:
    """Falcon resource for a single cost center (/costcenters/{id_}):
    read (GET), delete (DELETE) and update (PUT).

    External interface (class name, handler signatures, status codes and
    error payloads) is unchanged, except for one bug fix noted below.
    """

    # (table, error code) pairs checked before deletion: a cost center
    # still referenced by any of these tables must not be removed.
    # Table names are constants from this tuple, never user input, so
    # the string concatenation in on_delete is not an injection risk.
    _RELATION_CHECKS = (
        ('tbl_equipments', 'API.THERE_IS_RELATION_WITH_EQUIPMENTS'),
        ('tbl_combined_equipments', 'API.THERE_IS_RELATION_WITH_COMBINED_EQUIPMENTS'),
        ('tbl_cost_centers_tariffs', 'API.THERE_IS_RELATION_WITH_TARIFFS'),
        ('tbl_meters', 'API.THERE_IS_RELATION_WITH_METERS'),
        ('tbl_offline_meters', 'API.THERE_IS_RELATION_WITH_OFFLINE_METERS'),
        # BUGFIX: this check previously reported
        # API.THERE_IS_RELATION_WITH_OFFLINE_METERS (copy-paste error).
        ('tbl_virtual_meters', 'API.THERE_IS_RELATION_WITH_VIRTUAL_METERS'),
        ('tbl_tenants', 'API.THERE_IS_RELATION_WITH_TENANTS'),
        ('tbl_stores', 'API.THERE_IS_RELATION_WITH_STORES'),
        ('tbl_spaces', 'API.THERE_IS_RELATION_WITH_SPACES'),
        ('tbl_shopfloors', 'API.THERE_IS_RELATION_WITH_SHOPFLOORS'),
    )

    @staticmethod
    def __init__():
        """Initializes CostCenterItem."""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # Answer OPTIONS with a bare 200 (no body).
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """Handles GET requests: return one cost center by id.

        Raises 400 for a non-positive/non-numeric id, 404 when missing.
        """
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        query = (" SELECT id, name, uuid, external_id "
                 " FROM tbl_cost_centers "
                 " WHERE id = %s ")
        cursor.execute(query, (id_,))
        row = cursor.fetchone()
        cursor.close()
        cnx.close()
        if row is None:
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COST_CENTER_NOT_FOUND')
        result = {"id": row[0], "name": row[1], "uuid": row[2], "external_id": row[3]}
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_):
        """Handles DELETE requests.

        Refuses to delete while any table in _RELATION_CHECKS still
        references this cost center (400 with a table-specific error
        code); 404 when the cost center does not exist.
        """
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COST_CENTER_NOT_FOUND')
        # Table-driven replacement for ten copy-pasted per-table checks.
        for table, error_code in CostCenterItem._RELATION_CHECKS:
            cursor.execute(" SELECT id "
                           " FROM " + table +
                           " WHERE cost_center_id = %s ", (id_,))
            rows = cursor.fetchall()
            if rows is not None and len(rows) > 0:
                cursor.close()
                cnx.close()
                raise falcon.HTTPError(falcon.HTTP_400,
                                       title='API.BAD_REQUEST',
                                       description=error_code)
        cursor.execute(" DELETE FROM tbl_cost_centers WHERE id = %s ", (id_,))
        cnx.commit()
        cursor.close()
        cnx.close()
        resp.status = falcon.HTTP_204

    @staticmethod
    @user_logger
    def on_put(req, resp, id_):
        """Handles PUT requests: update name and external_id.

        Raises 400 on invalid input or uniqueness violations, 404 when
        the cost center does not exist.
        """
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=ex)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        new_values = json.loads(raw_json)
        if 'name' not in new_values['data'].keys() or len(new_values['data']['name']) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_NAME_VALUE')
        name = str.strip(new_values['data']['name'])
        # external_id is optional; missing/blank values are stored as NULL.
        if 'external_id' in new_values['data'].keys() and \
                new_values['data']['external_id'] is not None and \
                len(str(new_values['data']['external_id'])) > 0:
            external_id = str.strip(new_values['data']['external_id'])
        else:
            external_id = None
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COST_CENTER_NOT_FOUND')
        # Reject a name already used by a *different* cost center.
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE name = %s AND id != %s ",
                       (name, id_, ))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.COST_CENTER_NAME_EXISTS')
        # Reject an external_id already used by a *different* cost center.
        if external_id is not None:
            cursor.execute(" SELECT name "
                           " FROM tbl_cost_centers "
                           " WHERE external_id = %s AND id != %s ",
                           (external_id, id_, ))
            if cursor.fetchone() is not None:
                cursor.close()
                cnx.close()
                raise falcon.HTTPError(falcon.HTTP_400,
                                       title='API.BAD_REQUEST',
                                       description='API.COST_CENTER_EXTERNAL_ID_EXISTS')
        # (A second, redundant existence check was removed here -- the
        # row's presence was already verified above.)
        update_row = (" UPDATE tbl_cost_centers "
                      " SET name = %s, external_id = %s "
                      " WHERE id = %s ")
        cursor.execute(update_row, (name,
                                    external_id,
                                    id_,))
        cnx.commit()
        cursor.close()
        cnx.close()
        resp.status = falcon.HTTP_200
class CostCenterTariffCollection:
    """Falcon resource for the tariffs bound to one cost center:
    list them (GET) and associate a new tariff (POST)."""
    @staticmethod
    def __init__():
        """Initializes CostCenterTariffCollection."""
        pass
    @staticmethod
    def on_options(req, resp, id_):
        # Answer OPTIONS with a bare 200 (no body).
        resp.status = falcon.HTTP_200
    @staticmethod
    def on_get(req, resp, id_):
        """Handles GET requests: list tariffs linked to cost center id_."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        # Join through the association table to fetch tariff details.
        query = (" SELECT t.id, t.name, t.uuid, "
                 "        t.tariff_type, t.unit_of_price "
                 " FROM tbl_tariffs t, tbl_cost_centers_tariffs ct "
                 " WHERE t.id = ct.tariff_id AND ct.cost_center_id = %s "
                 " ORDER BY t.name ")
        cursor.execute(query, (id_,))
        rows = cursor.fetchall()
        cursor.close()
        cnx.close()
        result = list()
        if rows is not None and len(rows) > 0:
            for row in rows:
                meta_result = {"id": row[0],
                               "name": row[1],
                               "uuid": row[2],
                               "tariff_type": row[3],
                               "unit_of_price": row[4]}
                result.append(meta_result)
        resp.text = json.dumps(result)
    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: associate data.tariff_id with id_.

        NOTE(review): data.tariff_id is read without presence/type
        validation -- a malformed body raises KeyError instead of a
        clean 400; verify against the project's error conventions.
        """
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=ex)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        new_values = json.loads(raw_json)
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COST_CENTER_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_tariffs "
                       " WHERE id = %s ", (new_values['data']['tariff_id'],))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.TARIFF_NOT_FOUND')
        # Reject a duplicate association.
        cursor.execute(" SELECT id "
                       " FROM tbl_cost_centers_tariffs "
                       " WHERE cost_center_id = %s AND tariff_id = %s ", (id_, new_values['data']['tariff_id']))
        rows = cursor.fetchall()
        if rows is not None and len(rows) > 0:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.TARIFF_IS_ALREADY_ASSOCIATED_WITH_COST_CENTER')
        add_row = (" INSERT INTO tbl_cost_centers_tariffs "
                   " (cost_center_id, tariff_id) "
                   " VALUES (%s, %s) ")
        cursor.execute(add_row, (id_, new_values['data']['tariff_id'],))
        cnx.commit()
        cursor.close()
        cnx.close()
        resp.status = falcon.HTTP_201
        resp.location = '/costcenters/' + str(id_) + '/tariffs/' + str(new_values['data']['tariff_id'])
class CostCenterTariffItem:
    """Falcon resource for one cost-center/tariff association:
    remove the link (DELETE)."""
    @staticmethod
    def __init__():
        """Initializes CostCenterTariffItem."""
        pass
    @staticmethod
    def on_options(req, resp, id_, tid):
        # Answer OPTIONS with a bare 200 (no body).
        resp.status = falcon.HTTP_200
    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, tid):
        """Handles DELETE requests: unlink tariff tid from cost center id_.

        Raises 400 on invalid ids, 404 when either side or the
        association itself does not exist.
        """
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        if not tid.isdigit() or int(tid) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_TARIFF_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COST_CENTER_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_tariffs "
                       " WHERE id = %s ", (tid,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.TARIFF_NOT_FOUND')
        # Verify the association exists before deleting it.
        cursor.execute(" SELECT id "
                       " FROM tbl_cost_centers_tariffs "
                       " WHERE cost_center_id = %s AND tariff_id = %s ", (id_, tid))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.close()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.TARIFF_IS_NOT_ASSOCIATED_WITH_COST_CENTER')
        cursor.execute(" DELETE FROM tbl_cost_centers_tariffs "
                       " WHERE cost_center_id = %s AND tariff_id = %s ", (id_, tid))
        cnx.commit()
        cursor.close()
        cnx.close()
        resp.status = falcon.HTTP_204
| 38.744879
| 112
| 0.520907
|
4a157f1467ac92ac6ef44cb911cfaf74b9b1f4a7
| 134
|
py
|
Python
|
misc/hello/hello.py
|
HenrikSamuelsson/advent-of-code
|
728907af50728148794b3897f6713504e84b9d9d
|
[
"MIT"
] | null | null | null |
misc/hello/hello.py
|
HenrikSamuelsson/advent-of-code
|
728907af50728148794b3897f6713504e84b9d9d
|
[
"MIT"
] | null | null | null |
misc/hello/hello.py
|
HenrikSamuelsson/advent-of-code
|
728907af50728148794b3897f6713504e84b9d9d
|
[
"MIT"
] | null | null | null |
"""
Hello world application.
Mostly just to test the Python setup on current computer.
"""
MESSSAGE = "Hello World"
print(MESSSAGE)
| 14.888889
| 57
| 0.738806
|
4a15807fa46cf0888a60a3e7e5ffc6f98b053c6d
| 176
|
py
|
Python
|
place/models/type.py
|
Kradukman/beesUlb
|
1234658af3aff7d2f580212c01d8acec96167078
|
[
"MIT"
] | null | null | null |
place/models/type.py
|
Kradukman/beesUlb
|
1234658af3aff7d2f580212c01d8acec96167078
|
[
"MIT"
] | null | null | null |
place/models/type.py
|
Kradukman/beesUlb
|
1234658af3aff7d2f580212c01d8acec96167078
|
[
"MIT"
] | null | null | null |
from odoo import api, fields, models, _
class PlaceType(models.Model):
    """Odoo model describing a category ("type") of place."""
    _name = "place.type"
    _description = "Place Type"
    # Display name of the place type; mandatory.
    name = fields.Char('Name', required=True)
| 22
| 45
| 0.681818
|
4a158082c058c807140f2fdee5311c482268437f
| 5,159
|
py
|
Python
|
test/posix/integration/syslog_default/test.py
|
paulyc/IncludeOS
|
5c82bad4a22838bc2219fbadef57d94f006b4760
|
[
"Apache-2.0"
] | null | null | null |
test/posix/integration/syslog_default/test.py
|
paulyc/IncludeOS
|
5c82bad4a22838bc2219fbadef57d94f006b4760
|
[
"Apache-2.0"
] | null | null | null |
test/posix/integration/syslog_default/test.py
|
paulyc/IncludeOS
|
5c82bad4a22838bc2219fbadef57d94f006b4760
|
[
"Apache-2.0"
] | 1
|
2021-06-16T22:48:53.000Z
|
2021-06-16T22:48:53.000Z
|
#! /usr/bin/env python
import sys
import os
includeos_src = os.environ.get('INCLUDEOS_SRC',
os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))).split('/test')[0])
print 'includeos_src: {0}'.format(includeos_src)
sys.path.insert(0,includeos_src)
from vmrunner import vmrunner
vm = vmrunner.vms[0]
# Running tally of matched output lines; compared against expected_outputs at the end.
num_outputs = 0
# ANSI color escape sequences as regex-escaped strings (these are matched
# against the VM's colorized syslog output, hence the escaped brackets).
EMERG_C = "\033\\[38;5;1m" # RED
ALERT_C = "\033\\[38;5;160m" # RED (lighter)
CRIT_C = "\033\\[38;5;196m" # RED (even lighter)
ERR_C = "\033\\[38;5;208m" # DARK YELLOW
WARNING_C = "\033\\[93m" # YELLOW
NOTICE_C = "\033\\[92m" # GREEN
INFO_C = "\033\\[96m" # TURQUOISE
DEBUG_C = "\033\\[94m" # BLUE
END_C = "\033\\[0m" # CLEAR
def increment(line):
    """Callback: count one expected output line from the VM."""
    global num_outputs
    num_outputs += 1
    print "num_outputs after increment: ", num_outputs
def unexpected(line):
    """Callback for output lines that must never appear; always fails.

    Raises explicitly instead of using `assert False`, which would be
    silently stripped when Python runs with optimizations (-O).
    """
    raise AssertionError("unexpected output line: %s" % (line,))
# Total number of on_output matches the test must register to pass.
expected_outputs = 22
def check_num_outputs(line):
    """Final callback: assert every expected line was seen, then exit the VM."""
    # NOTE(review): "ouput" typo below is in a runtime message string, left as-is.
    print "Registered", num_outputs, " / ", expected_outputs, " expected ouput lines"
    assert(num_outputs == expected_outputs)
    vmrunner.vms[0].exit(0, "All tests passed")
# Register the expected VM output lines. Patterns are regexes; `increment`
# counts a match, `unexpected` fails the test, `check_num_outputs` finishes it.
# ---------- POSIX wrapper syslog ----------
vm.on_output(" : Invalid -1", unexpected)
vm.on_output(" : A info message", increment)
vm.on_output(" : Program created with two arguments: one and two", increment)
vm.on_output(" Prepended message: Log after prepended message with one argument: 44", increment)
vm.on_output(" Prepended message: Log number two after openlog set prepended message", increment)
vm.on_output(" Prepended message: Log after closelog with three arguments. " +
             "One is 33, another is this, a third is 4011", increment)
vm.on_output(" Second prepended message\\[1\\]: Emergency log after openlog and new facility: user", increment)
vm.on_output(" Second prepended message\\[1\\]: Alert log with the m argument: Invalid argument", increment)
vm.on_output(" Second prepended message\\[1\\]: Second alert log with the m argument: No error information", increment)
vm.on_output(" Second prepended message\\[1\\]: Critical after cleared prepended message", increment)
# std err is just a regular printf
vm.on_output("Open after close prepended mess: " +
             "Info after openlog with both m: No error information and two hex arguments: 0x64 and 0x32", increment)
# ---------- IncludeOS syslogd ----------
# Count 1. vm.on_output("<11> " + ERR_C + "<USER.ERR> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslog: Unknown priority -1. Message: Syslogd Invalid -1", increment)
# Count 1. vm.on_output("<11> " + ERR_C + "<USER.ERR> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslog: Unknown priority 10. Message: Syslogd Invalid 10", increment)
# Count 1. vm.on_output("<11> " + ERR_C + "<USER.ERR> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslog: Unknown priority 55. Message: Syslogd Invalid 55", increment)
# Count 1. vm.on_output("<14> " + INFO_C + "<USER.INFO> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslogd No open has been called prior to this", increment)
# Count 1. vm.on_output("<13> " + NOTICE_C + "<USER.NOTICE> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslogd Program created with two arguments: one and two", increment)
# Count 1. vm.on_output("<19> " + ERR_C + "<MAIL.ERR> " + END_C, increment)
vm.on_output(" test_syslog_default Prepended message: Syslogd Log after prepended message with one argument: 44", increment)
# Count 1. vm.on_output("<20> " + WARNING_C + "<MAIL.WARNING> " + END_C, increment)
vm.on_output(" test_syslog_default Prepended message: Syslogd Log number two after openlog set prepended message", increment)
# Count 1. vm.on_output("<12> " + WARNING_C + "<USER.WARNING> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslogd Log after closelog with three arguments. " +
             "One is 33, another is this, a third is 4011", increment)
# Count 1. vm.on_output("<8> " + EMERG_C + "<USER.EMERG> " + END_C, increment)
vm.on_output(" test_syslog_default Second prepended message\\[1\\]: Syslogd Emergency log after openlog and new facility: user", increment)
# Count 1. vm.on_output("<9> " + ALERT_C + "<USER.ALERT> " + END_C, increment)
vm.on_output(" test_syslog_default Second prepended message\\[1\\]: Syslogd Alert log with the m argument: Success", increment)
# Count 1. vm.on_output("<10> " + CRIT_C + "<USER.CRIT> " + END_C, increment)
vm.on_output(" test_syslog_default: Syslogd Critical after cleared prepended message", increment)
# Count 2. Also has logopt LOG_PERROR (so will also be written to std::cerr)
# Count 1. vm.on_output("<6> " + INFO_C + "<KERN.INFO> " + END_C, increment)
vm.on_output(" test_syslog_default Open after close prepended message: " +
             "Syslogd Info after openlog with both m: No error information and two hex arguments: 0x64 and 0x32", increment)
vm.on_output("<191> " + DEBUG_C + "<LOCAL7.DEBUG> " + END_C, increment)
# Final expected line: verify the tally and exit the VM with success.
vm.on_output(" test_syslog_default Exiting test: Something special to close with", check_num_outputs)
# Boot the VM, taking a timeout as parameter
vm.cmake().boot(20).clean()
| 45.254386
| 139
| 0.698779
|
4a1580aa9c7f3ccadf0f5afe769158d1f83e6b64
| 5,059
|
py
|
Python
|
mil/archive/script_train_CAEP.py
|
labsyspharm/deeptile
|
b79a52f9613d5f8861b31e68a407cf507a90bcbe
|
[
"MIT"
] | null | null | null |
mil/archive/script_train_CAEP.py
|
labsyspharm/deeptile
|
b79a52f9613d5f8861b31e68a407cf507a90bcbe
|
[
"MIT"
] | 1
|
2020-04-05T01:13:24.000Z
|
2020-04-05T01:13:24.000Z
|
mil/archive/script_train_CAEP.py
|
labsyspharm/deeptile
|
b79a52f9613d5f8861b31e68a407cf507a90bcbe
|
[
"MIT"
] | 2
|
2019-07-29T17:45:13.000Z
|
2019-08-06T15:44:00.000Z
|
import numpy as np
import pandas as pd
import time
import argparse
import os
import copy
import yaml
import tqdm
import tensorflow as tf
import CAEP
import preprocessing
if __name__ == '__main__':
    # Hyper-parameters for this training run.
    BATCH_SIZE = 10
    LATENT_DIM = 20
    LEARNING_RATE = 1e-5
    GRID_NUM = 1000
    TOTAL_EPOCH = 10
    # load configuration
    master_folderpath = '/n/scratch2/hungyiwu/deeptile_data/'
    triplet_folderpath = '/n/scratch2/hungyiwu/triplet_images'
    config_filepath = os.path.expanduser('./default_config.yaml')
    with open(config_filepath, 'r') as yaml_file:
        config_dict = yaml.safe_load(yaml_file)
    # paths
    # BUG FIX: a stray trailing comma previously made model_filepath a
    # 1-element tuple, which breaks caep_model.save(filepath=...) later.
    model_filepath = os.path.join(master_folderpath, 'CAEP_model.hdf5')
    # parse arguments
    parser = argparse.ArgumentParser(description='Get verbosity.')
    parser.add_argument('--verbose', action='store_true',  # default is False
                        help='Turn on tqdm progress bar.')
    args = parser.parse_args()
    verbose = args.verbose
    # data: one loader for the pre-treatment image, one for the post-treatment image.
    patient_id = '26531'
    pre_identifier = patient_id+'PRE'
    pre_loader = preprocessing.tile_loader(
        workspace_folderpath=os.path.join(master_folderpath, pre_identifier),
        warm_start=True,
        image_filepath=os.path.join(triplet_folderpath, pre_identifier+'.ome.tif'),
        channel_filepath=os.path.join(master_folderpath, patient_id+'_channel_info.csv'),
        )
    # BUG FIX: post_identifier previously reused the 'PRE' suffix (copy-paste
    # error), so both loaders read the same image. Use 'POST' for the
    # post-treatment triplet image. TODO confirm the POST file naming on disk.
    post_identifier = patient_id+'POST'
    post_loader = preprocessing.tile_loader(
        workspace_folderpath=os.path.join(master_folderpath, post_identifier),
        warm_start=True,
        image_filepath=os.path.join(triplet_folderpath, post_identifier+'.ome.tif'),
        channel_filepath=os.path.join(master_folderpath, patient_id+'_channel_info.csv'),
        )
    # model: paired convolutional autoencoder with shared latent dimension.
    caep_model = CAEP.CAEP(
        latent_dim=LATENT_DIM,
        feature_shape=tuple(config_dict['tile_shape'])+(pre_loader.count_channel,),
        )
    caep_optimizer = tf.keras.optimizers.Adam(learning_rate=LEARNING_RATE)
def get_grid(image_shape, grid_num):
x_linspace = np.linspace(
start=0,
stop=image_shape[0],
num=GRID_NUM,
).astype(int)
y_linspace = np.linspace(
start=0,
stop=image_shape[1],
num=GRID_NUM,
).astype(int)
x_mesh, y_mesh = np.meshgrid(x_linspace, y_linspace)
survey_grid = []
for x, y in zip(x_mesh.flatten(), y_mesh.flatten()):
if preprocessing.within_range(
point=(x,y),
tile_shape=config_dict['tile_shape'],
support_range=[(0, image_shape[0]), (0, image_shape[1])],
):
survey_grid.append((x,y))
return survey_grid
    # Survey grids: candidate tile centers over each whole image.
    pre_grid = get_grid(pre_loader.image['image'].shape, GRID_NUM)
    post_grid = get_grid(post_loader.image['image'].shape, GRID_NUM)
    # convert to dataset
    pre_dataset = pre_loader.get_dataset(
        tile_shape=config_dict['tile_shape'],
        center_list=pre_grid,
        batch_size=BATCH_SIZE,
        )
    pre_batch_count = np.ceil(len(pre_grid)/BATCH_SIZE).astype(int)
    post_dataset = post_loader.get_dataset(
        tile_shape=config_dict['tile_shape'],
        center_list=post_grid,
        batch_size=BATCH_SIZE,
        )
    post_batch_count = np.ceil(len(post_grid)/BATCH_SIZE).astype(int)
    # setup training, evaluation, sampling loop
    # NOTE(review): `record` is never appended to below — dead variable?
    record = []
    for epoch in range(TOTAL_EPOCH):
        ts_start = time.time()
        # phase 1: train on grid (paired batches are drawn in lockstep from both images)
        for pre_batch, post_batch in tqdm.tqdm(
                iterable=zip(pre_dataset, post_dataset),
                desc='train',
                total=min(pre_batch_count, post_batch_count),
                disable=not verbose):
            caep_model.compute_apply_gradients(pre_batch, post_batch, caep_optimizer)
        # phase 2: survey on grid (evaluate loss over the same batches)
        loss_list = []
        for pre_batch, post_batch in tqdm.tqdm(
                iterable=zip(pre_dataset, post_dataset),
                desc='evaluate',
                total=min(pre_batch_count, post_batch_count),
                disable=not verbose):
            loss = caep_model.compute_loss(pre_batch, post_batch)
            # NOTE(review): assumes compute_loss returns a 1-element tensor — confirm.
            loss_list.append(loss.numpy()[0])
        ts_end = time.time()
        # report progress
        mean_loss = np.mean(loss_list)
        runtime = ts_end-ts_start
        print('epoch {}, loss {:.3f}, runtime {:.3f} sec/epoch.'.format(
            epoch, mean_loss, runtime), flush=True)
        # check-point: stop training on NaN/inf loss; otherwise save after every epoch.
        if not np.isfinite(mean_loss):
            print('non-finite loss detected: {}'.format(mean_loss), flush=True)
            break
        caep_model.save(
            filepath=model_filepath,
            overwrite=True,
            include_optimizer=True,
            save_format='h5',
            )
    print('Done.', flush=True)
| 38.037594
| 93
| 0.622455
|
4a1580b0b3ee114babac8608a4754a13402b948f
| 7,616
|
py
|
Python
|
st2actions/st2actions/runners/remote_script_runner.py
|
totalkyos/stack-storm
|
b89bc648d53dae03c7484d22abd771edfe45bbb8
|
[
"Apache-2.0"
] | null | null | null |
st2actions/st2actions/runners/remote_script_runner.py
|
totalkyos/stack-storm
|
b89bc648d53dae03c7484d22abd771edfe45bbb8
|
[
"Apache-2.0"
] | null | null | null |
st2actions/st2actions/runners/remote_script_runner.py
|
totalkyos/stack-storm
|
b89bc648d53dae03c7484d22abd771edfe45bbb8
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import traceback
import uuid
from oslo_config import cfg
from st2common import log as logging
from st2actions.runners.ssh.paramiko_ssh_runner import RUNNER_REMOTE_DIR
from st2actions.runners.ssh.paramiko_ssh_runner import BaseParallelSSHRunner
from st2common.models.system.paramiko_script_action import ParamikoRemoteScriptAction
__all__ = [
'get_runner',
'ParamikoRemoteScriptRunner',
]
LOG = logging.getLogger(__name__)
def get_runner():
    """Factory: build a ParamikoRemoteScriptRunner with a fresh unique id."""
    runner_id = str(uuid.uuid4())
    return ParamikoRemoteScriptRunner(runner_id)
class ParamikoRemoteScriptRunner(BaseParallelSSHRunner):
    """Runner which copies a local script to remote hosts over SSH and executes it."""

    def run(self, action_parameters):
        """Build the remote action, execute it and return (status, result, context)."""
        remote_action = self._get_remote_action(action_parameters)

        LOG.debug('Executing remote action.', extra={'_action_params': remote_action})
        result = self._run(remote_action)
        LOG.debug('Executed remote action.', extra={'_result': result})
        status = self._get_result_status(result, cfg.CONF.ssh_runner.allow_partial_failure)

        return (status, result, None)

    def _run(self, remote_action):
        """Copy artifacts, run the script and clean up the remote directory.

        Failures are converted into error-result dicts instead of being
        propagated, so a result structure is always returned.
        """
        try:
            copy_results = self._copy_artifacts(remote_action)
        except Exception:
            # If for whatever reason there is a top level exception,
            # we just bail here.
            error = 'Failed copying content to remote boxes.'
            LOG.exception(error)
            _, ex, tb = sys.exc_info()
            copy_results = self._generate_error_results(' '.join([error, str(ex)]), tb)
            return copy_results

        try:
            exec_results = self._run_script_on_remote_host(remote_action)
        except Exception:
            error = 'Failed executing script on remote boxes.'
            LOG.exception(error, extra={'_action_params': remote_action})
            _, ex, tb = sys.exc_info()
            return self._generate_error_results(' '.join([error, str(ex)]), tb)

        # Best-effort cleanup: a failure to delete the remote dir is logged
        # but must never mask the execution results.
        remote_dir = remote_action.get_remote_base_dir()
        try:
            LOG.debug('Deleting remote execution dir.', extra={'_remote_dir': remote_dir})
            delete_results = self._parallel_ssh_client.delete_dir(path=remote_dir,
                                                                  force=True)
            LOG.debug('Deleted remote execution dir.', extra={'_result': delete_results})
        except Exception:
            LOG.exception('Failed deleting remote dir.', extra={'_remote_dir': remote_dir})
        return exec_results

    def _copy_artifacts(self, remote_action):
        """Create the remote execution dir and upload the script (plus libs, if any)."""
        # First create remote execution directory.
        remote_dir = remote_action.get_remote_base_dir()
        LOG.debug('Creating remote execution dir.', extra={'_path': remote_dir})
        mkdir_result = self._parallel_ssh_client.mkdir(path=remote_dir)

        # Copy the script to remote dir in remote host.
        local_script_abs_path = remote_action.get_local_script_abs_path()
        remote_script_abs_path = remote_action.get_remote_script_abs_path()
        file_mode = 0o744  # rwxr--r--; script must be executable by its owner
        extra = {'_local_script': local_script_abs_path, '_remote_script': remote_script_abs_path,
                 'mode': file_mode}
        LOG.debug('Copying local script to remote box.', extra=extra)
        put_result_1 = self._parallel_ssh_client.put(local_path=local_script_abs_path,
                                                     remote_path=remote_script_abs_path,
                                                     mirror_local_mode=False, mode=file_mode)

        # If `lib` exists for the script, copy that to remote host.
        # BUG FIX: put_result_2 must be initialized, otherwise the final
        # expression below raised NameError whenever no `lib` directory exists.
        put_result_2 = None
        local_libs_path = remote_action.get_local_libs_path_abs()
        if os.path.exists(local_libs_path):
            extra = {'_local_libs': local_libs_path, '_remote_path': remote_dir}
            LOG.debug('Copying libs to remote host.', extra=extra)
            put_result_2 = self._parallel_ssh_client.put(local_path=local_libs_path,
                                                         remote_path=remote_dir,
                                                         mirror_local_mode=True)

        result = mkdir_result or put_result_1 or put_result_2
        return result

    def _run_script_on_remote_host(self, remote_action):
        """Execute the uploaded script on all hosts and return per-host results."""
        command = remote_action.get_full_command_string()
        LOG.info('Command to run: %s', command)
        results = self._parallel_ssh_client.run(command, timeout=remote_action.get_timeout())
        LOG.debug('Results from script: %s', results)
        return results

    def _get_remote_action(self, action_parameters):
        """Translate runner/action parameters into a ParamikoRemoteScriptAction."""
        # remote script actions without entry_point don't make sense, user probably wanted to use
        # "remote-shell-cmd" action
        if not self.entry_point:
            msg = ('Action "%s" is missing "entry_point" attribute. Perhaps wanted to use '
                   '"remote-shell-script" runner?' % (self.action_name))
            raise Exception(msg)

        script_local_path_abs = self.entry_point
        pos_args, named_args = self._get_script_args(action_parameters)
        named_args = self._transform_named_args(named_args)
        env_vars = self._get_env_vars()
        # Each execution gets its own sub-directory on the remote host,
        # keyed by the liveaction id.
        remote_dir = self.runner_parameters.get(RUNNER_REMOTE_DIR,
                                                cfg.CONF.ssh_runner.remote_dir)
        remote_dir = os.path.join(remote_dir, self.liveaction_id)
        return ParamikoRemoteScriptAction(self.action_name,
                                          str(self.liveaction_id),
                                          script_local_path_abs,
                                          self.libs_dir_path,
                                          named_args=named_args,
                                          positional_args=pos_args,
                                          env_vars=env_vars,
                                          on_behalf_user=self._on_behalf_user,
                                          user=self._username,
                                          password=self._password,
                                          private_key=self._private_key,
                                          remote_dir=remote_dir,
                                          hosts=self._hosts,
                                          parallel=self._parallel,
                                          sudo=self._sudo,
                                          timeout=self._timeout,
                                          cwd=self._cwd)

    @staticmethod
    def _generate_error_results(error, tb):
        """Build the standard error-result dict from a message and a traceback."""
        error_dict = {
            'error': error,
            'traceback': ''.join(traceback.format_tb(tb, 20)) if tb else '',
            'failed': True,
            'succeeded': False,
            'return_code': 255
        }
        return error_dict
| 47.012346
| 98
| 0.607012
|
4a1580f572a92ce6f03e43ffe56cab65c9be6eaf
| 7,573
|
py
|
Python
|
src/setup.py
|
figtools/figgy-cli
|
88f4ccb8221ef9734f95b2637acfacc6e00983e7
|
[
"Apache-2.0"
] | 36
|
2020-07-21T21:22:02.000Z
|
2021-10-20T06:55:47.000Z
|
src/setup.py
|
figtools/figgy-cli
|
88f4ccb8221ef9734f95b2637acfacc6e00983e7
|
[
"Apache-2.0"
] | 2
|
2020-10-29T12:49:15.000Z
|
2021-04-29T01:12:05.000Z
|
src/setup.py
|
figtools/figgy-cli
|
88f4ccb8221ef9734f95b2637acfacc6e00983e7
|
[
"Apache-2.0"
] | null | null | null |
import re
from setuptools import setup, find_packages
import platform
import os
# Platform Constants
LINUX, MAC, WINDOWS = "Linux", "Darwin", "Windows"
SHORT_DESCRIPTION = "Tired of managing hundreds or thousands of configurations as your microservice footprint " \
                    "scales? Tired of config files, environment variables, poorly managed secrets, and " \
                    " constantly crashing containers due to configuration mismanagement? There’s a better way. Figgy!"

# Parse VERSION / FIGGY_GITHUB out of the figcli constants module without importing it.
with open('figcli/config/constants.py') as constants_file:  # renamed: `file` shadowed the builtin
    contents = constants_file.read()
version_match = re.search(r'^VERSION\s*=\s*["\'](.*)["\']', contents, re.MULTILINE)
github_match = re.search(r'^FIGGY_GITHUB\s*=\s*["\'](.*)["\']', contents, re.MULTILINE)
# BUG FIX: fail fast with a clear message instead of an AttributeError on
# .group(1) if the constants file no longer matches the expected pattern.
if version_match is None or github_match is None:
    raise RuntimeError("Could not parse VERSION / FIGGY_GITHUB from figcli/config/constants.py")
VERSION = version_match.group(1)
GITHUB = github_match.group(1)
FIGGY_WEBSITE = "https://figgy.dev"

base_requirements = [
    "boto3 >= 1.13.19",
    "prompt_toolkit == 2.0.7",
    "sty >= 1.0.0b12",
    "click >= 7.1.2",
    "tqdm >= 4.46.0",
    "npyscreen >= 4.10.5",
    "beautifulsoup4 >= 4.9.1",
    "keyring >= 21.2.1",
    "keyrings.alt >= 3.4.0",
    "tabulate >= 0.8.7",
    "jsonpickle >= 1.4.1",
    "urllib3 >= 1.25.7",
    "pyotp >= 2.3.0",
    "pydantic >= 1.5.1",
    "python-u2flib-host>=3.0.3",
    "pycryptodome>=3.9.7",
    "filelock>=3.0.12",
    "pygments>=2.6.1"
]
windows_requirements = [
    "pyreadline>=2.1",
    "windows-curses>=2.1.0",
    "pywin32",
]
linux_requirements = [
]
# Fixed typo: was `darwin_requriements`.
darwin_requirements = [
]

# Select platform-specific extras; unknown platforms fall back to the Linux list.
if platform.system() == WINDOWS:
    requirements = base_requirements + windows_requirements
elif platform.system() == MAC:
    requirements = base_requirements + darwin_requirements
else:
    # LINUX and anything unrecognized.
    requirements = base_requirements + linux_requirements

# Include the test packages only when building for the figgy test suite.
if os.environ.get('FIGGY_TEST') == 'true':
    excludes = []
else:
    excludes = ["test"]
LONG_DESCRIPTION = """
# Figgy
Cultivate configuration clarity with Figgy. Open-source, cloud-native, configuration & secret management in AWS.
**Learn everything you need to know about Figgy by checking out the website:**
https://www.figgy.dev
Join our Slack community:
https://slack.figgy.dev
### Figgy 1.0 is out!
Figgy is a **_free_** and **_opensource_** serverless application config framework designed to bring simplicity, security, and resilience to
application config management. Figgy is built on top of AWS ParameterStore and leverages native AWS constructs such as AWS IAM,
KMS, among other services to ensure a simple and elegant integration with your AWS environment.
<br/>
> **Never roll another application to production having forgotten to set that last pesky
config in production.**
Figgy makes it possible to **bind your code directly to configurations**. Easily break builds if configs
are missing and application deployments are destined to fail.
> **Control user access like a champ**
Figgy makes it easy to set up and control access to across all of your AWS environments and configuration namespaces. Consider
your role types and use cases, map them up in a simple config file, and let Figgy do the rest. Audit all user activity and
changes over time, and roll back any config or group of configurations to any point-in-time -- to the second!
> **Integrate with your SSO provider, abandon long-lived AWS Keys for good**
Figgy supports SAML based SSO integrations with multi-factor authentication. Simplify AWS access control with Figgy!
> **Feature rich CLI to speed-up your development workflow.**
<br/>
**Figgy will help you:**
- Establish secure best practices from the start
- Prevent failed deployments and application downtime due to configuration mismanagement
- Save you time by automating simple configuration management tasks
- Give you peace of mind through high availability and resiliency, versioned configurations, audit logs, and easy rollbacks or restores.
- Keep secrets with their owners by cutting out the middle-man and establishing a strong framework of least-privilege.
- Avoid 3rd party lock-in or external dependencies -- Figgy deploys serverlessly into your AWS environments
- Keep your configuration store tidy. No more unused or stray configurations causing ongoing confusion.
## Why Figgy?
#### Simple & secure config and secret management
As your cloud footprint grows, so do the configurations you need to manage your applications.
Figgy is a framework for simple, secure, and resilient config management in AWS. The best part? No new servers to
deploy, upgrade, and patch. No complex software to learn. Follow Figgy’s laid-out path for config management.
It’s AWS native, compatible with all AWS services, and follows AWS best practices. Let Figgy help you get it right from the start.
---
#### Prevent downtime due to config mismanagement
Figgy provides a suite of utilities that link your code to your configs.
Detect and remedy misconfigurations before deployment rather than scrambling after the alarm bells are going off.
---
#### Let the secret owners own the secrets
Figgy establishes a framework for teams of secret owners to securely track, manage, and rotate their secrets in their
team’s secure space. From that space they can share secrets directly with the applications that need them --
without going through a middle-man. No more LastPass, one-time urls, secrets sent over Slack, email, encrypted files,
or any of those annoying secret management hoops. In a few weeks, when your coworker "Bill" finds new employment,
don’t ask yourself, "What secrets passed through Bill that we need to rotate now?"
---
#### Easily manage and maintain least privilege
Figgy makes it easy to give both users and applications the exact amount of access they need and nothing more, and provides
a framework for scalably maintaining and enforcing least privilege. By following Figgy best
practices you can easily maintain appropriate access for users and services while keeping your IAM policies short and sweet.
---
#### Maximum visibility & resiliency
Figgy maintains a history of every event that has ever occurred in your configuration store since the day you
installed Figgy. Know what happened, where, when, and by who. Then, roll back any configuration,
or hierarchy of configurations, to any point-in-time in the past, to the second.
Want to dip your toes in and test out the waters? Try out our free [Sandbox](https://www.figgy.dev/getting-started/sandbox/)
"""
# Package metadata; VERSION / descriptions / requirements are computed above.
setup(
    name="figgy-cli",
    packages=find_packages(".", exclude=excludes),
    entry_points={
        "console_scripts": ['figgy = figcli.entrypoint.cli:main']
    },
    version=VERSION,
    description=SHORT_DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type="text/markdown",
    author="Jordan Mance",
    author_email="jordan@figgy.dev",
    url=FIGGY_WEBSITE,
    python_requires='>=3.7',
    install_requires=requirements,
    classifiers=[
        "Programming Language :: Python :: 3",
        "Environment :: Console",
        'Intended Audience :: Developers',
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Natural Language :: English",
        # BUG FIX: the 3.5 / 3.6 classifiers contradicted
        # python_requires='>=3.7'; advertise only supported versions.
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: Implementation",
        "Topic :: Terminals",
        "Topic :: Utilities",
    ]
)
| 39.238342
| 141
| 0.718738
|
4a158144a087a7d04c14ee086ba29694cd53232b
| 7,658
|
py
|
Python
|
rasa_nlu/converters.py
|
MartinoMensio/rasa_nlu
|
29251aa35ce57db25538c819babfb0f0fb42dac6
|
[
"Apache-2.0"
] | null | null | null |
rasa_nlu/converters.py
|
MartinoMensio/rasa_nlu
|
29251aa35ce57db25538c819babfb0f0fb42dac6
|
[
"Apache-2.0"
] | null | null | null |
rasa_nlu/converters.py
|
MartinoMensio/rasa_nlu
|
29251aa35ce57db25538c819babfb0f0fb42dac6
|
[
"Apache-2.0"
] | null | null | null |
import codecs
import json
import re
import warnings
from typing import Optional
from rasa_nlu import utils
from rasa_nlu.tokenizers import Tokenizer
from rasa_nlu.training_data import TrainingData
# Different supported file formats and their identifier
WIT_FILE_FORMAT = "wit"          # WIT.ai export
API_FILE_FORMAT = "api"          # API.ai (Dialogflow) export
LUIS_FILE_FORMAT = "luis"        # LUIS.ai export
RASA_FILE_FORMAT = "rasa_nlu"    # native rasa NLU format
UNK_FILE_FORMAT = "unk"          # sentinel: format could not be guessed
def load_api_data(files):
    # type: ([str]) -> TrainingData
    """Loads training data stored in the API.ai data format.

    Intent files (those with a "userSays" key) contribute examples; entity
    files (those with "entries") contribute the synonym dictionary.  Examples
    are bucketed by what they carry: both intent and entities -> common,
    intent only -> intent examples, entities only -> entity examples.
    """
    intent_examples = []
    entity_examples = []
    common_examples = []
    entity_synonyms = {}
    for filename in files:
        with codecs.open(filename, encoding="utf-8-sig") as f:
            data = json.loads(f.read())
        # get only intents, skip the rest. The property name is the target class
        if "userSays" in data:
            intent = data.get("name")
            for s in data["userSays"]:
                # The utterance text is split into chunks; rejoin it.
                text = "".join(map(lambda chunk: chunk["text"], s.get("data")))
                # add entities to each token, if available
                entities = []
                for e in filter(lambda chunk: "alias" in chunk or "meta" in chunk, s.get("data")):
                    # NOTE(review): find() returns -1 if the chunk text is not
                    # found verbatim in the joined text — offsets would then be
                    # wrong. Confirm API.ai exports guarantee a match.
                    start = text.find(e["text"])
                    end = start + len(e["text"])
                    val = text[start:end]
                    entities.append(
                        {
                            "entity": e["alias"] if "alias" in e else e["meta"],
                            "value": val,
                            "start": start,
                            "end": end
                        }
                    )
                if intent and entities:
                    common_examples.append({"text": text, "intent": intent, "entities": entities})
                elif intent:
                    intent_examples.append({"text": text, "intent": intent})
                elif entities:
                    entity_examples.append({"text": text, "intent": intent, "entities": entities})
        # create synonyms dictionary
        if "name" in data and "entries" in data:
            for entry in data["entries"]:
                if "value" in entry and "synonyms" in entry:
                    for synonym in entry["synonyms"]:
                        entity_synonyms[synonym] = entry["value"]
    return TrainingData(intent_examples, entity_examples, common_examples, entity_synonyms)
def load_luis_data(filename, tokenizer):
    # type: (str, Optional[Tokenizer]) -> TrainingData
    """Loads training data stored in the LUIS.ai data format.

    LUIS specifies entity positions as token indices, so the supplied
    tokenizer must split text the same way LUIS does for the character
    offsets computed here to be correct.
    """
    warnings.warn(
        """LUIS data may not always be correctly imported because entity locations are specified by tokens.
        If you use a tokenizer which behaves differently from LUIS's your entities might not be correct""")
    if not tokenizer:
        raise ValueError("Can not load luis data without a specified tokenizer " +
                         "(e.g. using the configuration value `luis_data_tokenizer`)")

    intent_examples = []
    entity_examples = []
    common_examples = []

    with codecs.open(filename, encoding="utf-8-sig") as f:
        data = json.loads(f.read())
    for s in data["utterances"]:
        text = s.get("text")
        tokens = [t for t in tokenizer.tokenize(text)]
        intent = s.get("intent")
        entities = []
        for e in s.get("entities") or []:
            # LUIS token positions are inclusive; make the end exclusive.
            i, ii = e["startPos"], e["endPos"] + 1
            # Map token indices back to character offsets by matching the
            # tokens (whitespace-separated) in the original text.
            # BUG FIX: "\s*" in a non-raw string is an invalid escape sequence
            # (DeprecationWarning, and a SyntaxWarning/error in newer Pythons);
            # escape the backslash. Also use a dedicated loop variable instead
            # of re-binding `s` inside the comprehension (which leaks in py2).
            _regex = u"\\s*".join([re.escape(token) for token in tokens[i:ii]])
            expr = re.compile(_regex)
            m = expr.search(text)
            start, end = m.start(), m.end()
            val = text[start:end]
            entities.append({"entity": e["entity"], "value": val, "start": start, "end": end})

        if intent and entities:
            common_examples.append({"text": text, "intent": intent, "entities": entities})
        elif intent:
            intent_examples.append({"text": text, "intent": intent})
        elif entities:
            entity_examples.append({"text": text, "intent": intent, "entities": entities})
    return TrainingData(intent_examples, entity_examples, common_examples)
def load_wit_data(filename):
    # type: (str) -> TrainingData
    """Loads training data stored in the WIT.ai data format.

    WIT encodes the intent as a pseudo-entity named 'intent'; real entities
    are those carrying character offsets ("start"/"end").
    """
    intent_examples = []
    entity_examples = []
    common_examples = []

    with codecs.open(filename, encoding="utf-8-sig") as f:
        data = json.loads(f.read())

    for sample in data["data"]:
        raw_entities = sample.get("entities")
        if raw_entities is None:
            continue
        text = sample.get("text")
        # The intent is stored as the first entity whose name is 'intent'.
        intent = next((e["value"] for e in raw_entities if e["entity"] == 'intent'), None)

        # Keep only positional entities; strip the surrounding quote characters
        # that WIT puts around entity values.
        entities = [e for e in raw_entities if ("start" in e and "end" in e)]
        for e in entities:
            e["value"] = e["value"][1:-1]

        if intent and entities:
            common_examples.append({"text": text, "intent": intent, "entities": entities})
        elif intent:
            intent_examples.append({"text": text, "intent": intent})
        elif entities:
            entity_examples.append({"text": text, "intent": intent, "entities": entities})
    return TrainingData(intent_examples, entity_examples, common_examples)
def load_rasa_data(filename):
    # type: (str) -> TrainingData
    """Loads training data stored in the rasa NLU data format."""
    with codecs.open(filename, encoding="utf-8-sig") as f:
        nlu_payload = json.loads(f.read())['rasa_nlu_data']
    common = nlu_payload.get("common_examples", list())
    intent = nlu_payload.get("intent_examples", list())
    entity = nlu_payload.get("entity_examples", list())
    return TrainingData(intent, entity, common)
def guess_format(files):
    # type: ([str]) -> str
    """Given a set of files, tries to guess which data format is used.

    Inspects the top-level JSON keys of each file and returns the first
    format that matches; UNK_FILE_FORMAT if none match.
    """
    for filename in files:
        with codecs.open(filename, encoding="utf-8-sig") as f:
            file_data = json.loads(f.read())
        # `isinstance` replaces the non-idiomatic `type(...) is list` and
        # also covers the missing-key case (get() -> None is not a list).
        if isinstance(file_data.get("data"), list):
            return WIT_FILE_FORMAT
        elif "luis_schema_version" in file_data:
            return LUIS_FILE_FORMAT
        elif "userSays" in file_data:
            return API_FILE_FORMAT
        elif "rasa_nlu_data" in file_data:
            return RASA_FILE_FORMAT
    return UNK_FILE_FORMAT
def resolve_data_files(resource_name):
    # type: (str) -> [str]
    """Lists all data files of the resource name (might be a file or directory).

    Raises:
        ValueError: if the resource name cannot be resolved to files.
    """
    try:
        return utils.recursively_find_files(resource_name)
    except ValueError as e:
        # BUG FIX: `e.message` only exists on Python 2 exceptions; on Python 3
        # it raises AttributeError and masks the real error. str(e) works on both.
        raise ValueError("Invalid training data file / folder specified. " + str(e))
def load_data(resource_name, language, luis_data_tokenizer=None, fformat=None):
    # type: (str, str, Optional[Tokenizer], Optional[str]) -> TrainingData
    """Loads training data from disk. If no format is provided, the format will be guessed based on the files."""
    files = resolve_data_files(resource_name)
    fformat = fformat or guess_format(files)
    if fformat == LUIS_FILE_FORMAT:
        # LUIS data needs a tokenizer to recover entity character offsets.
        from rasa_nlu.tokenizers import tokenizer_from_name
        tokenizer = tokenizer_from_name(luis_data_tokenizer, language)
        return load_luis_data(files[0], tokenizer)
    if fformat == WIT_FILE_FORMAT:
        return load_wit_data(files[0])
    if fformat == API_FILE_FORMAT:
        # API.ai spreads its data over multiple files; pass them all.
        return load_api_data(files)
    if fformat == RASA_FILE_FORMAT:
        return load_rasa_data(files[0])
    raise ValueError("unknown training file format : {0}".format(fformat))
| 38.099502
| 113
| 0.60538
|
4a15817ab29fe03de1e92eebcf19a9a48ee3f94d
| 1,209
|
py
|
Python
|
runFDDB_example.py
|
zhly0/FDDB-evaluation
|
3064424b1f28c1a6ccf32bea97a8311c70aca8f0
|
[
"Unlicense"
] | null | null | null |
runFDDB_example.py
|
zhly0/FDDB-evaluation
|
3064424b1f28c1a6ccf32bea97a8311c70aca8f0
|
[
"Unlicense"
] | null | null | null |
runFDDB_example.py
|
zhly0/FDDB-evaluation
|
3064424b1f28c1a6ccf32bea97a8311c70aca8f0
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8

# Plot the FDDB discrete-ROC curve produced by the FDDB evaluation tool.
#
# Upstream evaluation usage (kept from the original notebook):
#   from FDDB import runFDDB
#   runFDDB(pred='./FDDB-result/result.txt', result_path=None, index=-1)

# get_ipython().run_line_magic('matplotlib', 'inline')
from matplotlib import pyplot as plt
import numpy as np

path_ContROC = "./FDDB-result/ContROC.txt"
path_DiscROC = "./FDDB-result/DiscROC.txt"
path_imgSave = "./FDDB-result/result.png"
set_x_lim = 1000  # only used by the commented-out xlim call below

# Read the discrete ROC table; per the axis labels below, column 0 is the
# true-positive rate (y) and column 1 the false-positive count (x).
with open(path_DiscROC, 'r') as fp:
    discROC = fp.readlines()
discROC = [line.split() for line in discROC]
disc_x = [float(x[1]) for x in discROC]
disc_y = [float(y[0]) for y in discROC]

### plot data
plt.figure()
plt.ylim((-0.07, 1))
# plt.xlim((-2, set_x_lim))
plt.xlabel('False Positive (FP)')
plt.ylabel('True Positive Rate (FPR)')
plt.plot(disc_x, disc_y, color='#007777', linewidth=3.0)
plt.title('MTCNN-Tensorflow')
# Annotate the curve with the score of the first (highest-FP) point.
plt.text(disc_x[0] - disc_x[0] / 3, disc_y[0] + 0.03,
         'Discrete Score: %.3f' % (disc_y[0] * 100) + '%')
plt.grid()
# FIX: use the path constant defined above instead of repeating the literal
# (they previously had to be kept in sync by hand).
# plt.figure(figsize=(10, 10))
plt.savefig(path_imgSave)
plt.show()
| 19.5
| 100
| 0.673284
|
4a1582bc743d6a8cdd29402fd39671fa1e1e9c77
| 4,698
|
py
|
Python
|
src/pipeline/region_utils.py
|
weimingguo/open-covid-19-data
|
c0e599a49e27d8f418e90b8a29d740dccffe6c74
|
[
"Apache-2.0"
] | 129
|
2020-05-21T03:35:17.000Z
|
2022-03-16T03:49:34.000Z
|
src/pipeline/region_utils.py
|
weimingguo/open-covid-19-data
|
c0e599a49e27d8f418e90b8a29d740dccffe6c74
|
[
"Apache-2.0"
] | 72
|
2020-06-09T06:20:58.000Z
|
2020-12-17T18:07:35.000Z
|
src/pipeline/region_utils.py
|
weimingguo/open-covid-19-data
|
c0e599a49e27d8f418e90b8a29d740dccffe6c74
|
[
"Apache-2.0"
] | 82
|
2020-05-22T03:30:15.000Z
|
2022-03-16T00:04:55.000Z
|
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import pandas as pd
import config
import path_utils
def join_region_codes(data_df, params):
    """Attach location region codes to data_df as configured in params['load']['regions']."""
    reg_params = params['load']['regions']
    # A fixed 'single_region_code' takes precedence over key-based mapping.
    if 'single_region_code' in reg_params:
        return join_single_region_code(data_df, reg_params['single_region_code'])
    return join_on_keys(data_df, reg_params)
def join_nytimes_region_codes(data_df, params):
    """Join NYTimes county rows onto location region codes via zero-padded FIPS.

    Rows without a FIPS code are dropped. This drops states (which have
    county = Unknown, state = state name, fips = NaN) and also New York City
    (which has county = New York City, state = New York, fips = NaN).
    """
    locations_df = pd.read_csv(path_utils.path_to('locations_csv'))
    # FIX: .copy() so the new column below is added to an independent frame,
    # not a view of data_df (avoids pandas' SettingWithCopyWarning and
    # potentially lost writes under chained assignment).
    fips_data_df = data_df[data_df['fips'].notna()].copy()
    fips_locations = locations_df[locations_df['region_code_type'] == 'fips_6-4']
    # Restore leading zeros dropped by numeric parsing of the FIPS column.
    fips_data_df['padded_fips_code'] = fips_data_df['fips'].apply(lambda x: str(int(x)).zfill(5))
    fips_data_joined = fips_data_df.merge(fips_locations, left_on=['padded_fips_code'],
                                          right_on=['leaf_region_code'], how='left')
    return fips_data_joined
def join_mobility_region_codes(data_df, params):
    """Join Google mobility rows onto location region codes.

    The mobility data mixes three granularities; each is matched on its own key:
      - country rows -> iso_3166-1 locations (country_region_code)
      - region rows  -> iso_3166-2 locations (iso_3166_2_code)
      - county rows  -> fips_6-4 locations (census_fips_code, zero-padded)
    Metro-area rows are excluded from all three partitions.
    """
    locations_df = pd.read_csv(path_utils.path_to('locations_csv'))
    iso1_data = data_df[
        data_df['country_region_code'].notna() &
        data_df['sub_region_1'].isna() &
        data_df['sub_region_2'].isna() &
        data_df['metro_area'].isna()]
    iso2_data = data_df[
        data_df['iso_3166_2_code'].notna() &
        data_df['census_fips_code'].isna() &
        data_df['metro_area'].isna()]
    # FIX: .copy() so adding 'padded_fips_code' below does not write into a
    # view of data_df (avoids pandas' SettingWithCopyWarning).
    fips_data = data_df[
        data_df['census_fips_code'].notna() &
        data_df['metro_area'].isna()].copy()
    iso1_locations = locations_df[locations_df['region_code_type'] == 'iso_3166-1']
    iso1_joined = iso1_data.merge(iso1_locations, left_on=['country_region_code'],
                                  right_on=['country_iso_3166-1_alpha-2'], how='left')
    iso2_locations = locations_df[locations_df['region_code_type'] == 'iso_3166-2']
    iso2_joined = iso2_data.merge(iso2_locations, left_on=['iso_3166_2_code'], right_on=['region_code'], how='left')
    fips_locations = locations_df[locations_df['region_code_type'] == 'fips_6-4']
    # Restore leading zeros dropped by numeric parsing of the FIPS column.
    fips_data['padded_fips_code'] = fips_data['census_fips_code'].apply(lambda x: str(int(x)).zfill(5))
    fips_joined = fips_data.merge(fips_locations, left_on=['padded_fips_code'],
                                  right_on=['leaf_region_code'], how='left')
    joined_df = pd.concat([iso1_joined, iso2_joined, fips_joined])
    joined_df['census_fips_code'] = joined_df['padded_fips_code']
    return joined_df
def join_single_region_code(data_df, single_region_code):
    """Tag every row with one fixed region code and merge in its location columns."""
    data_df['region_code'] = single_region_code
    locations = pd.read_csv(path_utils.path_to('locations_csv'))
    # Restrict the locations table to the region columns before merging.
    locations = locations[config.all_region_columns()]
    return data_df.merge(locations, on=['region_code'])
def join_on_keys(data_df, reg_params):
    """Rename data columns per reg_params['mapping_keys'] and inner-join the locations table on them."""
    mapping_keys = reg_params['mapping_keys']
    locations_df = pd.read_csv(path_utils.path_to('locations_csv'))
    if 'level_1_region_code' in reg_params:
        # Narrow the candidate locations to the configured level-1 region.
        locations_df = locations_df[locations_df['level_1_region_code'] == reg_params['level_1_region_code']]
    # mapping_keys maps location-column -> data-column; invert it to rename
    # the data columns to the location names used for the join.
    rename_map = {data_col: loc_col for loc_col, data_col in mapping_keys.items()}
    renamed = data_df.rename(columns=rename_map)
    return renamed.merge(locations_df, on=list(mapping_keys.keys()), how='inner')
def aggregate_and_append(data_df, params):
    """Optionally aggregate the data by a region column and append the aggregate rows.

    When params['load']['regions'] contains 'aggregate_by', the data columns
    are summed per (date, aggregate_by) group, the group column is renamed to
    'region_code', and the aggregated rows are appended. Duplicate
    (date, region_code) pairs keep the first occurrence, so pre-existing rows
    win over newly aggregated ones.
    """
    reg_params = params['load']['regions']
    if 'aggregate_by' in reg_params:
        agg_by = reg_params['aggregate_by']
        columns_to_sum = config.col_params_to_col_list(params['data'])
        # Iterate the columns directly instead of indexing by range().
        agg_dict = {col: 'sum' for col in columns_to_sum}
        agg_df = data_df.groupby(['date', agg_by]).agg(agg_dict).reset_index()
        agg_df = agg_df.rename(columns={agg_by: 'region_code'})
        # FIX: DataFrame.append was deprecated in pandas 1.4 and removed in
        # 2.0; pd.concat is the supported equivalent.
        data_df = pd.concat([data_df, agg_df], ignore_index=True)
        data_df = data_df.drop_duplicates(subset=['date', 'region_code'])
    return data_df
| 47.938776
| 116
| 0.709025
|
4a1586f018040a73f1a21fc6840043174c8284d9
| 1,297
|
py
|
Python
|
testupload.py
|
jlberzal/Paramassi
|
6c740eae60134180f5009c5a4f524a3c50ee384a
|
[
"BSD-2-Clause"
] | null | null | null |
testupload.py
|
jlberzal/Paramassi
|
6c740eae60134180f5009c5a4f524a3c50ee384a
|
[
"BSD-2-Clause"
] | null | null | null |
testupload.py
|
jlberzal/Paramassi
|
6c740eae60134180f5009c5a4f524a3c50ee384a
|
[
"BSD-2-Clause"
] | null | null | null |
import os
from flask import Flask, request, redirect, url_for
from werkzeug import secure_filename
# Directory where uploads are stored; the empty string means the
# current working directory of the server process.
UPLOAD_FOLDER = ''
# File extensions the upload endpoint will accept.
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
def allowed_file(filename):
    """Return True if *filename* has an extension in ALLOWED_EXTENSIONS.

    The comparison is case-insensitive, so e.g. 'photo.JPG' is accepted
    (previously only lowercase extensions matched).
    """
    return '.' in filename and \
           filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
from flask import send_from_directory
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    """Serve a previously uploaded file from the upload folder."""
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
    """Show the upload form; on POST, save an allowed file and redirect to it."""
    if request.method == 'POST':
        # FIX: use .get() so a POST without a 'file' part falls through to
        # the form instead of raising a 400 BadRequest from request.files['file'].
        file = request.files.get('file')
        if file and allowed_file(file.filename):
            # secure_filename strips path separators and other unsafe characters.
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            return redirect(url_for('uploaded_file',
                                    filename=filename))
    # GET request, missing file part, or disallowed extension: show the form.
    return '''
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload new File</h1>
    <form action="" method=post enctype=multipart/form-data>
      <p><input type=file name=file>
         <input type=submit value=Upload>
    </form>
    '''
# NOTE(review): debug=True enables Flask's interactive debugger (arbitrary
# code execution in the browser) — development use only, never production.
app.run(debug=True)
| 30.162791
| 74
| 0.624518
|
4a1589bafd54e1cbcae86ebc12eeee475dd47361
| 1,181
|
py
|
Python
|
setup.py
|
zengzhiying/py3monchickey
|
706706140c38be1556b44132200ea77c61e346f6
|
[
"Apache-2.0"
] | 1
|
2021-09-29T08:43:51.000Z
|
2021-09-29T08:43:51.000Z
|
setup.py
|
zengzhiying/py3monchickey
|
706706140c38be1556b44132200ea77c61e346f6
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
zengzhiying/py3monchickey
|
706706140c38be1556b44132200ea77c61e346f6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# coding=utf-8
import os
import sys
import codecs
from setuptools import setup
# Absolute path of the directory containing this setup.py.
here = os.path.abspath(os.path.dirname(__file__))
# Packages shipped in the distribution.
packages = ['monchickey']
# Runtime dependencies installed alongside the package.
requires = [
    'PyMySQL>=0.7.11',
    'pyyaml>=3.11',
    'xxhash>=1.2.0'
]
longdesc = '''
This is a tool library based on python 3. It is called monchickey.
Features include file handling, database connection acquisition,
Network communication and simple system command calls,
The most commonly used for data calculation, filtering and conversion.
Required packages:
PyMySQL
PyYAML
xxhash
'''
# Load package metadata (__title__, __version__, ...) from __version__.py
# by exec'ing it, so the package itself (and its dependencies) need not be
# importable at build time.
about = {}
with codecs.open(os.path.join(here, 'monchickey', '__version__.py'), 'r', 'utf-8') as f:
    exec(f.read(), about)
setup(
    name=about['__title__'],
    version=about['__version__'],
    description=about['__description__'],
    long_description=longdesc,
    author=about['__author__'],
    author_email=about['__author_email__'],
    url=about['__url__'],
    packages=packages,
    license=about['__license__'],
    python_requires=">= 3.5",
    # platforms='Posix; MacOS X; Windows',
    include_package_data=True,
    zip_safe=False,
    install_requires=requires,
)
| 23.156863
| 88
| 0.69602
|
4a1589cff4d427eb734f212f4fee20fc36f0a807
| 2,527
|
py
|
Python
|
writeup-scripts/2018-2019/SogetiCyberEscape/gocrack/solve.py
|
Lexios4/hackademint.github.io
|
4e1047dd7d91d59daf327da6ae6b6913a451a67a
|
[
"CC0-1.0"
] | 5
|
2019-04-11T16:03:34.000Z
|
2019-08-26T20:50:45.000Z
|
writeup-scripts/2018-2019/SogetiCyberEscape/gocrack/solve.py
|
Lexios4/hackademint.github.io
|
4e1047dd7d91d59daf327da6ae6b6913a451a67a
|
[
"CC0-1.0"
] | 4
|
2019-03-29T23:30:05.000Z
|
2020-08-20T17:12:31.000Z
|
writeup-scripts/2018-2019/SogetiCyberEscape/gocrack/solve.py
|
Lexios4/hackademint.github.io
|
4e1047dd7d91d59daf327da6ae6b6913a451a67a
|
[
"CC0-1.0"
] | 2
|
2019-05-07T12:56:25.000Z
|
2021-06-18T16:32:22.000Z
|
#!/usr/bin/python3
from binascii import hexlify
import hashlib, string
bytes_list = [157, 94, 214, 120, 254, 87, 188, 202, 97, 1, 64, 149, 122, 250, 181, 113, 76, 97, 67, 96, 218, 147, 192, 160, 65, 178, 46, 83, 125, 225, 81, 235, 13, 97, 248, 55, 12, 173, 29, 65, 47, 128, 184, 77, 20, 62, 18, 87, 165, 243, 198, 161, 27, 3, 131, 157, 70, 175, 159, 180, 60, 151, 193, 136, 93, 188, 152, 220, 201, 131, 167, 7, 40, 189, 8, 45, 26, 71, 84, 110, 129, 84, 23, 38, 127, 118, 246, 244, 96, 164, 166, 31, 157, 183, 95, 219, 178, 245, 255, 71, 67, 102, 113, 182, 229, 51, 216, 220, 54, 20, 132, 93, 217, 86, 121, 117, 33, 52, 162, 217, 235, 97, 219, 215, 185, 28, 75, 204, 45, 185, 94, 142, 26, 146, 103, 183, 161, 24, 133, 86, 178, 1, 59, 51, 12, 193, 117, 185, 192, 241, 182, 168, 49, 195, 153, 226, 105, 119, 38, 97, 123, 139, 150, 90, 212, 188, 160, 228, 26, 181, 29, 231, 179, 19, 99, 161, 178, 245, 255, 71, 67, 102, 113, 182, 229, 51, 216, 220, 54, 20, 132, 93, 177, 74, 123, 128, 89, 217, 192, 85, 149, 76, 146, 103, 76, 230, 0, 50, 134, 92, 12, 11, 74, 176, 224, 99, 229, 202, 163, 56, 124, 26, 135, 65, 3, 199, 192, 172, 227, 149, 216, 1, 130, 219, 7, 174, 44, 48, 240, 52, 177, 74, 123, 128, 89, 217, 192, 85, 149, 76, 146, 103, 76, 230, 0, 50, 37, 16, 195, 144, 17, 197, 190, 112, 65, 130, 66, 62, 58, 105, 94, 145, 168, 127, 246, 121, 162, 243, 231, 29, 145, 129, 166, 123, 117, 66, 18, 44, 75, 67, 176, 174, 227, 86, 36, 205, 149, 185, 16, 24, 155, 61, 194, 49, 130, 119, 224, 145, 13, 117, 1, 149, 180, 72, 121, 118, 22, 224, 145, 173, 177, 74, 123, 128, 89, 217, 192, 85, 149, 76, 146, 103, 76, 230, 0, 50, 111, 143, 87, 113, 80, 144, 218, 38, 50, 69, 57, 136, 217, 161, 80, 27, 12, 193, 117, 185, 192, 241, 182, 168, 49, 195, 153, 226, 105, 119, 38, 97, 123, 139, 150, 90, 212, 188, 160, 228, 26, 181, 29, 231, 179, 19, 99, 161, 15, 189, 23, 118, 225, 173, 34, 197, 154, 112, 128, 211, 92, 127, 212, 219]
""" Dictionnaire de hash pour chaque caractère lisible """
dico = {}
for c in string.printable:
dico[c] = hashlib.md5(c.encode()).hexdigest()
""" """
def search_dict(value, dico):
    """Return the first key in *dico* whose value equals *value*, or None.

    Iterates key/value pairs directly instead of doing a dict lookup per key.
    """
    for key, val in dico.items():
        if val == value:
            return key
    return None
password = ''
length = 16  # one raw MD5 digest is 16 bytes

# bytes_list is a concatenation of raw 16-byte MD5 digests, one per password
# character; recover each character by reverse lookup in `dico`.
for i in range(0, len(bytes_list), length):
    # bytes() on a list of ints replaces the original per-byte join.
    raw_hash = bytes(bytes_list[i:i + length])
    hex_hash = hexlify(raw_hash).decode()
    print(hex_hash)
    # FIX: the original called search_dict twice per chunk and discarded the
    # first result; look the character up once.
    password += search_dict(hex_hash, dico)

print()
print(password)
| 76.575758
| 1,843
| 0.587258
|
4a158c50f8312034fc202a5567c59c5cebcdf071
| 5,348
|
py
|
Python
|
miniconda3-lnx/pyzo-4.10.2/source/pyzo/core/commandline.py
|
Thibaut-Kovaltchouk/MultiPyzo
|
a15ecf77e31ebeb195e70385f5ac132f6ab4504d
|
[
"CC0-1.0"
] | null | null | null |
miniconda3-lnx/pyzo-4.10.2/source/pyzo/core/commandline.py
|
Thibaut-Kovaltchouk/MultiPyzo
|
a15ecf77e31ebeb195e70385f5ac132f6ab4504d
|
[
"CC0-1.0"
] | null | null | null |
miniconda3-lnx/pyzo-4.10.2/source/pyzo/core/commandline.py
|
Thibaut-Kovaltchouk/MultiPyzo
|
a15ecf77e31ebeb195e70385f5ac132f6ab4504d
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2014, the Pyzo development team
#
# Pyzo is distributed under the terms of the 2-Clause BSD License.
# The full license can be found in 'license.txt'.
""" Module to deal with command line arguments.
In specific, this allows doing "pyzo some_file.py" and the file will be
opened in an existing pyzo window (if available) or a new pyzo process
is started to open the file.
This module is used at the very early stages of starting pyzo, and also
in main.py to apply any command line args for the current process, and
to closse down the server when pyzo is closed.
"""
import sys
import os
from yoton.clientserver import RequestServer, do_request
import pyzo
# Local address to host on. we use yoton's port hash to have an arbitrary port
ADDRESS = "localhost:pyzoserver"
class Server(RequestServer):
    """Server that listens on a port for commands.

    The commands can be sent by executing the Pyzo executable with
    command line arguments.
    """

    def handle_request(self, request):
        """Handle one incoming request string and return the reply text."""
        request = request.strip()
        command, _, arg = request.partition(" ")
        try:
            reply = handle_command(command, arg)
        except Exception as err:
            msg = "Error handling request %r:\n%s" % (request, str(err))
            # Printing is deferred to the GUI thread via callLater.
            pyzo.callLater(print, msg)
            return msg
        pyzo.callLater(print, "Request:", request)
        pyzo.callLater(print, "Reply:", reply)
        return reply
def handle_command(command, arg):
    """Execute one pyzo command and return a reply string.

    Called both from the command server and from the code that processes
    command line arguments.
    """
    if not command:
        return "empty command?"

    if command == "testerr":
        # Deliberately raises, to exercise the server's error path.
        return 1 / 0

    if command == "stopserver":
        # For efficiently stopping the server
        if server:
            server.stop()
        return "Stopped the server"

    if command == "echo":
        # For testing
        return "echo %r" % arg

    if command == "open":
        # Open a file in the editor
        if not arg:
            return "The open command requires a filename."
        pyzo.callLater(pyzo.editors.loadFile, arg)
        return "Opened file %r" % arg

    if command == "new":
        # Open a new (temp) file in the editor
        pyzo.callLater(pyzo.editors.newFile)
        return "Created new file"

    if command == "close":
        # Close pyzo
        pyzo.callLater(pyzo.main.close)
        return "Closing Pyzo"

    # Unknown command: assume the user wanted to open a file.
    fname = (command + " " + arg).rstrip()
    if not pyzo.editors:
        return "Still warming up ..."
    pyzo.callLater(pyzo.editors.loadFile, fname)
    return "Try opening file %r" % fname
def handle_cmd_args():
    """Handle command line arguments by sending them to the server.

    Returns a result string if any commands were processed, and None
    otherwise.
    """
    args = sys.argv[1:]
    request = " ".join(args)
    if "psn_" in request and not os.path.isfile(request):
        # An OSX thing when clicking app icon: drop the process-serial arg.
        request = " ".join(args[1:])
    request = request.strip()

    if not request:
        return None
    # Always send to server, even if we are the ones that run the server
    try:
        return do_request(ADDRESS, request, 0.4).rstrip()
    except Exception as err:
        print("Could not process command line args:\n%s" % str(err))
        return None
def stop_our_server():
    """Stop our server, for shutting down nicely.

    This is faster than calling server.stop(), because in the latter
    case the server will need to timeout (0.25 s) before it sees that
    it needs to stop.
    """
    if not is_our_server_running():
        return
    try:
        server.stop()  # Post a stop message
        do_request(ADDRESS, "stopserver", 0.1)  # trigger
        print("Stopped our command server.")
    except Exception as err:
        print("Failed to stop command server:")
        print(err)
def is_our_server_running():
    """Return True if our server is running. If it is, this process
    is the main Pyzo; the first Pyzo that was started. If the server is
    not running, this is probably not the first Pyzo, but there might
    also be problem with starting the server.
    """
    # FIX: `server and server.isAlive()` returned None (not False) when the
    # server failed to start; coerce to an actual bool as documented.
    return server is not None and server.isAlive()
def is_pyzo_server_running():
    """Test whether the Pyzo server is running *somewhere* (not
    necessarily in this process).
    """
    try:
        # The echo command round-trips through whichever process owns the port.
        return do_request(ADDRESS, "echo", 0.2).startswith("echo")
    except Exception:
        return False
# Should we start the server?
_try_start_server = True
if sys.platform.startswith("win"):
    # On Windows, only start when no other Pyzo instance is already serving.
    _try_start_server = not is_pyzo_server_running()

# Create server
server_err = None
server = None
try:
    if _try_start_server:
        server = Server(ADDRESS)
        server.start()
except OSError as err:
    # Keep the error for diagnostics and leave `server` as None so
    # is_our_server_running() reports False.
    server_err = err
    server = None
| 29.711111
| 78
| 0.631077
|
4a158d95cd04236f6f169edf719b00c3d0df1cc8
| 529
|
py
|
Python
|
setup.py
|
borisbolliet/pi_spec
|
88c96f86253b4e719fe31642f3d779e1f4ae576b
|
[
"MIT"
] | 1
|
2021-11-03T16:11:37.000Z
|
2021-11-03T16:11:37.000Z
|
setup.py
|
borisbolliet/specdist
|
88c96f86253b4e719fe31642f3d779e1f4ae576b
|
[
"MIT"
] | null | null | null |
setup.py
|
borisbolliet/specdist
|
88c96f86253b4e719fe31642f3d779e1f4ae576b
|
[
"MIT"
] | null | null | null |
from setuptools import setup
# Packaging metadata for the specdist library.
setup(
    name="specdist",
    version="0.1",
    description="Python package for spectral distortions",
    zip_safe=False,
    packages=["specdist"],
    author = 'Boris Bolliet and Jens Chluba',
    author_email = 'boris.bolliet@gmail.com',
    url = 'https://github.com/borisbolliet/specdist',
    download_url = 'https://github.com/borisbolliet/specdist/archive/master.zip',
    package_data={
        # Ship the bundled text data tables with the package.
        "specdist": ["data/*txt"],
        #"data/ct_database/case_1_040520/*txt"]#,
    },
)
| 23
| 81
| 0.654064
|
4a158fbf1c36cc898660cc4dc1bf0aff4f473c70
| 971
|
py
|
Python
|
Platforms/Web/Processing/Admin/protocols.py
|
HeapUnderfl0w/Phaazebot
|
54e637bd4bc213b8efdaf23d5f331f2569e96843
|
[
"MIT"
] | null | null | null |
Platforms/Web/Processing/Admin/protocols.py
|
HeapUnderfl0w/Phaazebot
|
54e637bd4bc213b8efdaf23d5f331f2569e96843
|
[
"MIT"
] | null | null | null |
Platforms/Web/Processing/Admin/protocols.py
|
HeapUnderfl0w/Phaazebot
|
54e637bd4bc213b8efdaf23d5f331f2569e96843
|
[
"MIT"
] | null | null | null |
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from Platforms.Web.index import WebIndex
from aiohttp.web import Response, Request
from Utils.Classes.htmlformatter import HTMLFormatter
from Utils.Classes.webuserinfo import WebUserInfo
from Platforms.Web.utils import getNavbar
async def adminProtocols(cls:"WebIndex", WebRequest:Request) -> Response:
    """
    Default url: /admin/protocols

    Renders the admin protocols page; requires a logged-in superadmin.
    """
    WebUser:WebUserInfo = await cls.getWebUserInfo(WebRequest)
    if not WebUser.found:
        return await cls.accountLogin(WebRequest)
    if not WebUser.checkRoles(["superadmin"]):
        return await cls.notAllowed(WebRequest, msg="Superadmin rights required")

    AdminProtocols:HTMLFormatter = HTMLFormatter("Platforms/Web/Content/Html/Admin/protocols.html")
    page:str = cls.HTMLRoot.replace(
        replace_empty=True,
        title="Phaaze | Admin - Protocols",
        header=getNavbar(),
        main=AdminProtocols
    )
    return cls.response(body=page, status=200, content_type='text/html')
| 29.424242
| 117
| 0.777549
|
4a15905829aaee4880014437edb09d76e82ccda3
| 288
|
py
|
Python
|
exercises/project_euler_solutions/problem_48.py
|
leonel-123/python-fundamentals
|
1ce9f666449866a13147d4f3a774c43f9107da41
|
[
"MIT"
] | null | null | null |
exercises/project_euler_solutions/problem_48.py
|
leonel-123/python-fundamentals
|
1ce9f666449866a13147d4f3a774c43f9107da41
|
[
"MIT"
] | null | null | null |
exercises/project_euler_solutions/problem_48.py
|
leonel-123/python-fundamentals
|
1ce9f666449866a13147d4f3a774c43f9107da41
|
[
"MIT"
] | null | null | null |
"""
problem 48
The series, 1^1 + 2^2 + 3^3 + ... + 10^10 = 10405071317.
Find the last ten digits of the series,
1^1 + 2^2 + 3^3 + ... + 1000^1000.
"""
def series(to_number):
    """Return the sum 1^1 + 2^2 + ... + to_number^to_number."""
    return sum(k ** k for k in range(1, to_number + 1))
print(series(1000))
| 16.941176
| 56
| 0.538194
|
4a15905c8ae5dd7f02f8e8df3768fb66d74b8b71
| 2,160
|
py
|
Python
|
ezotv/discordbot_tools/discord_bot.py
|
marcsello/ezotv-frontend
|
405c440a567e8a0f1577f10d45385f3171398afe
|
[
"CC0-1.0"
] | null | null | null |
ezotv/discordbot_tools/discord_bot.py
|
marcsello/ezotv-frontend
|
405c440a567e8a0f1577f10d45385f3171398afe
|
[
"CC0-1.0"
] | 7
|
2020-01-23T00:50:39.000Z
|
2020-04-18T20:34:40.000Z
|
ezotv/discordbot_tools/discord_bot.py
|
marcsello/ezotv-frontend
|
405c440a567e8a0f1577f10d45385f3171398afe
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python3
from cache_tools import CachedBaseHttpSession
class DiscordBot(object):
    """Thin wrapper over the Discord HTTP API, scoped to a single guild.

    All requests go through a cached HTTP session authenticated with the
    bot token, relative to https://discordapp.com/api/.
    """

    def __init__(self, bot_token: str, guild_id: str, admin_role_name: str, admin_chat_id: str):
        self._session = CachedBaseHttpSession("DISCORD", "https://discordapp.com/api/")
        self._session.headers.update({"Authorization": "Bot {}".format(bot_token)})
        self._roles_ilut = {}  # inverse lookup table: role name -> role id, filled lazily
        self._admin_role_name = admin_role_name
        self._admin_chat_id = admin_chat_id
        self._guild_id = guild_id

    def check_membership(self, userid: str) -> bool:
        """Return True if the user is a member of the guild.

        Raises for HTTP statuses other than 200/404; implicitly returns
        None for any non-error status that is neither 200 nor 404.
        """
        r = self._session.get(f"guilds/{self._guild_id}/members/{userid}")
        if r.status_code == 404:
            return False
        elif r.status_code == 200:
            return True
        else:
            r.raise_for_status()

    def check_for_role(self, userid: str, rolename: str) -> bool:
        """Return True if the guild member identified by userid has the named role.

        The role name -> id table is fetched once and cached on the instance.
        """
        if not self._roles_ilut:
            # Lazily populate the role name -> id lookup table.
            r = self._session.get(f"guilds/{self._guild_id}/roles?limit=1000")
            r.raise_for_status()
            roles = r.json()
            self._roles_ilut = {role['name']: role['id'] for role in roles}
        if rolename not in self._roles_ilut.keys():
            # Unknown role name: nobody can have it.
            return False
        r = self._session.get(f"guilds/{self._guild_id}/members/{userid}")
        if r.status_code == 404:
            # Not a guild member, so cannot hold the role.
            return False
        else:
            r.raise_for_status()
        return self._roles_ilut[rolename] in r.json()['roles']

    def check_is_admin(self, userid: str) -> bool:
        """Return True if the user holds the configured admin role."""
        return self.check_for_role(userid, self._admin_role_name)

    def get_members(self) -> list:
        """Return the raw member objects of the guild (up to 1000)."""
        r = self._session.get(f"guilds/{self._guild_id}/members?limit=1000")
        r.raise_for_status()
        return r.json()  # WTF ?!

    def get_members_lut(self) -> dict:
        """Return guild members keyed by their user id."""
        members = self.get_members()
        return {member['user']['id']: member for member in members}

    def post_log(self, msg: str):
        """Post *msg* as a plain (non-TTS) message to the admin channel.

        Raises on any non-success HTTP status.
        """
        data = {
            "content": msg,
            "tts": False
        }
        r = self._session.post(f"channels/{self._admin_chat_id}/messages", json=data)
        r.raise_for_status()
| 30
| 96
| 0.610648
|
4a15913b285e320842474f1a02025d23f5d9fa3e
| 10,520
|
py
|
Python
|
intersight/model/compute_storage_virtual_drive_operation_all_of.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 5
|
2021-12-16T15:13:32.000Z
|
2022-03-29T16:09:54.000Z
|
intersight/model/compute_storage_virtual_drive_operation_all_of.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 4
|
2022-01-25T19:05:51.000Z
|
2022-03-29T20:18:37.000Z
|
intersight/model/compute_storage_virtual_drive_operation_all_of.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 2
|
2020-07-07T15:01:08.000Z
|
2022-01-31T04:27:35.000Z
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    # Deferred import, injected into module globals so openapi_types() can
    # reference the class at call time (presumably to avoid import cycles
    # between generated model modules — standard openapi-generator pattern).
    from intersight.model.compute_storage_virtual_drive import ComputeStorageVirtualDrive
    globals()['ComputeStorageVirtualDrive'] = ComputeStorageVirtualDrive
class ComputeStorageVirtualDriveOperationAllOf(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'COMPUTE.STORAGEVIRTUALDRIVEOPERATION': "compute.StorageVirtualDriveOperation",
},
('object_type',): {
'COMPUTE.STORAGEVIRTUALDRIVEOPERATION': "compute.StorageVirtualDriveOperation",
},
('admin_action',): {
'NONE': "None",
'DELETE': "Delete",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'object_type': (str,), # noqa: E501
'admin_action': (str,), # noqa: E501
'controller_id': (str,), # noqa: E501
'virtual_drives': ([ComputeStorageVirtualDrive], none_type,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
'admin_action': 'AdminAction', # noqa: E501
'controller_id': 'ControllerId', # noqa: E501
'virtual_drives': 'VirtualDrives', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):  # noqa: E501
    """ComputeStorageVirtualDriveOperationAllOf - a model defined in OpenAPI

    Keyword Args:
        class_id (str): Discriminator naming the concrete payload type.
            Defaults to "compute.StorageVirtualDriveOperation"; must be one of
            ["compute.StorageVirtualDriveOperation"].  # noqa: E501
        object_type (str): Fully-qualified instantiated type; same value as
            'ClassId'. Defaults to "compute.StorageVirtualDriveOperation".  # noqa: E501
        admin_action (str): Administrative action to perform on the selected
            Storage Virtual Drives. * `None` - no action. * `Delete` - delete
            the selected drives. [optional, server default "None"]  # noqa: E501
        controller_id (str): Storage Controller Id of the storage Virtual
            Drives of the server. [optional]  # noqa: E501
        virtual_drives ([ComputeStorageVirtualDrive], none_type): [optional]  # noqa: E501
        _check_type (bool): if True, values for parameters in openapi_types
            will be type checked and a TypeError raised on mismatch.
            Defaults to True.
        _path_to_item (tuple/list): keys/indices used to drill down to this
            model in received_data when deserializing a response.
        _spec_property_naming (bool): True if the variable names in the input
            data are serialized names as in the OpenAPI document; False for
            pythonic (snake case) names. Defaults to False.
        _configuration (Configuration): instance used when deserializing a
            file_type parameter; if omitted, no type conversion is done.
        _visited_composed_classes (tuple): classes already traversed via a
            discriminator, so a discriminator is never followed twice while
            walking composed (allOf) schemas.
    """
    # Discriminator fields keep their defaults if the caller omitted them;
    # note .get (not .pop) so they also flow through the kwargs loop below.
    class_id = kwargs.get('class_id', "compute.StorageVirtualDriveOperation")
    object_type = kwargs.get('object_type', "compute.StorageVirtualDriveOperation")
    # Internal knobs are popped so they are never treated as model fields.
    _check_type = kwargs.pop('_check_type', True)
    _spec_property_naming = kwargs.pop('_spec_property_naming', False)
    _path_to_item = kwargs.pop('_path_to_item', ())
    _configuration = kwargs.pop('_configuration', None)
    _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
    # The generated API is keyword-only; positional args are always an error.
    if args:
        raise ApiTypeError(
            "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                args,
                self.__class__.__name__,
            ),
            path_to_item=_path_to_item,
            valid_classes=(self.__class__,),
        )
    self._data_store = {}
    self._check_type = _check_type
    self._spec_property_naming = _spec_property_naming
    self._path_to_item = _path_to_item
    self._configuration = _configuration
    # Record this class so discriminator traversal never revisits it.
    self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
    self.class_id = class_id
    self.object_type = object_type
    # Remaining kwargs are model fields; unknown keys are silently dropped
    # only when the configuration explicitly asks for that.
    for var_name, var_value in kwargs.items():
        if var_name not in self.attribute_map and \
                self._configuration is not None and \
                self._configuration.discard_unknown_keys and \
                self.additional_properties_type is None:
            # discard variable.
            continue
        setattr(self, var_name, var_value)
| 52.338308
| 1,678
| 0.645532
|
4a1592875c479347f14aa2c4c71d9e16fe54e043
| 5,164
|
py
|
Python
|
src/serenity/strategy/backtester.py
|
neosavvy/serenity
|
c7c8f4d2f48437cd33e0f6b135455f008031b19f
|
[
"Apache-2.0"
] | null | null | null |
src/serenity/strategy/backtester.py
|
neosavvy/serenity
|
c7c8f4d2f48437cd33e0f6b135455f008031b19f
|
[
"Apache-2.0"
] | null | null | null |
src/serenity/strategy/backtester.py
|
neosavvy/serenity
|
c7c8f4d2f48437cd33e0f6b135455f008031b19f
|
[
"Apache-2.0"
] | null | null | null |
import logging
from pathlib import Path
import fire
import toml
from tau.core import HistoricNetworkScheduler, Event
from serenity.equity.sharadar_api import create_sharadar_session
from serenity.strategy.api import PriceField, InvestmentStrategy, Portfolio
from serenity.strategy.core import TradableUniversePricingContext, DefaultRebalanceContext, \
ZeroCommissionTradingCostCalculator, PandasMarketCalendarMarketScheduleProvider, TradableUniverseDividendContext, \
DailyProcessingSchedule
from serenity.strategy.historical import BacktestStrategyContext, MarketOnCloseTradingSimulator
from serenity.strategy.sharadar import SharadarTradableUniverse, SharadarPricingContext, SharadarDividendContext
from serenity.strategy.utils import StrategyLoader
from serenity.utils import init_logging
class InvestmentStrategyBacktester:
    """
    Serenity investment strategy backtester.

    Wires a strategy instance into a historic event scheduler: Sharadar data
    supplies pricing and dividends, rebalancing and daily bookkeeping run as
    scheduler events, and the resulting portfolio is returned for inspection.
    """
    logger = logging.getLogger(__name__)

    def __init__(self, scheduler: HistoricNetworkScheduler):
        self.logger.info('Serenity investment strategy backtester starting up')
        self.scheduler = scheduler

    def run(self, strategy_instance: InvestmentStrategy, config: dict) -> Portfolio:
        """Set up contexts and event wiring for one backtest; returns the
        portfolio object (which is populated once the caller runs the
        scheduler — this method only registers the events)."""
        session = create_sharadar_session()
        base_universe = SharadarTradableUniverse(session)
        strategy_ctx = BacktestStrategyContext(self.scheduler, config)
        strategy_instance.init(strategy_ctx)
        # The strategy narrows the full Sharadar universe to what it trades.
        universe = strategy_instance.get_tradable_universe(base_universe)
        portfolio = strategy_instance.get_initial_portfolio()
        # Pricing marks at the close; dividends come from the same universe.
        base_pricing_ctx = SharadarPricingContext(session)
        pricing_ctx = TradableUniversePricingContext(strategy_ctx, universe, base_pricing_ctx, PriceField.CLOSE)
        base_div_ctx = SharadarDividendContext(session)
        div_ctx = TradableUniverseDividendContext(strategy_ctx, universe, base_div_ctx)
        # Zero-commission simulation of market-on-close fills.
        tc_calc = ZeroCommissionTradingCostCalculator()
        trading_ctx = MarketOnCloseTradingSimulator(tc_calc, self.scheduler, pricing_ctx)
        rebalance_ctx = DefaultRebalanceContext(self.scheduler, portfolio, universe, pricing_ctx, div_ctx, trading_ctx)

        class DailyProcessingAction(Event):
            """Per-day bookkeeping: apply dividends and mark the portfolio."""
            def __init__(self, backtester: InvestmentStrategyBacktester):
                self.backtester = backtester

            def on_activate(self) -> bool:
                today = self.backtester.scheduler.get_clock().get_time()
                self.backtester.logger.info(f'Performing daily bookkeeping for {today.date()}')
                # process dividends
                div_policy = strategy_instance.get_dividend_policy(trading_ctx, pricing_ctx)
                for account in portfolio.get_accounts():
                    for position in account.get_positions():
                        div = div_ctx.get_dividend(position.get_tradable(), rebalance_ctx.get_rebalance_time().date())
                        if div is not None:
                            self.backtester.logger.info(f'Dividend paid on {position.get_tradable().get_symbol()}'
                                                        f': {div}')
                            div_policy.apply(div, rebalance_ctx.get_portfolio())
                # mark positions
                portfolio.mark(today.date(), pricing_ctx)
                return True

        class RebalanceAction(Event):
            """Delegates to the strategy's rebalance() at scheduled times."""
            def __init__(self, backtester: InvestmentStrategyBacktester):
                self.backtester = backtester

            def on_activate(self) -> bool:
                self.backtester.logger.info(f'Rebalancing at {rebalance_ctx.get_rebalance_time()}')
                strategy_instance.rebalance(rebalance_ctx)
                return True

        # NOTE(review): timezone is hard-coded to US/Eastern here — presumably
        # because Sharadar covers US equities only; confirm before reuse.
        msp = PandasMarketCalendarMarketScheduleProvider(self.scheduler, 'US/Eastern')
        rebalance_event = strategy_instance.get_rebalance_schedule(self.scheduler, universe, msp).get_rebalance_event()
        daily_close_event = DailyProcessingSchedule(universe, msp).get_market_close_event()
        self.scheduler.get_network().connect(rebalance_event, RebalanceAction(self))
        self.scheduler.get_network().connect(daily_close_event, DailyProcessingAction(self))
        return portfolio
def main(config_path: str, strategy_dir: str, start_time: str, end_time: str):
    """Load a strategy described by a TOML config, backtest it over the
    given historic window, and print the final account balances."""
    init_logging()
    cfg = toml.load(config_path)
    # the config names the module / class of the strategy implementation
    strategy = StrategyLoader(Path(strategy_dir)).load(cfg['strategy']['module'],
                                                       cfg['strategy']['class'])
    scheduler = HistoricNetworkScheduler.new_instance(start_time, end_time)
    portfolio = InvestmentStrategyBacktester(scheduler).run(strategy, cfg)
    scheduler.run()
    # dump the end-of-backtest state of every account
    for account in portfolio.get_accounts():
        print(f'Account: [{account.get_name()}]')
        print(f'\tcash: {account.get_cash_balance().get_balance()}')
        for position in account.get_positions():
            print(f'\t{position.get_tradable().get_symbol()}: {position.get_qty()} shares ({position.get_notional()})')
if __name__ == '__main__':
    # python-fire turns main()'s parameters into CLI arguments.
    fire.Fire(main)
| 44.904348
| 119
| 0.713013
|
4a15936579b91a1ea1cbf4e9a0b98797bb90a4a2
| 519
|
py
|
Python
|
migrations/versions/021_Add_allow_duplicate_study_number_column.py
|
LCBRU/lbrc_upload
|
be42fef97b67c1f25329db52ae3a88eb293a1203
|
[
"MIT"
] | null | null | null |
migrations/versions/021_Add_allow_duplicate_study_number_column.py
|
LCBRU/lbrc_upload
|
be42fef97b67c1f25329db52ae3a88eb293a1203
|
[
"MIT"
] | null | null | null |
migrations/versions/021_Add_allow_duplicate_study_number_column.py
|
LCBRU/lbrc_upload
|
be42fef97b67c1f25329db52ae3a88eb293a1203
|
[
"MIT"
] | null | null | null |
from sqlalchemy import MetaData, Table, Index, Column, Boolean
meta = MetaData()
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
study = Table("study", meta, autoload=True)
allow_duplicate_study_number = Column("allow_duplicate_study_number", Boolean, default=False)
allow_duplicate_study_number.create(study)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
study = Table("study", meta, autoload=True)
study.c.allow_duplicate_study_number.drop()
| 27.315789
| 97
| 0.757225
|
4a15936e9b5c7e40f5dd5436f90eabc9486380b6
| 15,400
|
py
|
Python
|
awx/api/views/root.py
|
sumit-21/awx
|
966a62c6bf2ec0c672e076684341bc6bd75827af
|
[
"Apache-2.0"
] | 17
|
2021-04-03T01:40:17.000Z
|
2022-03-03T11:45:20.000Z
|
awx/api/views/root.py
|
sumit-21/awx
|
966a62c6bf2ec0c672e076684341bc6bd75827af
|
[
"Apache-2.0"
] | 24
|
2021-05-18T21:13:35.000Z
|
2022-03-29T10:23:52.000Z
|
awx/api/views/root.py
|
dkelosky/awx
|
44223003aa87b6580dfb99595f5898b9a88f6e73
|
[
"Apache-2.0"
] | 14
|
2021-04-06T20:05:41.000Z
|
2022-03-24T14:16:03.000Z
|
# Copyright (c) 2018 Ansible, Inc.
# All Rights Reserved.
import logging
import operator
import json
from collections import OrderedDict
from django.conf import settings
from django.utils.encoding import smart_text
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework import status
import requests
from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.ha import is_ha_environment
from awx.main.utils import (
get_awx_version,
get_ansible_version,
get_custom_venv_choices,
to_python_boolean,
)
from awx.api.versioning import reverse, drf_reverse
from awx.conf.license import get_license
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import (
Project,
Organization,
Instance,
InstanceGroup,
JobTemplate,
)
from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views.root')
# Top-level API root: advertises the available API versions and a few
# branding/login settings. (No class docstring on purpose: DRF would render
# it as the browsable-API description.)
class ApiRootView(APIView):

    permission_classes = (AllowAny,)
    name = _('REST API')
    versioning_class = None
    swagger_topic = 'Versioning'

    @method_decorator(ensure_csrf_cookie)
    def get(self, request, format=None):
        ''' List supported API versions '''
        v2_url = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
        data = OrderedDict([
            ('description', _('AWX REST API')),
            ('current_version', v2_url),
            ('available_versions', {'v2': v2_url}),
            ('oauth2', drf_reverse('api:oauth_authorization_root_view')),
            ('custom_logo', settings.CUSTOM_LOGO),
            ('custom_login_info', settings.CUSTOM_LOGIN_INFO),
            ('login_redirect_override', settings.LOGIN_REDIRECT_OVERRIDE),
        ])
        return Response(data)
# Root endpoint listing the OAuth 2 authorization/token URLs.
class ApiOAuthAuthorizationRootView(APIView):

    permission_classes = (AllowAny,)
    name = _("API OAuth 2 Authorization Root")
    versioning_class = None
    swagger_topic = 'Authentication'

    def get(self, request, format=None):
        # (response key, DRF view name) pairs, in response order
        endpoints = (
            ('authorize', 'api:authorize'),
            ('token', 'api:token'),
            ('revoke_token', 'api:revoke-token'),
        )
        data = OrderedDict((key, drf_reverse(view_name)) for key, view_name in endpoints)
        return Response(data)
# Enumerates every top-level collection endpoint for one API version.
class ApiVersionRootView(APIView):

    permission_classes = (AllowAny,)
    swagger_topic = 'Versioning'

    # (response key, DRF view name) pairs, in the order they appear in the
    # response body.
    _resources = (
        ('ping', 'api:api_v2_ping_view'),
        ('instances', 'api:instance_list'),
        ('instance_groups', 'api:instance_group_list'),
        ('config', 'api:api_v2_config_view'),
        ('settings', 'api:setting_category_list'),
        ('me', 'api:user_me_list'),
        ('dashboard', 'api:dashboard_view'),
        ('organizations', 'api:organization_list'),
        ('users', 'api:user_list'),
        ('projects', 'api:project_list'),
        ('project_updates', 'api:project_update_list'),
        ('teams', 'api:team_list'),
        ('credentials', 'api:credential_list'),
        ('credential_types', 'api:credential_type_list'),
        ('credential_input_sources', 'api:credential_input_source_list'),
        ('applications', 'api:o_auth2_application_list'),
        ('tokens', 'api:o_auth2_token_list'),
        ('metrics', 'api:metrics_view'),
        ('inventory', 'api:inventory_list'),
        ('inventory_scripts', 'api:inventory_script_list'),
        ('inventory_sources', 'api:inventory_source_list'),
        ('inventory_updates', 'api:inventory_update_list'),
        ('groups', 'api:group_list'),
        ('hosts', 'api:host_list'),
        ('job_templates', 'api:job_template_list'),
        ('jobs', 'api:job_list'),
        ('job_events', 'api:job_event_list'),
        ('ad_hoc_commands', 'api:ad_hoc_command_list'),
        ('system_job_templates', 'api:system_job_template_list'),
        ('system_jobs', 'api:system_job_list'),
        ('schedules', 'api:schedule_list'),
        ('roles', 'api:role_list'),
        ('notification_templates', 'api:notification_template_list'),
        ('notifications', 'api:notification_list'),
        ('labels', 'api:label_list'),
        ('unified_job_templates', 'api:unified_job_template_list'),
        ('unified_jobs', 'api:unified_job_list'),
        ('activity_stream', 'api:activity_stream_list'),
        ('workflow_job_templates', 'api:workflow_job_template_list'),
        ('workflow_jobs', 'api:workflow_job_list'),
        ('workflow_approvals', 'api:workflow_approval_list'),
        ('workflow_job_template_nodes', 'api:workflow_job_template_node_list'),
        ('workflow_job_nodes', 'api:workflow_job_node_list'),
    )

    def get(self, request, format=None):
        ''' List top level resources '''
        data = OrderedDict(
            (key, reverse(view_name, request=request))
            for key, view_name in self._resources
        )
        return Response(data)
# v2 concretization of the version root; only the display name differs.
class ApiV2RootView(ApiVersionRootView):
    name = _('Version 2')
class ApiV2PingView(APIView):
    """A simple view that reports very basic information about this
    instance, which is acceptable to be public information.
    """
    permission_classes = (AllowAny,)
    authentication_classes = ()
    name = _('Ping')
    swagger_topic = 'System Configuration'

    def get(self, request, format=None):
        """Return some basic information about this instance

        Everything returned here should be considered public / insecure, as
        this requires no auth and is intended for use by the installer process.
        """
        response = {
            'ha': is_ha_environment(),
            'version': get_awx_version(),
            'active_node': settings.CLUSTER_HOST_ID,
            'install_uuid': settings.INSTALL_UUID,
        }
        response['instances'] = []
        for instance in Instance.objects.all():
            response['instances'].append(dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified,
                                              capacity=instance.capacity, version=instance.version))
        # BUG FIX: the original called sorted(response['instances'], ...) and
        # discarded the returned list, so the instances were never actually
        # sorted. Sort in place so the response is ordered by node name.
        response['instances'].sort(key=operator.itemgetter('node'))
        response['instance_groups'] = []
        for instance_group in InstanceGroup.objects.prefetch_related('instances'):
            response['instance_groups'].append(dict(name=instance_group.name,
                                                    capacity=instance_group.capacity,
                                                    instances=[x.hostname for x in instance_group.instances.all()]))
        return Response(response)
# Validates Red Hat subscription credentials against the licenser backend.
class ApiV2SubscriptionView(APIView):

    permission_classes = (IsAuthenticated,)
    name = _('Configuration')
    swagger_topic = 'System Configuration'

    def check_permissions(self, request):
        # Only superusers may POST; read-only metadata methods are open to
        # any authenticated user.
        super(ApiV2SubscriptionView, self).check_permissions(request)
        if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
            self.permission_denied(request)  # Raises PermissionDenied exception.

    def post(self, request):
        from awx.main.utils.common import get_licenser
        data = request.data.copy()
        # '$encrypted$' is the placeholder the UI sends back instead of the
        # real secret; substitute the stored password in that case.
        if data.get('rh_password') == '$encrypted$':
            data['rh_password'] = settings.REDHAT_PASSWORD
        try:
            user, pw = data.get('rh_username'), data.get('rh_password')
            # Run validation with the task environment (proxy settings etc.)
            # applied to os.environ for the duration of the call.
            with set_environ(**settings.AWX_TASK_ENV):
                validated = get_licenser().validate_rh(user, pw)
            # Persist credentials only after successful validation.
            if user:
                settings.REDHAT_USERNAME = data['rh_username']
            if pw:
                settings.REDHAT_PASSWORD = data['rh_password']
        except Exception as exc:
            # Map known failure modes to user-facing messages; anything else
            # is logged with traceback and reported generically.
            msg = _("Invalid License")
            if (
                isinstance(exc, requests.exceptions.HTTPError) and
                getattr(getattr(exc, 'response', None), 'status_code', None) == 401
            ):
                msg = _("The provided credentials are invalid (HTTP 401).")
            elif isinstance(exc, requests.exceptions.ProxyError):
                msg = _("Unable to connect to proxy server.")
            elif isinstance(exc, requests.exceptions.ConnectionError):
                msg = _("Could not connect to subscription service.")
            elif isinstance(exc, (ValueError, OSError)) and exc.args:
                msg = exc.args[0]
            else:
                logger.exception(smart_text(u"Invalid license submitted."),
                                 extra=dict(actor=request.user.username))
            return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
        return Response(validated)
# Site-wide configuration endpoint: GET reports settings/license info,
# POST installs a license, DELETE removes it.
class ApiV2ConfigView(APIView):

    permission_classes = (IsAuthenticated,)
    name = _('Configuration')
    swagger_topic = 'System Configuration'

    def check_permissions(self, request):
        # GET is open to any authenticated user; mutations require superuser.
        super(ApiV2ConfigView, self).check_permissions(request)
        if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
            self.permission_denied(request)  # Raises PermissionDenied exception.

    def get(self, request, format=None):
        '''Return various sitewide configuration settings'''
        # Only admins/auditors may see the license key itself.
        if request.user.is_superuser or request.user.is_system_auditor:
            license_data = get_license(show_key=True)
        else:
            license_data = get_license(show_key=False)
            if not license_data.get('valid_key', False):
                license_data = {}
        if license_data and 'features' in license_data and 'activity_streams' in license_data['features']:
            # FIXME: Make the final setting value dependent on the feature?
            license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED
        # Normalize the Pendo analytics state to a known value.
        pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
        data = dict(
            time_zone=settings.TIME_ZONE,
            license_info=license_data,
            version=get_awx_version(),
            ansible_version=get_ansible_version(),
            # open licenses (AWX) have no EULA to accept
            eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
            analytics_status=pendo_state,
            become_methods=PRIVILEGE_ESCALATION_METHODS,
        )
        # If LDAP is enabled, user_ldap_fields will return a list of field
        # names that are managed by LDAP and should be read-only for users with
        # a non-empty ldap_dn attribute.
        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
            user_ldap_fields = ['username', 'password']
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
            data['user_ldap_fields'] = user_ldap_fields
        # Org-level admins/auditors additionally see project path options.
        if request.user.is_superuser \
                or request.user.is_system_auditor \
                or Organization.accessible_objects(request.user, 'admin_role').exists() \
                or Organization.accessible_objects(request.user, 'auditor_role').exists() \
                or Organization.accessible_objects(request.user, 'project_admin_role').exists():
            data.update(dict(
                project_base_dir = settings.PROJECTS_ROOT,
                project_local_paths = Project.get_local_path_choices(),
                custom_virtualenvs = get_custom_venv_choices()
            ))
        elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
            data['custom_virtualenvs'] = get_custom_venv_choices()
        return Response(data)

    def post(self, request):
        # Validate the payload shape and the mandatory EULA acceptance flag
        # before attempting license validation.
        if not isinstance(request.data, dict):
            return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
        if "eula_accepted" not in request.data:
            return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
        try:
            eula_accepted = to_python_boolean(request.data["eula_accepted"])
        except ValueError:
            return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)
        if not eula_accepted:
            return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
        request.data.pop("eula_accepted")
        # Round-trip through JSON to ensure the payload is serializable.
        try:
            data_actual = json.dumps(request.data)
        except Exception:
            logger.info(smart_text(u"Invalid JSON submitted for license."),
                        extra=dict(actor=request.user.username))
            return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
        try:
            from awx.main.utils.common import get_licenser
            license_data = json.loads(data_actual)
            license_data_validated = get_licenser(**license_data).validate()
        except Exception:
            logger.warning(smart_text(u"Invalid license submitted."),
                           extra=dict(actor=request.user.username))
            return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
        # If the license is valid, write it to the database.
        if license_data_validated['valid_key']:
            settings.LICENSE = license_data
            if not settings_registry.is_setting_read_only('TOWER_URL_BASE'):
                settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
            return Response(license_data_validated)
        logger.warning(smart_text(u"Invalid license submitted."),
                       extra=dict(actor=request.user.username))
        return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request):
        # Clearing settings.LICENSE removes the stored license.
        try:
            settings.LICENSE = {}
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # FIX: Log
            return Response({"error": _("Failed to remove license.")}, status=status.HTTP_400_BAD_REQUEST)
| 46.95122
| 131
| 0.667532
|
4a15944560ddb696558e3eff128a36726f5c40f4
| 2,399
|
py
|
Python
|
pysb/examples/run_earm_hpp.py
|
weilandtd/pysb
|
8037bc735797ca667af43496127b8ee4cc63a75f
|
[
"BSD-2-Clause"
] | null | null | null |
pysb/examples/run_earm_hpp.py
|
weilandtd/pysb
|
8037bc735797ca667af43496127b8ee4cc63a75f
|
[
"BSD-2-Clause"
] | null | null | null |
pysb/examples/run_earm_hpp.py
|
weilandtd/pysb
|
8037bc735797ca667af43496127b8ee4cc63a75f
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
""" Run the Extrinsic Apoptosis Reaction Model (EARM) using BioNetGen's
Hybrid-Particle Population (HPP) algorithm.
NFsim provides stochastic simulation without reaction network generation,
allowing simulation of models with large (or infinite) reaction networks by
keeping track of species counts. However, it can fail when the number of
instances of a species gets too large (typically >200000). HPP circumvents
this problem by allowing the user to define species with large instance
counts as populations rather than NFsim particles.
This example runs the EARM 1.0 model with HPP, which fails to run on NFsim
with the default settings due to large initial concentration coutns of
several species. By assigning population maps to these species, we can run
the simulation.
Reference: Hogg et al. Plos Comb Biol 2014
https://doi.org/10.1371/journal.pcbi.1003544
"""
from pysb.examples.earm_1_0 import model
from pysb.simulator import BngSimulator
from pysb.simulator.bng import PopulationMap
from pysb import Parameter
import matplotlib.pyplot as plt
import numpy as np
def plot_mean_min_max(name, title=None):
    """Plot every simulated trajectory for observable *name* together with
    its mean/min/max envelope (uses module-level `trajectories` and `tout`)."""
    obs = np.array([tr[:][name] for tr in trajectories]).T
    plt.figure(title if title else name)
    # faint grey lines: one per stochastic run
    plt.plot(tout.T, obs, '0.5', lw=2, alpha=0.25)
    # summary statistics across runs, drawn on top
    for series, fmt, label in ((obs.mean(1), 'k--', "Mean"),
                               (obs.min(1), 'b--', "Minimum"),
                               (obs.max(1), 'r--', "Maximum")):
        plt.plot(tout[0], series, fmt, lw=3, label=label)
    plt.legend(loc=0)
    plt.xlabel('Time')
    plt.ylabel('Population of %s' % name)
# Species with large instance counts that would overwhelm NFsim; these are
# treated as lumped populations instead of individual particles.
PARP, CPARP, Mito, mCytoC = [model.monomers[x] for x in
                             ['PARP', 'CPARP', 'Mito', 'mCytoC']]
# Lumping rate constant for the HPP population maps (not exported as a
# model parameter).
klump = Parameter('klump', 10000, _export=False)
model.add_component(klump)
population_maps = [
    PopulationMap(PARP(b=None), klump),
    PopulationMap(CPARP(b=None), klump),
    PopulationMap(Mito(b=None), klump),
    PopulationMap(mCytoC(b=None), klump)
]
# 20 stochastic NFsim runs over 0..20000 time units, 101 sample points.
sim = BngSimulator(model, tspan=np.linspace(0, 20000, 101))
simres = sim.run(n_runs=20, method='nf', population_maps=population_maps)
trajectories = simres.all
tout = simres.tout
# Plot envelopes for the key apoptosis observables.
plot_mean_min_max('Bid_unbound')
plot_mean_min_max('PARP_unbound')
plot_mean_min_max('mSmac_unbound')
plot_mean_min_max('tBid_total')
plot_mean_min_max('CPARP_total')
plot_mean_min_max('cSmac_total')
plt.show()
| 35.80597
| 76
| 0.72697
|
4a1594590b073202c4e0dfefd88db183da2b39c7
| 10,756
|
py
|
Python
|
server/sqlmap/lib/techniques/brute/use.py
|
kurpav/volcano
|
31d5f8f6f5a282abbea3861368eb39cfe33bba77
|
[
"MIT"
] | null | null | null |
server/sqlmap/lib/techniques/brute/use.py
|
kurpav/volcano
|
31d5f8f6f5a282abbea3861368eb39cfe33bba77
|
[
"MIT"
] | null | null | null |
server/sqlmap/lib/techniques/brute/use.py
|
kurpav/volcano
|
31d5f8f6f5a282abbea3861368eb39cfe33bba77
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import time
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import filterListValue
from lib.core.common import getFileItems
from lib.core.common import Backend
from lib.core.common import getPageWordSet
from lib.core.common import hashDBWrite
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import safeStringFormat
from lib.core.common import safeSQLIdentificatorNaming
from lib.core.common import unsafeSQLIdentificatorNaming
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.enums import HASHDB_KEYS
from lib.core.enums import PAYLOAD
from lib.core.exception import SqlmapDataException
from lib.core.exception import SqlmapMissingMandatoryOptionException
from lib.core.settings import METADB_SUFFIX
from lib.core.settings import BRUTE_COLUMN_EXISTS_TEMPLATE
from lib.core.settings import BRUTE_TABLE_EXISTS_TEMPLATE
from lib.core.threads import getCurrentThreadData
from lib.core.threads import runThreads
from lib.request import inject
def _addPageTextWords():
    """Collect candidate identifiers from the original page text: lowercase
    words longer than two characters that do not start with a digit,
    deduplicated while preserving first-seen order."""
    logger.info("adding words used on web page to the check list")
    collected = []
    for raw in getPageWordSet(kb.originalPage):
        candidate = raw.lower()
        if len(candidate) <= 2 or candidate[0].isdigit():
            continue
        if candidate not in collected:
            collected.append(candidate)
    return collected
def tableExists(tableFile, regex=None):
    """Brute-force check which common table names exist on the target DBMS.

    Reads candidate names from `tableFile` (plus words scraped from the page),
    optionally filters them by `regex`, probes each with a boolean-based
    payload across worker threads, and records hits in kb.data.cachedTables
    and kb.brute.tables. Returns kb.data.cachedTables.
    """
    # Warn (once) when only time-based/stacked techniques are available:
    # they make this brute-force check slow and unreliable.
    if kb.tableExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct:
        warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED])
        warnMsg += "for common table existence check"
        logger.warn(warnMsg)
        message = "are you sure you want to continue? [y/N] "
        test = readInput(message, default="N")
        kb.tableExistsChoice = test[0] in ("y", "Y")
        if not kb.tableExistsChoice:
            return None
    # Sanity probe with a random (non-existent) table name: a truthy result
    # means the injection cannot distinguish outcomes, so bail out.
    result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr())))
    if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
        # Oracle/DB2 store unquoted identifiers upper-cased.
        conf.db = conf.db.upper()
    if result:
        errMsg = "can't use table existence check because of detected invalid results "
        errMsg += "(most probably caused by inability of the used injection "
        errMsg += "to distinguish errornous results)"
        raise SqlmapDataException(errMsg)
    tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True)
    infoMsg = "checking table existence using items from '%s'" % tableFile
    logger.info(infoMsg)
    tables.extend(_addPageTextWords())
    tables = filterListValue(tables, regex)
    # Shared, lock-protected state for the worker threads.
    threadData = getCurrentThreadData()
    threadData.shared.count = 0
    threadData.shared.limit = len(tables)
    threadData.shared.value = []
    threadData.shared.unique = set()

    def tableExistsThread():
        threadData = getCurrentThreadData()
        while kb.threadContinue:
            # Claim the next candidate index under the count lock.
            kb.locks.count.acquire()
            if threadData.shared.count < threadData.shared.limit:
                table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True)
                threadData.shared.count += 1
                kb.locks.count.release()
            else:
                kb.locks.count.release()
                break
            # Qualify with the database name where the DBMS supports it
            # (MSSQL/Sybase use 'db..table' to skip the schema part).
            if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
                fullTableName = "%s%s%s" % (conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table)
            else:
                fullTableName = table
            result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName)))
            # Console output and result list are guarded by the io lock.
            kb.locks.io.acquire()
            if result and table.lower() not in threadData.shared.unique:
                threadData.shared.value.append(table)
                threadData.shared.unique.add(table.lower())
                if conf.verbose in (1, 2) and not hasattr(conf, "api"):
                    clearConsoleLine(True)
                    infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table))
                    dataToStdout(infoMsg, True)
            if conf.verbose in (1, 2):
                status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
                dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)
            kb.locks.io.release()

    try:
        runThreads(conf.threads, tableExistsThread, threadChoice=True)
    except KeyboardInterrupt:
        # Ctrl-C during the scan: keep whatever was found so far.
        warnMsg = "user aborted during table existence "
        warnMsg += "check. sqlmap will display partial output"
        logger.warn(warnMsg)
    clearConsoleLine(True)
    dataToStdout("\n")
    if not threadData.shared.value:
        warnMsg = "no table(s) found"
        logger.warn(warnMsg)
    else:
        # Cache the hits per-database and persist them in the session DB.
        for item in threadData.shared.value:
            if conf.db not in kb.data.cachedTables:
                kb.data.cachedTables[conf.db] = [item]
            else:
                kb.data.cachedTables[conf.db].append(item)
        for _ in ((conf.db, item) for item in threadData.shared.value):
            if _ not in kb.brute.tables:
                kb.brute.tables.append(_)
        hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True)
    return kb.data.cachedTables
def columnExists(columnFile, regex=None):
    """
    Brute-force check which common column names exist in table conf.tbl,
    using candidate names read from columnFile (optionally filtered by
    regex). Positive findings are cached in kb.data.cachedColumns (keyed
    by conf.db / conf.tbl) and that cache is returned.
    """

    # Time-based/stacked-only injections make per-item brute forcing very
    # slow, so ask the user once for confirmation (choice is remembered
    # in kb.columnExistsChoice across calls).
    if kb.columnExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct:
        warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED])
        warnMsg += "for common column existence check"
        logger.warn(warnMsg)

        message = "are you sure you want to continue? [y/N] "
        test = readInput(message, default="N")
        kb.columnExistsChoice = test[0] in ("y", "Y")

    if not kb.columnExistsChoice:
        return None

    if not conf.tbl:
        errMsg = "missing table parameter"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    # Oracle and DB2 store unquoted identifiers upper-case.
    if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
        conf.db = conf.db.upper()

    # Sanity check: a random column name must NOT be reported as existing;
    # if it is, the injection cannot distinguish true/false responses.
    result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr())))

    if result:
        errMsg = "can't use column existence check because of detected invalid results "
        errMsg += "(most probably caused by inability of the used injection "
        errMsg += "to distinguish errornous results)"
        raise SqlmapDataException(errMsg)

    infoMsg = "checking column existence using items from '%s'" % columnFile
    logger.info(infoMsg)

    # Candidate list: wordlist file + words scraped from page text.
    columns = getFileItems(columnFile, unique=True)
    columns.extend(_addPageTextWords())
    columns = filterListValue(columns, regex)

    table = safeSQLIdentificatorNaming(conf.tbl, True)
    # Prefix with the database name where the DBMS supports qualification.
    if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
        table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table)

    kb.threadContinue = True
    kb.bruteMode = True

    # Shared work-queue state consumed by the worker threads below.
    threadData = getCurrentThreadData()
    threadData.shared.count = 0
    threadData.shared.limit = len(columns)
    threadData.shared.value = []

    def columnExistsThread():
        # Worker: atomically claim the next candidate index under the
        # count lock, test it, then record/report under the I/O lock.
        threadData = getCurrentThreadData()

        while kb.threadContinue:
            kb.locks.count.acquire()
            if threadData.shared.count < threadData.shared.limit:
                column = safeSQLIdentificatorNaming(columns[threadData.shared.count])
                threadData.shared.count += 1
                kb.locks.count.release()
            else:
                # No more candidates; release before leaving the loop.
                kb.locks.count.release()
                break

            result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table)))

            kb.locks.io.acquire()

            if result:
                threadData.shared.value.append(column)

                if conf.verbose in (1, 2) and not hasattr(conf, "api"):
                    clearConsoleLine(True)
                    infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
                    dataToStdout(infoMsg, True)

            if conf.verbose in (1, 2):
                status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
                dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)

            kb.locks.io.release()

    try:
        runThreads(conf.threads, columnExistsThread, threadChoice=True)
    except KeyboardInterrupt:
        warnMsg = "user aborted during column existence "
        warnMsg += "check. sqlmap will display partial output"
        logger.warn(warnMsg)

    clearConsoleLine(True)
    dataToStdout("\n")

    if not threadData.shared.value:
        warnMsg = "no column(s) found"
        logger.warn(warnMsg)
    else:
        columns = {}

        for column in threadData.shared.value:
            # Heuristically classify each found column as numeric or not:
            # MySQL via REGEXP on digits, others via ROUND() round-trip.
            if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
                result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column)))
            else:
                result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column)))

            if result:
                columns[column] = "numeric"
            else:
                columns[column] = "non-numeric"

        kb.data.cachedColumns[conf.db] = {conf.tbl: columns}

        # Persist unique (db, table, column, type) tuples for later reuse.
        for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()):
            if _ not in kb.brute.columns:
                kb.brute.columns.append(_)

        hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True)

    return kb.data.cachedColumns
| 39.399267
| 167
| 0.659074
|
4a15954f862cd5afee68d629b588a3ed21b05c3b
| 5,861
|
py
|
Python
|
test/unit/test_pack.py
|
Kris-b50122/pyOCD
|
df605c7aa04d72c5187ab652445063cc232cf378
|
[
"Apache-2.0"
] | 3
|
2019-06-05T01:32:06.000Z
|
2020-05-20T08:55:46.000Z
|
test/unit/test_pack.py
|
asky2083664719/pyOCD
|
5a835e1017182975a11a4caddd83332f74b75fa4
|
[
"Apache-2.0"
] | 1
|
2019-07-05T10:13:09.000Z
|
2019-07-05T10:51:43.000Z
|
test/unit/test_pack.py
|
asky2083664719/pyOCD
|
5a835e1017182975a11a4caddd83332f74b75fa4
|
[
"Apache-2.0"
] | 1
|
2019-01-21T03:01:53.000Z
|
2019-01-21T03:01:53.000Z
|
# pyOCD debugger
# Copyright (c) 2019-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import six
import cmsis_pack_manager
import os
import zipfile
from xml.etree import ElementTree
from pyocd.target.pack import (cmsis_pack, flash_algo, pack_target)
from pyocd.target import TARGET
from pyocd.core import (memory_map, target)
K64F = "MK64FN1M0VDC12"
TEST_DATA_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data")
K64F_PACK_NAME = "NXP.MK64F12_DFP.11.0.0.pack"
K64F_PACK_PATH = os.path.join(TEST_DATA_DIR, K64F_PACK_NAME)
K64F_1M0_FLM = "arm/MK_P1M0.FLM"
@pytest.fixture(scope='module')
def pack_ref():
    # Reference to the NXP MK64F12 device family pack used by these tests.
    # NOTE(review): version here is 11.0.1 while the checked-in pack file
    # is 11.0.0 — confirm the mismatch is intentional.
    return cmsis_pack_manager.CmsisPackRef(
        "NXP",
        "MK64F12_DFP",
        "11.0.1",
    )
@pytest.fixture(scope='module')#, autouse=True)
def cache(tmpdir_factory, pack_ref):
    # cmsis-pack-manager cache rooted in a per-session temp dir; downloads
    # the index entry for the referenced pack (needs network when live).
    tmp_path = str(tmpdir_factory.mktemp("cpm"))
    c = cmsis_pack_manager.Cache(False, False, json_path=tmp_path, data_path=tmp_path)
    c.download_pack_list([pack_ref])
    return c
@pytest.fixture(scope='module')
def k64dev(cache):
    # Installed CMSIS-Pack device entry for the K64F part number.
    # Raises IndexError (failing the test) if the pack is not installed.
    devs = pack_target.ManagedPacks.get_installed_targets()
    return [d for d in devs if d.part_number == K64F].pop()
@pytest.fixture()#autouse=True)
def fixed_installed_packs(monkeypatch, pack_ref):
    # Force ManagedPacks to report exactly our test pack as installed,
    # regardless of what is really present on the host system.
    def my_get_installed_packs(cache=None):
        return [pack_ref]
    monkeypatch.setattr(pack_target.ManagedPacks, 'get_installed_packs', my_get_installed_packs)
@pytest.fixture(scope='function')
def k64pack():
    # Fresh CmsisPack parsed from the checked-in K64F pack file for each test.
    return cmsis_pack.CmsisPack(K64F_PACK_PATH)
@pytest.fixture(scope='function')
def k64f1m0(k64pack):
    """Device entry for the MK64FN1M0VLL12 part from the test pack."""
    matching = [dev for dev in k64pack.devices if dev.part_number == "MK64FN1M0VLL12"]
    return matching.pop()
@pytest.fixture(scope='function')
def k64algo(k64pack):
    # Flash algorithm parsed from the FLM file embedded in the pack.
    flm = k64pack.get_file(K64F_1M0_FLM)
    return flash_algo.PackFlashAlgo(flm)
# Tests for managed packs. Currently disabled as they fail on most systems.
class Disabled_TestPack(object):
    """Tests exercising cmsis-pack-manager managed packs.

    Disabled (not collected by pytest because the class name does not start
    with ``Test``) since they fail on most systems. Fix applied: the local
    variable ``map`` was renamed to ``memmap`` so it no longer shadows the
    ``map`` builtin.
    """

    def test_get_installed(self, pack_ref):
        p = pack_target.ManagedPacks.get_installed_packs()
        assert p == [pack_ref]

    def test_get_targets(self, k64dev):
        assert k64dev.part_number == K64F

    def test_pop_managed_k64(self):
        # Populating a managed target must register it in the TARGET dict.
        pack_target.ManagedPacks.populate_target(K64F)
        assert K64F.lower() in TARGET

    def test_k64_mem_map(self, k64dev):
        # Expected K64F layout: lower/upper SRAM plus 1 MB of flash.
        memmap = k64dev.memory_map
        raml = memmap.get_region_for_address(0x1fff0000)
        ramu = memmap.get_region_for_address(0x20000000)
        flash = memmap.get_default_region_of_type(memory_map.MemoryType.FLASH)
        assert raml.start == 0x1fff0000 and raml.length == 0x10000
        assert ramu.start == 0x20000000 and ramu.length == 0x30000
        assert flash.start == 0 and flash.length == 0x100000
        assert flash.sector_size == 0x1000
class TestPack(object):
    """Parsing tests run directly against the checked-in K64F pack file.

    Fix applied: the local variable ``map`` was renamed to ``memmap`` so it
    no longer shadows the ``map`` builtin.
    """

    def test_devices(self, k64pack):
        devs = k64pack.devices
        pns = [x.part_number for x in devs]
        assert "MK64FN1M0xxx12" in pns
        assert "MK64FX512xxx12" in pns

    # Make sure CmsisPack can open a zip file too.
    def test_zipfile(self):
        z = zipfile.ZipFile(K64F_PACK_PATH, 'r')
        p = cmsis_pack.CmsisPack(z)
        pns = [x.part_number for x in p.devices]
        assert "MK64FN1M0xxx12" in pns

    def test_parse_device_info(self, k64f1m0):
        assert k64f1m0.vendor == "NXP"
        assert k64f1m0.families == ["MK64F12"]
        assert k64f1m0.default_reset_type == target.Target.ResetType.SW

    def test_get_svd(self, k64f1m0):
        # The packaged SVD must parse as XML with a 'device' root element.
        svd = k64f1m0.svd
        x = ElementTree.parse(svd)
        assert x.getroot().tag == 'device'

    def test_mem_map(self, k64f1m0):
        memmap = k64f1m0.memory_map
        bm = memmap.get_boot_memory()
        assert bm.start == 0 and bm.length == 1 * 1024 * 1024
        ram = memmap.get_default_region_of_type(memory_map.MemoryType.RAM)
        assert ram.start == 0x20000000 and ram.length == 0x30000

    # Verify the flash region was converted correctly.
    def test_flash(self, k64f1m0):
        memmap = k64f1m0.memory_map
        flash = memmap.get_boot_memory()
        assert isinstance(flash, memory_map.FlashRegion)
        assert flash.start == 0 and flash.length == 1 * 1024 * 1024
        assert flash.sector_size == 4096
class TestFLM(object):
    """Tests for the PackFlashAlgo extracted from the K64F FLM.

    Fixes applied: renamed local ``map`` to ``memmap`` (was shadowing the
    builtin) and removed commented-out debug prints.
    """

    def test_algo(self, k64algo):
        i = k64algo.flash_info
        assert i.type == 1
        assert i.start == 0
        assert i.size == 1 * 1024 * 1024
        assert i.page_size == 512
        assert i.sector_info_list == [(0, 4 * 1024)]

    def test_algo_dict(self, k64algo, k64f1m0):
        memmap = k64f1m0.memory_map
        ram = memmap.get_default_region_of_type(memory_map.MemoryType.RAM)
        d = k64algo.get_pyocd_flash_algo(4096, ram)
        # The algo blob is loaded above a 0x200-byte stack at the bottom of
        # RAM; the entry-point offsets below come from the FLM symbol table.
        STACK_SIZE = 0x200
        assert d['load_address'] == ram.start + STACK_SIZE
        assert d['pc_init'] == ram.start + STACK_SIZE + 0x21
        assert d['pc_unInit'] == ram.start + STACK_SIZE + 0x71
        assert d['pc_eraseAll'] == ram.start + STACK_SIZE + 0x95
        assert d['pc_erase_sector'] == ram.start + STACK_SIZE + 0xcb
        assert d['pc_program_page'] == ram.start + STACK_SIZE + 0xdf
| 36.403727
| 97
| 0.679918
|
4a1595ac89967546b356e0952e012452c53a4ed4
| 1,336
|
py
|
Python
|
coco_annotation.py
|
exploding-gradients/keras-yolo3
|
dd6787a892e41d405b543060b2ecb25bdca99b6e
|
[
"MIT"
] | null | null | null |
coco_annotation.py
|
exploding-gradients/keras-yolo3
|
dd6787a892e41d405b543060b2ecb25bdca99b6e
|
[
"MIT"
] | null | null | null |
coco_annotation.py
|
exploding-gradients/keras-yolo3
|
dd6787a892e41d405b543060b2ecb25bdca99b6e
|
[
"MIT"
] | null | null | null |
"""Convert COCO 2017 instance annotations into the flat training-list format.

Each output line of train.txt is:
    <image path> <x_min>,<y_min>,<x_max>,<y_max>,<class_id> ...

Fixes applied: both files are now opened via ``with`` (the annotation file
handle was previously leaked and the output file relied on an explicit
``close()``), and the local ``id`` no longer shadows the builtin.
"""
import json
from collections import defaultdict

name_box_id = defaultdict(list)  # image path -> list of [bbox, class_id]
id_name = dict()  # kept for compatibility with the original script; unused here

with open(
        "/data/COCO_2017/annotations/instances_train2017_og.json",
        encoding='utf-8') as f:
    data = json.load(f)

annotations = data['annotations']
for ant in annotations:
    img_id = ant['image_id']
    name = '/data/COCO_2017/train2017/%012d.jpg' % img_id
    cat = ant['category_id']

    # Map sparse COCO category ids (1..90 with gaps) onto contiguous 0..79.
    if 1 <= cat <= 11:
        cat = cat - 1
    elif 13 <= cat <= 25:
        cat = cat - 2
    elif 27 <= cat <= 28:
        cat = cat - 3
    elif 31 <= cat <= 44:
        cat = cat - 5
    elif 46 <= cat <= 65:
        cat = cat - 6
    elif cat == 67:
        cat = cat - 7
    elif cat == 70:
        cat = cat - 9
    elif 72 <= cat <= 82:
        cat = cat - 10
    elif 84 <= cat <= 90:
        cat = cat - 11

    name_box_id[name].append([ant['bbox'], cat])

with open('train.txt', 'w') as out:
    for key in name_box_id.keys():
        out.write(key)
        box_infos = name_box_id[key]
        for info in box_infos:
            # COCO bbox is [x, y, width, height]; convert to corner coords.
            x_min = int(info[0][0])
            y_min = int(info[0][1])
            x_max = x_min + int(info[0][2])
            y_max = y_min + int(info[0][3])
            box_info = " %d,%d,%d,%d,%d" % (
                x_min, y_min, x_max, y_max, int(info[1]))
            out.write(box_info)
        out.write('\n')
| 25.207547
| 62
| 0.538922
|
4a159610a98c71cfee9df2edb82543ba63e96ed0
| 14,782
|
py
|
Python
|
sdk/storage/azure-mgmt-storagesync/tests/test_cli_mgmt_storagesync.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/storage/azure-mgmt-storagesync/tests/test_cli_mgmt_storagesync.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/storage/azure-mgmt-storagesync/tests/test_cli_mgmt_storagesync.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 2
|
2020-05-21T22:51:22.000Z
|
2020-05-26T20:53:01.000Z
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# TEST SCENARIO COVERAGE
# ----------------------
# Methods Total : 37
# Methods Covered : 37
# Examples Total : 38
# Examples Tested : 38
# Coverage % : 100
# ----------------------
import os
import unittest
import azure.mgmt.storagesync
from devtools_testutils import AzureMgmtTestCase, ResourceGroupPreparer
AZURE_LOCATION = 'eastus'
class MgmtMicrosoftStorageSyncTest(AzureMgmtTestCase):
    """Recorded end-to-end exercise of the Storage Sync management client.

    The sections wrapped in triple-quoted strings are deliberately disabled
    (they require live server registration or long-running resources); they
    are kept in place as documentation of the full API surface.
    """

    def setUp(self):
        super(MgmtMicrosoftStorageSyncTest, self).setUp()
        self.mgmt_client = self.create_mgmt_client(
            azure.mgmt.storagesync.StorageSyncManagementClient
        )

    @ResourceGroupPreparer(location=AZURE_LOCATION)
    def test_storagesync(self, resource_group):
        """Create, query, update and delete Storage Sync resources in order."""

        # Resolve the subscription id: environment variable when live,
        # recorded settings otherwise.
        SUBSCRIPTION_ID = None
        if self.is_live:
            SUBSCRIPTION_ID = os.environ.get("AZURE_SUBSCRIPTION_ID", None)
        if not SUBSCRIPTION_ID:
            SUBSCRIPTION_ID = self.settings.SUBSCRIPTION_ID

        RESOURCE_GROUP = resource_group.name
        STORAGE_SYNC_SERVICE_NAME = "ssservicename"
        SYNC_GROUP_NAME = "groupname"
        REGISTERED_SERVER_NAME = "rservicename"
        STORAGE_ACCOUNT_NAME = "accountnamexyz"
        CLOUD_ENDPOINT_NAME = "cendpointname"
        SERVER_ENDPOINT_NAME = "sendpointname"
        LOCATION_NAME = AZURE_LOCATION

        # StorageSyncServices_Create[put]
        BODY = {
            "location": "WestUS",
            "tags": {}
        }
        result = self.mgmt_client.storage_sync_services.create(resource_group.name, STORAGE_SYNC_SERVICE_NAME, BODY)

        # SyncGroups_Create[put]
        BODY = {}
        result = self.mgmt_client.sync_groups.create(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, BODY)

        """
        # RegisteredServers_Create[put]
        BODY = {
            "server_role": "Standalone",
            "server_certificate": "\"MIIDFjCCAf6gAwIBAgIQQS+DS8uhc4VNzUkTw7wbRjANBgkqhkiG9w0BAQ0FADAzMTEwLwYDVQQDEyhhbmt1c2hiLXByb2QzLnJlZG1vbmQuY29ycC5taWNyb3NvZnQuY29tMB4XDTE3MDgwMzE3MDQyNFoXDTE4MDgwNDE3MDQyNFowMzExMC8GA1UEAxMoYW5rdXNoYi1wcm9kMy5yZWRtb25kLmNvcnAubWljcm9zb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALDRvV4gmsIy6jGDPiHsXmvgVP749NNP7DopdlbHaNhjFmYINHl0uWylyaZmgJrROt2mnxN/zEyJtGnqYHlzUr4xvGq/qV5pqgdB9tag/sw9i22gfe9PRZ0FmSOZnXMbLYgLiDFqLtut5gHcOuWMj03YnkfoBEKlFBxWbagvW2yxz/Sxi9OVSJOKCaXra0RpcIHrO/KFl6ho2eE1/7Ykmfa8hZvSdoPd5gHdLiQcMB/pxq+mWp1fI6c8vFZoDu7Atn+NXTzYPKUxKzaisF12TsaKpohUsJpbB3Wocb0F5frn614D2pg14ERB5otjAMWw1m65csQWPI6dP8KIYe0+QPkCAwEAAaMmMCQwIgYDVR0lAQH/BBgwFgYIKwYBBQUHAwIGCisGAQQBgjcKAwwwDQYJKoZIhvcNAQENBQADggEBAA4RhVIBkw34M1RwakJgHvtjsOFxF1tVQA941NtLokx1l2Z8+GFQkcG4xpZSt+UN6wLerdCbnNhtkCErWUDeaT0jxk4g71Ofex7iM04crT4iHJr8mi96/XnhnkTUs+GDk12VgdeeNEczMZz+8Mxw9dJ5NCnYgTwO0SzGlclRsDvjzkLo8rh2ZG6n/jKrEyNXXo+hOqhupij0QbRP2Tvexdfw201kgN1jdZify8XzJ8Oi0bTS0KpJf2pNPOlooK2bjMUei9ANtEdXwwfVZGWvVh6tJjdv6k14wWWJ1L7zhA1IIVb1J+sQUzJji5iX0DrezjTz1Fg+gAzITaA/WsuujlM=\"",
            "last_heart_beat": "\"2017-08-08T18:29:06.470652Z\"",
            "server_osversion": "10.0.14393.0",
            "agent_version": "1.0.277.0"
        }
        result = self.mgmt_client.registered_servers.create(resource_group.name, STORAGE_SYNC_SERVICE_NAME, REGISTERED_SERVER_NAME, BODY)
        result = result.result()

        # CloudEndpoints_Create[put]
        BODY = {
            "storage_account_resource_id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Storage/storageAccounts/" + STORAGE_ACCOUNT_NAME + "",
            "azure_file_share_name": "cvcloud-afscv-0719-058-a94a1354-a1fd-4e9a-9a50-919fad8c4ba4",
            "storage_account_tenant_id": "\"72f988bf-86f1-41af-91ab-2d7cd011db47\"",
            "friendly_name": "ankushbsubscriptionmgmtmab"
        }
        result = self.mgmt_client.cloud_endpoints.create(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME, BODY)
        result = result.result()

        # ServerEndpoints_Create[put]
        BODY = {
            "server_local_path": "D:\\SampleServerEndpoint_1",
            "server_resource_id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.StorageSync/storageSyncServices/" + STORAGE_SYNC_SERVICE_NAME + "/registeredServers/" + REGISTERED_SERVER_NAME + "",
            "cloud_tiering": "off",
            "volume_free_space_percent": "100",
            "tier_files_older_than_days": "0",
            "offline_data_transfer": "on",
            "offline_data_transfer_share_name": "myfileshare"
        }
        result = self.mgmt_client.server_endpoints.create(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, SERVER_ENDPOINT_NAME, BODY)
        result = result.result()

        # ServerEndpoints_Get[get]
        result = self.mgmt_client.server_endpoints.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, SERVER_ENDPOINT_NAME)

        # CloudEndpoints_Get[get]
        result = self.mgmt_client.cloud_endpoints.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME)

        # ServerEndpoints_ListBySyncGroup[get]
        result = self.mgmt_client.server_endpoints.list_by_sync_group(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME)

        # CloudEndpoints_ListBySyncGroup[get]
        result = self.mgmt_client.cloud_endpoints.list_by_sync_group(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME)

        # RegisteredServers_Get[get]
        result = self.mgmt_client.registered_servers.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME, REGISTERED_SERVER_NAME)

        # Workflows_Get[get]
        result = self.mgmt_client.workflows.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME, WORKFLOW_NAME)
        """

        # SyncGroups_Get[get]
        result = self.mgmt_client.sync_groups.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME)

        """
        # Workflows_Get[get]
        result = self.mgmt_client.workflows.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME, WORKFLOW_NAME)

        # RegisteredServers_ListByStorageSyncService[get]
        result = self.mgmt_client.registered_servers.list_by_storage_sync_service(resource_group.name, STORAGE_SYNC_SERVICE_NAME)
        """

        # SyncGroups_ListByStorageSyncService[get]
        result = self.mgmt_client.sync_groups.list_by_storage_sync_service(resource_group.name, STORAGE_SYNC_SERVICE_NAME)

        # Workflows_ListByStorageSyncService[get]
        result = self.mgmt_client.workflows.list_by_storage_sync_service(resource_group.name, STORAGE_SYNC_SERVICE_NAME)

        # StorageSyncServices_Get[get]
        result = self.mgmt_client.storage_sync_services.get(resource_group.name, STORAGE_SYNC_SERVICE_NAME)

        # StorageSyncServices_ListByResourceGroup[get]
        result = self.mgmt_client.storage_sync_services.list_by_resource_group(resource_group.name)

        # StorageSyncServices_ListBySubscription[get]
        result = self.mgmt_client.storage_sync_services.list_by_subscription()

        # Operations_List[get]
        result = self.mgmt_client.operations.list()

        """
        # CloudEndpoints_TriggerChangeDetection[post]
        BODY = {
            "directory_path": "NewDirectory",
            "change_detection_mode": "Recursive"
        }
        result = self.mgmt_client.cloud_endpoints.trigger_change_detection(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME, BODY)
        result = result.result()

        # CloudEndpoints_restoreheartbeat[post]
        result = self.mgmt_client.cloud_endpoints.restoreheartbeat(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME)

        # ServerEndpoints_recallAction[post]
        BODY = {
            "pattern": "",
            "recall_path": ""
        }
        result = self.mgmt_client.server_endpoints.recall_action(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, SERVER_ENDPOINT_NAME, BODY)
        result = result.result()

        # CloudEndpoints_PostRestore[post]
        BODY = {
            "azure_file_share_uri": "https://hfsazbackupdevintncus2.file.core.test-cint.azure-test.net/sampleFileShare",
            "source_azure_file_share_uri": "https://hfsazbackupdevintncus2.file.core.test-cint.azure-test.net/sampleFileShare",
            "status": "Succeeded",
            "restore_file_spec": [
                {
                    "path": "text1.txt",
                    "isdir": False
                },
                {
                    "path": "MyDir",
                    "isdir": True
                },
                {
                    "path": "MyDir/SubDir",
                    "isdir": False
                },
                {
                    "path": "MyDir/SubDir/File1.pdf",
                    "isdir": False
                }
            ]
        }
        result = self.mgmt_client.cloud_endpoints.post_restore(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME, BODY)
        result = result.result()

        # CloudEndpoints_PreRestore[post]
        BODY = {
            "azure_file_share_uri": "https://hfsazbackupdevintncus2.file.core.test-cint.azure-test.net/sampleFileShare",
            "restore_file_spec": [
                {
                    "path": "text1.txt",
                    "isdir": False
                },
                {
                    "path": "MyDir",
                    "isdir": True
                },
                {
                    "path": "MyDir/SubDir",
                    "isdir": False
                },
                {
                    "path": "MyDir/SubDir/File1.pdf",
                    "isdir": False
                }
            ]
        }
        result = self.mgmt_client.cloud_endpoints.pre_restore(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME, BODY)
        result = result.result()

        # CloudEndpoints_PostBackup[post]
        BODY = {
            "azure_file_share": "https://sampleserver.file.core.test-cint.azure-test.net/sampleFileShare"
        }
        result = self.mgmt_client.cloud_endpoints.post_backup(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME, BODY)
        result = result.result()

        # CloudEndpoints_PreBackup[post]
        BODY = {
            "azure_file_share": "https://sampleserver.file.core.test-cint.azure-test.net/sampleFileShare"
        }
        result = self.mgmt_client.cloud_endpoints.pre_backup(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME, BODY)
        result = result.result()

        # ServerEndpoints_Update[patch]
        BODY = {
            "cloud_tiering": "off",
            "volume_free_space_percent": "100",
            "tier_files_older_than_days": "0",
            "offline_data_transfer": "off"
        }
        result = self.mgmt_client.server_endpoints.update(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, SERVER_ENDPOINT_NAME, BODY)
        result = result.result()

        # RegisteredServers_triggerRollover[post]
        BODY = {
            "server_certificate": "\"MIIDFjCCAf6gAwIBAgIQQS+DS8uhc4VNzUkTw7wbRjANBgkqhkiG9w0BAQ0FADAzMTEwLwYDVQQDEyhhbmt1c2hiLXByb2QzLnJlZG1vbmQuY29ycC5taWNyb3NvZnQuY29tMB4XDTE3MDgwMzE3MDQyNFoXDTE4MDgwNDE3MDQyNFowMzExMC8GA1UEAxMoYW5rdXNoYi1wcm9kMy5yZWRtb25kLmNvcnAubWljcm9zb2Z0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALDRvV4gmsIy6jGDPiHsXmvgVP749NNP7DopdlbHaNhjFmYINHl0uWylyaZmgJrROt2mnxN/zEyJtGnqYHlzUr4xvGq/qV5pqgdB9tag/sw9i22gfe9PRZ0FmSOZnXMbLYgLiDFqLtut5gHcOuWMj03YnkfoBEKlFBxWbagvW2yxz/Sxi9OVSJOKCaXra0RpcIHrO/KFl6ho2eE1/7Ykmfa8hZvSdoPd5gHdLiQcMB/pxq+mWp1fI6c8vFZoDu7Atn+NXTzYPKUxKzaisF12TsaKpohUsJpbB3Wocb0F5frn614D2pg14ERB5otjAMWw1m65csQWPI6dP8KIYe0+QPkCAwEAAaMmMCQwIgYDVR0lAQH/BBgwFgYIKwYBBQUHAwIGCisGAQQBgjcKAwwwDQYJKoZIhvcNAQENBQADggEBAA4RhVIBkw34M1RwakJgHvtjsOFxF1tVQA941NtLokx1l2Z8+GFQkcG4xpZSt+UN6wLerdCbnNhtkCErWUDeaT0jxk4g71Ofex7iM04crT4iHJr8mi96/XnhnkTUs+GDk12VgdeeNEczMZz+8Mxw9dJ5NCnYgTwO0SzGlclRsDvjzkLo8rh2ZG6n/jKrEyNXXo+hOqhupij0QbRP2Tvexdfw201kgN1jdZify8XzJ8Oi0bTS0KpJf2pNPOlooK2bjMUei9ANtEdXwwfVZGWvVh6tJjdv6k14wWWJ1L7zhA1IIVb1J+sQUzJji5iX0DrezjTz1Fg+gAzITaA/WsuujlM=\""
        }
        result = self.mgmt_client.registered_servers.trigger_rollover(resource_group.name, STORAGE_SYNC_SERVICE_NAME, REGISTERED_SERVER_NAME, BODY)
        result = result.result()

        # Workflows_Abort[post]
        result = self.mgmt_client.workflows.abort(resource_group.name, STORAGE_SYNC_SERVICE_NAME, WORKFLOW_NAME)
        """

        # StorageSyncServices_Update[patch]
        BODY = {
            "tags": {
                "environment": "Test",
                "dept": "IT"
            }
        }
        result = self.mgmt_client.storage_sync_services.update(resource_group.name, STORAGE_SYNC_SERVICE_NAME, BODY)

        # StorageSyncServiceCheckNameAvailability_AlreadyExists[post]
        BODY = {
            "name": "newstoragesyncservicename",
            "type": "Microsoft.StorageSync/storageSyncServices"
        }
        result = self.mgmt_client.storage_sync_services.check_name_availability(LOCATION_NAME, BODY)

        # StorageSyncServiceCheckNameAvailability_Available[post]
        BODY = {
            "name": "newstoragesyncservicename",
            "type": "Microsoft.StorageSync/storageSyncServices"
        }
        result = self.mgmt_client.storage_sync_services.check_name_availability(LOCATION_NAME, BODY)

        """
        # ServerEndpoints_Delete[delete]
        result = self.mgmt_client.server_endpoints.delete(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, SERVER_ENDPOINT_NAME)
        result = result.result()

        # CloudEndpoints_Delete[delete]
        result = self.mgmt_client.cloud_endpoints.delete(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME, CLOUD_ENDPOINT_NAME)
        result = result.result()

        # RegisteredServers_Delete[delete]
        result = self.mgmt_client.registered_servers.delete(resource_group.name, STORAGE_SYNC_SERVICE_NAME, REGISTERED_SERVER_NAME)
        result = result.result()
        """

        # SyncGroups_Delete[delete]
        result = self.mgmt_client.sync_groups.delete(resource_group.name, STORAGE_SYNC_SERVICE_NAME, SYNC_GROUP_NAME)

        # StorageSyncServices_Delete[delete]
        result = self.mgmt_client.storage_sync_services.delete(resource_group.name, STORAGE_SYNC_SERVICE_NAME)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
| 49.109635
| 1,099
| 0.707414
|
4a15970d41c9cb141afaa234ae4fd1670349f8ed
| 2,955
|
py
|
Python
|
sopel/modules/keyexchange.py
|
paulmadore/funkshelper
|
cfe60ef4015381b6c4fb01e453523d571af0b2de
|
[
"EFL-2.0"
] | null | null | null |
sopel/modules/keyexchange.py
|
paulmadore/funkshelper
|
cfe60ef4015381b6c4fb01e453523d571af0b2de
|
[
"EFL-2.0"
] | null | null | null |
sopel/modules/keyexchange.py
|
paulmadore/funkshelper
|
cfe60ef4015381b6c4fb01e453523d571af0b2de
|
[
"EFL-2.0"
] | null | null | null |
# coding=utf-8
"""
Woodcoin IRC GPG Key Association Module copyright 2015 phm.link
Licensed under Mozilla Public License Version 2.
Synopsis: a module that will register a user with their designated GPG key.
Behavior: should import the public key it is told to import, then store that in a file it then associates with the
user in question once the user signs a message that the bot tells the user. This will be a relatively simple implementation, but if required it could be that once someone has registered, only after they've verified with the bot can their username be used to say things in the room. This could be ideal for a name registration, at the channel level, system.
"""
from __future__ import unicode_literals
from __future__ import print_function
import gnupg
from sopel.module import commands, rule
from sopel.bot import Sopel
import requests
import random
import string
import os
from urllib.request import urlopen
@commands('register help')
@rule('$nickname register help')
def registerhelp(bot, trigger):
    """Explain the GPG key registration workflow to the user.

    Bug fix: the second message was unreachable because it followed a
    ``return`` statement; both messages are now sent.
    """
    bot.say('You must have your GPG key at a reputable server to use it here. Then do .register key KEY-ID, where KEY-ID is the ID of your key. I will then go and fetch the key and associate it with your name. The only way to remove the association will be to sign a message with it.')
    bot.say('Additionally, at various times no one with your user name will be able to talk without verifying their identity with your GPG key.')
@commands('register')
@rule(r'$nickname register key (\S+)')
def registerkey(bot, trigger):
    """Fetch the given public key from the key server and store it on disk.

    Reconstruction of the original (which did not run) with these fixes:
    * ``providedKey`` was an undefined name used at decoration time; the
      key id is now parsed from the trigger text instead.
    * no ``gnupg.GPG`` instance was ever created even though ``gnupg`` is
      imported; one is created here.
    * the challenge filename concatenated a *function object* to a string;
      the random challenge string is now generated first.
    * the public-key export was immediately overwritten by a private-key
      export (``export_keys(..., True)``); only the public key is kept.
    """
    # Last whitespace-separated token of the argument text is the key id.
    provided_key = (trigger.group(2) or '').strip().split()[-1]
    gpg = gnupg.GPG()

    # Import the key from the key server, then export its public part.
    gpg.recv_keys('aes.keys.peer.sh', provided_key)
    public_keys = gpg.export_keys(provided_key)

    # Random challenge the user must sign to prove key ownership.
    msg_to_sign = ''.join(
        random.choice(string.ascii_uppercase + string.digits) for _ in range(12))

    with open(msg_to_sign + '.txt', 'w') as msg_file:
        msg_file.write(msg_to_sign)
    with open(provided_key + '.txt', 'w') as key_file:
        key_file.write(public_keys)

    bot.say('Now sign the message ' + msg_to_sign + ' with your key, and return to say finishregistration signed_message key_id, where signed_message is the signed message and key_id is the proper key id you are trying to finish registration for.')
@commands('finishregistration')
@rule(r'$nickname finishregistration (\S+) (\S+)')
def finishReg(bot, trigger):
    """Verify the signed challenge and confirm the key registration.

    The original body was not valid Python (``=`` used as a comparison,
    a non-existent ``OR`` operator, missing colons, undefined decoration
    names); this is a minimal working reconstruction of the apparent
    intent: validate the two arguments, verify the signature, and record
    the result next to the registered key.
    """
    args = (trigger.group(2) or '').split()
    if len(args) < 2:
        bot.say('Say finishregistration signed_message keyID and I will confirm your registration.')
        return

    signed_msg, key_id = args[0], args[1]
    gpg = gnupg.GPG()
    verified = gpg.verify(signed_msg)

    # Persist the verification result alongside the registered key id.
    with open(key_id + '.verified.txt', 'w') as out:
        out.write(str(verified))

    if verified:
        bot.say('Registration confirmed for key ' + key_id + '.')
    else:
        bot.say('Could not verify the signed message for key ' + key_id + '.')
| 51.842105
| 357
| 0.729949
|
4a1599b08eeb93706ded3ad73a16c3dc4a0128d4
| 614
|
py
|
Python
|
houses/migrations/0014_auto_20190908_2134.py
|
xNovax/RoomScout
|
287240a9d13f2b8f6ce9abdc95cf611671970fc3
|
[
"MIT"
] | 24
|
2020-02-01T17:22:47.000Z
|
2020-10-24T19:49:36.000Z
|
houses/migrations/0014_auto_20190908_2134.py
|
xNovax/RoomScout
|
287240a9d13f2b8f6ce9abdc95cf611671970fc3
|
[
"MIT"
] | 16
|
2020-02-01T14:30:15.000Z
|
2020-08-13T20:49:56.000Z
|
houses/migrations/0014_auto_20190908_2134.py
|
aaronspindler/RoomScout
|
287240a9d13f2b8f6ce9abdc95cf611671970fc3
|
[
"MIT"
] | 6
|
2020-02-01T22:07:46.000Z
|
2021-03-05T14:05:27.000Z
|
# Generated by Django 2.2.4 on 2019-09-09 01:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema change for the ``House`` model.

    Drops the ``cats_allowed`` and ``dogs_allowed`` boolean flags and adds
    ``has_air_conditioning`` (default ``False``).
    """

    dependencies = [
        ('houses', '0013_auto_20190908_2133'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='house',
            name='cats_allowed',
        ),
        migrations.RemoveField(
            model_name='house',
            name='dogs_allowed',
        ),
        migrations.AddField(
            model_name='house',
            name='has_air_conditioning',
            field=models.BooleanField(default=False),
        ),
    ]
| 23.615385
| 53
| 0.566775
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.