Dataset schema (32 columns, one row per source file):

| Column | Type | Range / classes |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 996k |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 245 |
| max_stars_repo_name | string | length 6 – 130 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 4 – 245 |
| max_issues_repo_name | string | length 6 – 130 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 4 – 245 |
| max_forks_repo_name | string | length 6 – 130 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 4 – 996k |
| avg_line_length | float64 | 1.33 – 58.2k |
| max_line_length | int64 | 2 – 323k |
| alphanum_fraction | float64 | 0 – 0.97 |
| content_no_comment | string | length 0 – 946k |
| is_comment_constant_removed | bool | 2 classes |
| is_sharp_comment_removed | bool | 1 class |
---
File: task_manager/tasks/migrations/0001_initial.py | Repo: rabilrbl/task_manager @ 10c96df8f41caf2db6a0ec2aa7cb961135412843 | License: BSD-3-Clause | Size: 2,813 bytes | Blob: f724461d9b5f371de5e8cc03ae8c7e10e9f3cfa1

# Generated by Django 3.2.12 on 2022-02-16 17:52
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('external_id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, unique=True)),
('title', models.CharField(max_length=100)),
('priority', models.IntegerField(default=0)),
('description', models.TextField(blank=True, max_length=500)),
('completed', models.BooleanField(default=False)),
('date_created', models.DateTimeField(auto_now_add=True)),
('deleted', models.BooleanField(default=False)),
('status', models.CharField(choices=[('pending', 'Pending'), ('in_progress', 'In Progress'), ('completed', 'Completed'), ('cancelled', 'Cancelled')], default='pending', max_length=100)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Report',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('consent', models.BooleanField(default=False, help_text='Uncheck to stop receiving reports')),
('time', models.TimeField(default=datetime.time(0, 0), help_text='All times are in UTC format.')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='History',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('old_status', models.CharField(choices=[('pending', 'Pending'), ('in_progress', 'In Progress'), ('completed', 'Completed'), ('cancelled', 'Cancelled')], default='n/a', max_length=100)),
('new_status', models.CharField(choices=[('pending', 'Pending'), ('in_progress', 'In Progress'), ('completed', 'Completed'), ('cancelled', 'Cancelled')], default='n/a', max_length=100)),
('change_date', models.DateTimeField(auto_now=True)),
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasks.task')),
],
),
]
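A minimal sketch of applying this migration programmatically, assuming a configured Django project (`DJANGO_SETTINGS_MODULE` set) in which the app is registered under the label `tasks`; the label is inferred from the repo path, not stated in the file:

```python
# Hypothetical invocation; equivalent to `python manage.py migrate tasks 0001_initial`.
from django.core.management import call_command

call_command("migrate", "tasks", "0001_initial")
```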
---
File: packages/python/plotly/plotly/validators/barpolar/marker/colorbar/title/_text.py | Repo: mastermind88/plotly.py @ efa70710df1af22958e1be080e105130042f1839 | License: MIT | Size: 430 bytes | Blob: f724467171d010ee0e4c6085d11642893a2e975d

import _plotly_utils.basevalidators
class TextValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self, plotly_name="text", parent_name="barpolar.marker.colorbar.title", **kwargs
):
super(TextValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
**kwargs,
)
| 30.714286 | 88 | 0.65814 | import _plotly_utils.basevalidators
class TextValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self, plotly_name="text", parent_name="barpolar.marker.colorbar.title", **kwargs
):
super(TextValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
**kwargs,
)
| true | true |
---
File: code/week 6/rat_in_a_maze.py | Repo: c235gsy/Sustech_Data-Structure-and-Algorithm-Analysis @ fcbd450216e9e62cd3365ad2a8ccab00b9eb679f | License: MIT | Size: 2,864 bytes | Stars: 1 | Blob: f7244861300db686a544ca4278551c0acac34d9c
class Queue:
# A container with a first-in-first-out (FIFO) queuing policy.
def __init__(self):
self.list = []
def push(self,item):
# Enqueue the 'item' into the queue
self.list.insert(0, item)
def pop(self):
# Dequeue the earliest enqueued item still in the queue. This operation removes the item from the queue.
return self.list.pop()
def is_empty(self):
# Returns true if the queue is empty
return len(self.list) == 0
global maze
maze = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0]]
def get_choices(stage):
# Move order is: right, down, left, up
choices = []
x = stage[0]
y = stage[1]
if y != 14 and maze[x][y+1] == 0:
choices.append([(x, y+1), "right"])
if x != 13 and maze[x+1][y] == 0:
choices.append([(x+1, y), "down"])
if y != 0 and maze[x][y-1] == 0:
choices.append([(x, y-1), "left"])
if x != 0 and maze[x-1][y] == 0:
choices.append([(x-1, y), "up"])
return choices
def get_start_stage():
return 0, 0
def get_goal_stage():
return 13, 14
def is_goal_stage(stage):
return stage == (13, 14)
def breadth_first_search():
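    # Bookkeeping note: 'states_to_expand' and 'path_to_current_state' are
    # pushed in lockstep, so each position popped from the first queue stays
    # paired with the move sequence (popped from the second) that reaches it.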
states_to_expand = Queue ()
states_to_expand.push (get_start_stage())
visited_states = []
path_to_goal = []
path_to_current_state = Queue ()
current_state = states_to_expand.pop ()
    while True:
if is_goal_stage(current_state):
break
elif current_state not in visited_states:
visited_states.append(current_state)
choices_of_move = get_choices(current_state)
for p in range(0, len(choices_of_move)):
choice = choices_of_move[p]
new_position = choice[0]
direction = choice[1]
states_to_expand.push(new_position)
path_to_current_state.push(path_to_goal + [direction])
current_state = states_to_expand.pop()
path_to_goal = path_to_current_state.pop()
return path_to_goal
result = breadth_first_search()
for step in result:
print(step)
---
File: lib/models/__init__.py | Repo: ablattmann/pose_estimation_hrnet @ 67d5a3446979c2abe54578ee4bba3787862d4077 | License: MIT | Size: 545 bytes | Blob: f72448dcbaaf3b3cb88177c88c779358a33a7210

# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import models.pose_resnet
# import models.pose_hrnet
---
File: magnum/tests/functional/tempest_tests/config.py | Repo: mail2nsrajesh/magnum @ 2e7e5a77967028c961337177ce577eb936c3845c | License: Apache-2.0 | Size: 2,299 bytes | Blob: f72449514abd4834918b9dc32df86e5d6d182d1c

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from oslo_config import cfg
from tempest import config # noqa
service_available_group = cfg.OptGroup(name="service_available",
title="Available OpenStack Services")
ServiceAvailableGroup = [
cfg.BoolOpt("magnum",
default=True,
help="Whether or not magnum is expected to be available"),
]
magnum_group = cfg.OptGroup(name="magnum", title="Magnum Options")
MagnumGroup = [
cfg.StrOpt("image_id",
default="fedora-atomic-latest",
help="Image id to be used for ClusterTemplate."),
cfg.StrOpt("nic_id",
default="public",
help="NIC id."),
cfg.StrOpt("keypair_id",
default="default",
help="Keypair id to use to log into nova instances."),
cfg.StrOpt("flavor_id",
default="s1.magnum",
help="Flavor id to use for ClusterTemplate."),
cfg.StrOpt("magnum_url",
help="Bypass URL for Magnum to skip service catalog lookup"),
cfg.StrOpt("master_flavor_id",
default="m1.magnum",
help="Master flavor id to use for ClusterTemplate."),
cfg.StrOpt("csr_location",
default="/opt/stack/new/magnum/default.csr",
deprecated_for_removal=True,
help="CSR location for certificates. This option is no "
"longer used for anything."),
cfg.StrOpt("dns_nameserver",
default="8.8.8.8",
help="DNS nameserver to use for ClusterTemplate."),
cfg.BoolOpt("copy_logs",
default=True,
help="Specify whether to copy nova server logs on failure."),
]
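A minimal sketch of how these option groups are typically consumed in a tempest test, assuming they have been registered with tempest's global `CONF` object by the plugin's option-registration hook:

```python
from tempest import config

CONF = config.CONF
if CONF.service_available.magnum:
    image = CONF.magnum.image_id    # "fedora-atomic-latest" unless overridden
    flavor = CONF.magnum.flavor_id  # "s1.magnum" unless overridden
```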
---
File: EnvironmentAM2315MuxSensor.py | Repo: MBI-Div-B/pytango-EnvironmentAM2315Mux @ ebbea69d2c954b6d1e59e49a31eb36ec60929bad | License: MIT | Size: 1,912 bytes | Blob: f7244962c5bfd8e1aa5c08eb18369e6d57850268

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2020 MBI-Division-B
# MIT License, refer to LICENSE file
# Author: Luca Barbera / Email: barbera@mbi-berlin.de
from tango import AttrWriteType, DevState, DebugIt, ErrorIt, InfoIt, DeviceProxy
from tango.server import Device, attribute, command, device_property
class EnvironmentAM2315MuxSensor(Device):
CtrlDevice = device_property(
dtype="str",
default_value="domain/family/memeber",
)
Channel = device_property(
dtype="int",
default_value=0,
)
temperature = attribute(label='Temperature',
access=AttrWriteType.READ,
dtype=float,
format='3.1f',
unit='C')
humidity = attribute(label='Humidity',
access=AttrWriteType.READ,
dtype=float,
format='3.1f',
unit='%')
def init_device(self):
Device.init_device(self)
self.set_state(DevState.INIT)
try:
self.ctrl = DeviceProxy(self.CtrlDevice)
self.info_stream("Connection established.")
self.set_state(DevState.ON)
except Exception:
self.error_stream('Connection could not be established.')
self.set_state(DevState.OFF)
self._temp = 0
self._humid = 0
def always_executed_hook(self):
try:
# _read_data measures both humidity and temperature
self._temp, self._humid = self.ctrl.read_data(self.Channel)
except Exception:
self.error_stream('Data could not be read')
def read_temperature(self):
return self._temp
def read_humidity(self):
return self._humid
if __name__ == "__main__":
EnvironmentAM2315MuxSensor.run_server()
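A minimal client-side sketch, assuming the server above is running and registered in the Tango database; the device name used here is an assumption, not taken from the repository:

```python
from tango import DeviceProxy

sensor = DeviceProxy("lab/environment/am2315_0")  # hypothetical device name
print(sensor.temperature, sensor.humidity)        # attributes defined above
```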
---
File: nssrc/com/citrix/netscaler/nitro/resource/config/router/routerdynamicrouting_args.py | Repo: guardicore/nitro-python @ 5346a5086134aead80968f15a41ff527adaa0ec1 | License: Apache-2.0 | Size: 1,578 bytes | Blob: f7244a801802bebba70ac938a121a24fcb049c4f

#
# Copyright (c) 2021 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class routerdynamicrouting_args :
r""" Provides additional arguments required for fetching the routerdynamicrouting resource.
"""
def __init__(self) :
self._commandstring = None
self._nodeid = None
@property
def commandstring(self) :
r"""command to be executed.
"""
try :
return self._commandstring
except Exception as e:
raise e
@commandstring.setter
def commandstring(self, commandstring) :
r"""command to be executed.
"""
try :
self._commandstring = commandstring
except Exception as e:
raise e
@property
def nodeid(self) :
r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31.
"""
try :
return self._nodeid
except Exception as e:
raise e
@nodeid.setter
def nodeid(self, nodeid) :
r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31
"""
try :
self._nodeid = nodeid
except Exception as e:
raise e
---
File: aries_cloudagent/wallet/tests/test_routes.py | Repo: jcourt562/aries-cloudagent-python @ de291184c59006391a76317826983dd1eb0ada5d | License: Apache-2.0 | Size: 14,326 bytes | Stars: 1 | Issues: 1 | Forks: 2 | Blob: f7244c2c22cf9787986fc05f8c297d20c042b807

from asynctest import TestCase as AsyncTestCase
from asynctest import mock as async_mock
import pytest
from aiohttp.web import HTTPForbidden
from ...config.injection_context import InjectionContext
from ...ledger.base import BaseLedger
from ...wallet.base import BaseWallet, DIDInfo
from .. import routes as test_module
class TestWalletRoutes(AsyncTestCase):
def setUp(self):
self.context = InjectionContext(enforce_typing=False)
self.wallet = async_mock.create_autospec(BaseWallet)
self.context.injector.bind_instance(BaseWallet, self.wallet)
self.app = {
"outbound_message_router": async_mock.CoroutineMock(),
"request_context": self.context,
}
self.test_did = "did"
self.test_verkey = "verkey"
async def test_missing_wallet(self):
request = async_mock.MagicMock()
request.app = self.app
self.context.injector.clear_binding(BaseWallet)
with self.assertRaises(HTTPForbidden):
await test_module.wallet_create_did(request)
with self.assertRaises(HTTPForbidden):
await test_module.wallet_did_list(request)
with self.assertRaises(HTTPForbidden):
await test_module.wallet_get_public_did(request)
with self.assertRaises(HTTPForbidden):
await test_module.wallet_set_public_did(request)
def test_format_did_info(self):
did_info = DIDInfo(self.test_did, self.test_verkey, {})
result = test_module.format_did_info(did_info)
assert (
result["did"] == self.test_did
and result["verkey"] == self.test_verkey
and result["public"] == "false"
)
did_info = DIDInfo(self.test_did, self.test_verkey, {"public": True})
result = test_module.format_did_info(did_info)
assert result["public"] == "true"
async def test_create_did(self):
request = async_mock.MagicMock()
request.app = self.app
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.create_local_did.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
result = await test_module.wallet_create_did(request)
format_did_info.assert_called_once_with(
self.wallet.create_local_did.return_value
)
json_response.assert_called_once_with(
{"result": format_did_info.return_value}
)
assert result is json_response.return_value
async def test_did_list(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_local_dids.return_value = [
DIDInfo(self.test_did, self.test_verkey, {})
]
format_did_info.return_value = {"did": self.test_did}
result = await test_module.wallet_did_list(request)
format_did_info.assert_called_once_with(
self.wallet.get_local_dids.return_value[0]
)
json_response.assert_called_once_with(
{"results": [format_did_info.return_value]}
)
assert json_response.return_value is json_response()
assert result is json_response.return_value
async def test_did_list_filter_public(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"public": "true"}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_public_did.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
format_did_info.return_value = {"did": self.test_did}
result = await test_module.wallet_did_list(request)
format_did_info.assert_called_once_with(
self.wallet.get_public_did.return_value
)
json_response.assert_called_once_with(
{"results": [format_did_info.return_value]}
)
assert json_response.return_value is json_response()
assert result is json_response.return_value
async def test_did_list_filter_did(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"did": self.test_did}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_local_did.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
format_did_info.return_value = {"did": self.test_did}
result = await test_module.wallet_did_list(request)
format_did_info.assert_called_once_with(
self.wallet.get_local_did.return_value
)
json_response.assert_called_once_with(
{"results": [format_did_info.return_value]}
)
assert json_response.return_value is json_response()
assert result is json_response.return_value
async def test_did_list_filter_did_x(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"did": self.test_did}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response:
self.wallet.get_local_did.side_effect = test_module.WalletError()
result = await test_module.wallet_did_list(request)
json_response.assert_called_once_with({"results": []})
assert json_response.return_value is json_response()
assert result is json_response.return_value
async def test_did_list_filter_verkey(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"verkey": self.test_verkey}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_local_did_for_verkey.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
format_did_info.return_value = {"did": self.test_did}
result = await test_module.wallet_did_list(request)
format_did_info.assert_called_once_with(
self.wallet.get_local_did_for_verkey.return_value
)
json_response.assert_called_once_with(
{"results": [format_did_info.return_value]}
)
assert json_response.return_value is json_response()
assert result is json_response.return_value
async def test_did_list_filter_verkey_x(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"verkey": self.test_verkey}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response:
self.wallet.get_local_did_for_verkey.side_effect = test_module.WalletError()
result = await test_module.wallet_did_list(request)
json_response.assert_called_once_with({"results": []})
assert json_response.return_value is json_response()
assert result is json_response.return_value
async def test_get_public_did(self):
request = async_mock.MagicMock()
request.app = self.app
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_public_did.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
result = await test_module.wallet_get_public_did(request)
format_did_info.assert_called_once_with(
self.wallet.get_public_did.return_value
)
json_response.assert_called_once_with(
{"result": format_did_info.return_value}
)
assert result is json_response.return_value
async def test_set_public_did(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"did": self.test_did}
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_public_did.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
result = await test_module.wallet_set_public_did(request)
self.wallet.set_public_did.assert_awaited_once_with(request.query["did"])
format_did_info.assert_called_once_with(
self.wallet.set_public_did.return_value
)
json_response.assert_called_once_with(
{"result": format_did_info.return_value}
)
assert result is json_response.return_value
async def test_set_public_did_no_did(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {}
with self.assertRaises(test_module.web.HTTPBadRequest):
await test_module.wallet_set_public_did(request)
async def test_set_public_did_not_found(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"did": self.test_did}
self.wallet.get_local_did.side_effect = test_module.WalletError()
with self.assertRaises(test_module.web.HTTPBadRequest):
await test_module.wallet_set_public_did(request)
async def test_set_public_did_update_endpoint(self):
request = async_mock.MagicMock()
request.app = self.app
request.query = {"did": self.test_did}
Ledger = async_mock.MagicMock()
self.ledger = Ledger()
self.ledger.update_endpoint_for_did = async_mock.CoroutineMock()
self.ledger.__aenter__ = async_mock.CoroutineMock(return_value=self.ledger)
self.context.injector.bind_instance(BaseLedger, self.ledger)
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response, async_mock.patch.object(
test_module, "format_did_info", async_mock.Mock()
) as format_did_info:
self.wallet.get_public_did.return_value = DIDInfo(
self.test_did, self.test_verkey, {}
)
result = await test_module.wallet_set_public_did(request)
self.wallet.set_public_did.assert_awaited_once_with(request.query["did"])
format_did_info.assert_called_once_with(
self.wallet.set_public_did.return_value
)
json_response.assert_called_once_with(
{"result": format_did_info.return_value}
)
assert result is json_response.return_value
async def test_get_catpol(self):
request = async_mock.MagicMock()
request.app = self.app
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response:
self.wallet.WALLET_TYPE = "indy"
self.wallet.get_credential_definition_tag_policy = async_mock.CoroutineMock(
return_value=["a", "b", "c"]
)
result = await test_module.wallet_get_tagging_policy(request)
json_response.assert_called_once_with({"taggables": ["a", "b", "c"]})
assert result is json_response.return_value
async def test_get_catpol_not_indy_x(self):
request = async_mock.MagicMock()
request.app = self.app
self.wallet.WALLET_TYPE = "rich-corinthian-leather"
with self.assertRaises(test_module.web.HTTPForbidden):
await test_module.wallet_get_tagging_policy(request)
async def test_set_catpol(self):
request = async_mock.MagicMock()
request.app = self.app
request.json = async_mock.CoroutineMock(
return_value={"taggables": ["a", "b", "c"]}
)
with async_mock.patch.object(
test_module.web, "json_response", async_mock.Mock()
) as json_response:
self.wallet.WALLET_TYPE = "indy"
self.wallet.set_credential_definition_tag_policy = async_mock.CoroutineMock(
return_value=["a", "b", "c"]
)
result = await test_module.wallet_set_tagging_policy(request)
json_response.assert_called_once_with({})
assert result is json_response.return_value
async def test_set_catpol_not_indy_x(self):
request = async_mock.MagicMock()
request.app = self.app
request.json = async_mock.CoroutineMock(
return_value={"taggables": ["a", "b", "c"]}
)
self.wallet.WALLET_TYPE = "rich-corinthian-leather"
with self.assertRaises(test_module.web.HTTPForbidden):
await test_module.wallet_set_tagging_policy(request)
async def test_register(self):
mock_app = async_mock.MagicMock()
mock_app.add_routes = async_mock.MagicMock()
await test_module.register(mock_app)
mock_app.add_routes.assert_called_once()
---
File: app/db/repos/base/protocols/statements/__init__.py | Repo: maxzhenzhera/my_vocab_backend @ 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | License: MIT | Size: 62 bytes | Blob: f7244d3b8570046485a9f0792f05527b6f08760b

from .returnable import Returnable
__all__ = ['Returnable']
---
File: EDUREKA/Course.3/Case.Study.1.Programs/distanceCalculation.py | Repo: linkeshkanna/ProblemSolving @ 8286ce66fbe82a78e1a19396da2d888d755d4cf4 | License: Apache-2.0 | Size: 461 bytes | Blob: f7244e8ebbd013097871e683eb9e56711f004cd7

import math
from math import radians

# Two points given as (lat1, long1) and (lat2, long2), in decimal degrees
lat1 = 13.0827
lat2 = 9.4533
long1 = 80.2707
long2 = 77.8024

R = 6371  # Earth's mean radius in km

# Haversine formula: the teta (theta) terms are latitudes in radians,
# landa (lambda) is the longitude difference in radians
teta1 = radians(lat1)
teta2 = radians(lat2)
teta = radians(lat2 - lat1)
landa = radians(long2 - long1)

a = math.sin(teta/2) * math.sin(teta/2) + math.cos(teta1) * math.cos(teta2) * math.sin(landa/2) * math.sin(landa/2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
d = R * c  # great-circle distance in km

print("Distance between two Latitude, Longitude points is : " + str(d))
---
File: airiam/terraform/entity_terraformers/IAMPolicyDocumentTransformer.py | Repo: metahertz/AirIAM @ 212f84e1b1a51c7a614384f91b220e7f2a57a079 (issues/forks tracked on rckasa/AirIAM @ 5a99dc25354c1bc6525dbaf25a3afcd472f71b2f) | License: Apache-2.0 | Size: 3,688 bytes | Stars: 501 | Issues: 34 | Forks: 51 | Blob: f7244e9d2237fe01ddaeb6e8e95ca04552be563e

import json
from airiam.terraform.entity_terraformers.BaseEntityTransformer import BaseEntityTransformer
class IAMPolicyDocumentTransformer(BaseEntityTransformer):
def __init__(self, entity_json: dict, policy_name, principal_name=None):
policy_document_name = f"{policy_name}_document"
if principal_name:
policy_document_name = f"{principal_name}_{policy_document_name}"
super().__init__('data.aws_iam_policy_document', policy_document_name, entity_json)
def _generate_hcl2_code(self, entity_json) -> str:
statements = IAMPolicyDocumentTransformer.force_list(entity_json['Statement'])
if 'Principal' in statements[0]:
statements = self.transform_assume_policy_statements(statements)
else:
statements = self.transform_execution_policy(statements)
code = f"""data "aws_iam_policy_document" "{self._safe_name}" {{
version = "{entity_json.get('Version', '2012-10-17')}"
{statements}}}"""
return code
@staticmethod
def transform_execution_policy(statements):
statement_block = ""
for statement in statements:
sid_string = ""
if statement.get('Sid', '') != '':
sid_string = f"sid = \"{statement['Sid']}\"\n "
actions = IAMPolicyDocumentTransformer.force_list(statement.get('Action'))
if 'Action' in statement:
action_str = f"actions = {json.dumps(actions)}"
else:
actions = IAMPolicyDocumentTransformer.force_list(statement.get('NotAction'))
action_str = f"not_actions = {json.dumps(actions)}"
condition_block = IAMPolicyDocumentTransformer.transform_conditions(statement)
resources_list_str = json.dumps(IAMPolicyDocumentTransformer.force_list(statement.get('Resource'))).replace('${', '$\\u0024{')
statement_block += f""" statement {{
{sid_string}effect = "{statement['Effect']}"
{action_str}
resources = {resources_list_str}
{condition_block}
}}
"""
return statement_block
@staticmethod
def transform_assume_policy_statements(statements):
statement_block = ""
for statement in statements:
sid_string = ""
if statement.get('Sid', '') != '':
sid_string = f"sid = \"{statement['Sid']}\"\n "
condition_block = IAMPolicyDocumentTransformer.transform_conditions(statement)
statement_block += f""" statement {{
{sid_string}effect = "{statement['Effect']}"
actions = {json.dumps(IAMPolicyDocumentTransformer.force_list(statement['Action']))}
principals {{
type = "{list(statement['Principal'].keys())[0]}"
identifiers = {json.dumps(IAMPolicyDocumentTransformer.force_list(statement['Principal'][list(statement['Principal'].keys())[0]]))}
}}
{condition_block}}}
"""
return statement_block
@staticmethod
def transform_conditions(statement):
condition_block = ""
if 'Condition' in statement:
for test, items in statement['Condition'].items():
for variable, values in items.items():
values_str = json.dumps(IAMPolicyDocumentTransformer.force_list(values)).replace('${', '$\\u0024{')
condition_block += f"""
condition {{
test = "{test}"
variable = "{variable}"
values = {values_str}
}}
"""
return condition_block
@staticmethod
def force_list(x):
if isinstance(x, list):
return x
return [x]
def entities_to_import(self) -> list:
return []
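A minimal sketch exercising the static helper defined above; the policy dict is a hypothetical example of the parsed AWS policy JSON this transformer expects:

```python
statements = [{
    "Sid": "AllowRead",
    "Effect": "Allow",
    "Action": "s3:GetObject",
    "Resource": "arn:aws:s3:::example-bucket/*",
}]
hcl = IAMPolicyDocumentTransformer.transform_execution_policy(statements)
print(hcl)  # emits an HCL statement block with sid, effect, actions, resources
```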
---
File: smpc_demo_platform/benchmarking/views.py | Repo: Safe-DEED/mpc-mock-up @ 7c12b94d50bcde8480da8a7abf93c32b2708e2aa | License: MIT | Size: 620 bytes | Blob: f7244effc59fcc20c066ca606a185366478f6693

from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
# decorators = [login_required, ]
# @method_decorator(decorators, name='dispatch')
class BenchmarkViewAppCGOne(TemplateView):
template_name = "benchmarking/app-x3-Z63/benchmarking-home.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
return context
# @method_decorator(decorators, name='dispatch')
class BenchmarkEndView(TemplateView):
template_name = "benchmarking/end-of-demo.html"
| 31 | 68 | 0.774194 | from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
class BenchmarkViewAppCGOne(TemplateView):
template_name = "benchmarking/app-x3-Z63/benchmarking-home.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
return context
class BenchmarkEndView(TemplateView):
template_name = "benchmarking/end-of-demo.html"
| true | true |
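A minimal sketch of how these views might be routed; everything in this URLconf (paths, route names, import path) is an assumption for illustration, not taken from the project:
from django.urls import path

from smpc_demo_platform.benchmarking.views import (  # assumed import path
    BenchmarkEndView,
    BenchmarkViewAppCGOne,
)

urlpatterns = [
    path("benchmarking/", BenchmarkViewAppCGOne.as_view(), name="benchmarking-home"),
    path("benchmarking/end/", BenchmarkEndView.as_view(), name="benchmarking-end"),
]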
f72450ed27bed040eb378e87323d130b0d5efc33 | 143 | py | Python | utests/test_parsers.py | Zephor5/zspider | 49178415137d67d7c88f2904bcb204df32082204 | ["MIT"] | 12 | 2015-12-23T10:13:31.000Z | 2021-04-25T17:12:39.000Z | utests/test_parsers.py | Zephor5/zspider | 49178415137d67d7c88f2904bcb204df32082204 | ["MIT"] | 1 | 2022-03-02T14:53:18.000Z | 2022-03-02T14:53:18.000Z | utests/test_parsers.py | Zephor5/zspider | 49178415137d67d7c88f2904bcb204df32082204 | ["MIT"] | 1 | 2017-09-18T08:51:51.000Z | 2017-09-18T08:51:51.000Z | # coding=utf-8
import unittest
__author__ = "zephor"
# noinspection PyUnresolvedReferences
class TestNewspaper(unittest.TestCase):
pass
| 14.3 | 39 | 0.783217 |
import unittest
__author__ = "zephor"
class TestNewspaper(unittest.TestCase):
pass
| true | true |
f7245165fdf913d8a10e8d0815498a4fecb991a0 | 2,721 | py | Python | lumbermill/modifier/Permutate.py | dstore-dbap/LumberMill | b7cbadc209a83386871735b8ad88b61da917a6ab | ["Apache-2.0"] | 15 | 2015-12-14T19:07:28.000Z | 2022-02-28T13:32:11.000Z | lumbermill/modifier/Permutate.py | dstore-dbap/LumberMill | b7cbadc209a83386871735b8ad88b61da917a6ab | ["Apache-2.0"] | null | null | null | lumbermill/modifier/Permutate.py | dstore-dbap/LumberMill | b7cbadc209a83386871735b8ad88b61da917a6ab | ["Apache-2.0"] | 4 | 2017-02-08T10:49:55.000Z | 2019-03-19T18:47:46.000Z | # -*- coding: utf-8 -*-
import itertools
import sys
from lumbermill.BaseThreadedModule import BaseThreadedModule
from lumbermill.utils.Decorators import ModuleDocstringParser
@ModuleDocstringParser
class Permutate(BaseThreadedModule):
"""
Creates successive len('target_fields') length permutations of elements in 'source_field'.
To add some context data to each emitted event 'context_data_field' can specify a field
containing a dictionary with the values of 'source_field' as keys.
Configuration template:
- Permutate:
source_field: # <type: string; is: required>
target_fields: # <type: list; is: required>
context_data_field: # <default: ""; type:string; is: optional>
context_target_mapping: # <default: {}; type: dict; is: optional if context_data_field == "" else required>
receivers:
- NextModule
"""
module_type = "modifier"
"""Set module type"""
def handleEvent(self, event):
"""
Process the event.
@param event: dictionary
@return data: dictionary
"""
try:
context_data = event[self.getConfigurationValue('context_data_field')]
except KeyError:
context_data = False
try:
permutation_data = event[self.getConfigurationValue('source_field')]
except KeyError:
yield event
return
if type(permutation_data) is not list:
yield event
return
target_field_names = self.getConfigurationValue('target_fields')
context_target_mapping = self.getConfigurationValue('context_target_mapping')
for permutation in itertools.permutations(permutation_data, r=len(target_field_names)):
event_copy = event.copy()
if context_data:
try:
# Rewrite the context data keys to new keys in context_target_mapping
ctx_data = {}
for idx, dct in enumerate([context_data[key] for key in permutation if key in context_data]):
for mapping_key, newkeys in context_target_mapping.items():
if mapping_key in dct:
ctx_data[newkeys[idx]] = dct[mapping_key]
event_copy.update(ctx_data)
except:
etype, evalue, etb = sys.exc_info()
self.logger.warning("Could not add context data. Exception: %s, Error: %s." % (etype, evalue))
perm = dict(zip(target_field_names, permutation))
event_copy.update(perm)
yield event_copy
| 38.871429 | 123 | 0.606027 |
import itertools
import sys
from lumbermill.BaseThreadedModule import BaseThreadedModule
from lumbermill.utils.Decorators import ModuleDocstringParser
@ModuleDocstringParser
class Permutate(BaseThreadedModule):
module_type = "modifier"
def handleEvent(self, event):
try:
context_data = event[self.getConfigurationValue('context_data_field')]
except KeyError:
context_data = False
try:
permutation_data = event[self.getConfigurationValue('source_field')]
except KeyError:
yield event
return
if type(permutation_data) is not list:
yield event
return
target_field_names = self.getConfigurationValue('target_fields')
context_target_mapping = self.getConfigurationValue('context_target_mapping')
for permutation in itertools.permutations(permutation_data, r=len(target_field_names)):
event_copy = event.copy()
if context_data:
try:
ctx_data = {}
for idx, dct in enumerate([context_data[key] for key in permutation if key in context_data]):
for mapping_key, newkeys in context_target_mapping.items():
if mapping_key in dct:
ctx_data[newkeys[idx]] = dct[mapping_key]
event_copy.update(ctx_data)
except:
etype, evalue, etb = sys.exc_info()
self.logger.warning("Could not add context data. Exception: %s, Error: %s." % (etype, evalue))
perm = dict(zip(target_field_names, permutation))
event_copy.update(perm)
yield event_copy
| true | true |
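The heart of handleEvent above is itertools.permutations over the source field; a self-contained sketch of that behavior with made-up field names:
import itertools

event = {"hosts": ["a", "b", "c"]}   # hypothetical input event (source_field = "hosts")
target_fields = ["src", "dst"]       # plays the role of target_fields in the config

# One copied event per r-length permutation, mirroring the loop in handleEvent.
for perm in itertools.permutations(event["hosts"], r=len(target_fields)):
    event_copy = dict(event)
    event_copy.update(zip(target_fields, perm))
    print(event_copy)
# -> six events: (a,b), (a,c), (b,a), (b,c), (c,a), (c,b)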
f72451c6dd941af7a8fa151c731d1b07245f0b6c | 8,252 | py | Python | tests/test_utils.py | austinjp/textacy | dddfdbf0e0ab3bf756bc4eda042eab1001aac709 | ["Apache-2.0"] | 1,929 | 2016-02-14T08:30:38.000Z | 2022-03-31T03:00:35.000Z | tests/test_utils.py | austinjp/textacy | dddfdbf0e0ab3bf756bc4eda042eab1001aac709 | ["Apache-2.0"] | 304 | 2016-02-18T15:52:22.000Z | 2022-03-31T18:06:54.000Z | tests/test_utils.py | austinjp/textacy | dddfdbf0e0ab3bf756bc4eda042eab1001aac709 | ["Apache-2.0"] | 285 | 2016-03-20T04:25:08.000Z | 2022-03-24T11:31:17.000Z | import datetime
import pathlib
import pytest
from textacy import utils
@pytest.mark.parametrize(
"val,val_type,col_type,expected",
[
(None, int, list, None),
(1, int, list, [1]),
([1, 2], int, tuple, (1, 2)),
((1, 1.0), (int, float), set, {1, 1.0}),
],
)
def test_to_collection(val, val_type, col_type, expected):
assert utils.to_collection(val, val_type, col_type) == expected
class TestToUnicode:
@pytest.mark.parametrize("s", [b"bytes", "unicode", "úñîçødé"])
def test_valid(self, s):
assert isinstance(utils.to_unicode(s), str)
@pytest.mark.parametrize("s", [1, 2.0, ["foo", "bar"], {"foo": "bar"}])
def test_invalid(self, s):
with pytest.raises(TypeError):
_ = utils.to_unicode(s)
class TestToBytes:
@pytest.mark.parametrize("s", [b"bytes", "unicode", "úñîçødé"])
def test_valid(self, s):
assert isinstance(utils.to_bytes(s), bytes)
@pytest.mark.parametrize("s", [1, 2.0, ["foo", "bar"], {"foo": "bar"}])
def test_invalid(self, s):
with pytest.raises(TypeError):
_ = utils.to_bytes(s)
class TestToPath:
@pytest.mark.parametrize("path", [pathlib.Path("."), pathlib.Path.home()])
def test_path_input(self, path):
assert isinstance(utils.to_path(path), pathlib.Path)
@pytest.mark.parametrize("path", ["unicode", "úñîçødé"])
def test_str_input(self, path):
assert isinstance(utils.to_path(path), pathlib.Path)
@pytest.mark.parametrize("path", [1, 2.0, ["foo", "bar"], {"foo": "bar"}])
def test_invalid_input(self, path):
with pytest.raises(TypeError):
_ = utils.to_path(path)
class TestValidateAndClipRange:
@pytest.mark.parametrize(
"range_vals,full_range,val_type",
[
[("2001-01", "2002-01"), ("2000-01", "2003-01"), None],
[["2001-01", "2004-01"], ("2000-01", "2003-01"), None],
[("2001-01", "2002-01"), ["2000-01", "2003-01"], (str, bytes)],
[[-5, 5], [-10, 10], None],
[(-5, 5), (0, 10), None],
[(-5, 5), (-10, 10), int],
[(-5, 5), (-10, 10), (int, float)],
[(0, None), (-5, 5), None],
[(None, 0), (-5, 5), None],
],
)
def test_valid_inputs(self, range_vals, full_range, val_type):
output = utils.validate_and_clip_range(range_vals, full_range, val_type)
assert isinstance(output, tuple)
assert len(output) == 2
if range_vals[0] is None:
assert output[0] == full_range[0]
else:
assert output[0] == max(range_vals[0], full_range[0])
if range_vals[1] is None:
assert output[1] == full_range[1]
else:
assert output[1] == min(range_vals[1], full_range[1])
@pytest.mark.parametrize(
"range_vals,full_range,val_type,error",
[
["2001-01", ("2000-01", "2003-01"), None, pytest.raises(TypeError)],
[("2001-01", "2002-01"), "2000-01", None, pytest.raises(TypeError)],
[
{"2001-01", "2002-01"},
("2000-01", "2003-01"),
None,
pytest.raises(TypeError),
],
[
("2001-01", "2002-01"),
("2000-01", "2003-01"),
datetime.date,
pytest.raises(TypeError),
],
[0, [-10, 10], None, pytest.raises(TypeError)],
[(-5, 5), 0, None, pytest.raises(TypeError)],
[[-5, 5], [-10, 10], (str, bytes), pytest.raises(TypeError)],
[
("2001-01", "2002-01", "2003-01"),
("2000-01", "2003-01"),
None,
pytest.raises(ValueError),
],
[
("2001-01", "2002-01"),
["2000-01", "2002-01", "2004-01"],
None,
pytest.raises(ValueError),
],
[[0, 5, 10], (-10, 10), None, pytest.raises(ValueError)],
[(-5, 5), [-10, 0, 10], None, pytest.raises(ValueError)],
[(-5, 5), [-10, 0, 10], (str, bytes), pytest.raises(ValueError)],
],
)
def test_invalid_inputs(self, range_vals, full_range, val_type, error):
with error:
_ = utils.validate_and_clip_range(range_vals, full_range, val_type)
class TestValidateSetMembers:
@pytest.mark.parametrize(
"vals,val_type,valid_vals",
[
[{"a", "b"}, (str, bytes), {"a", "b", "c"}],
["a", (str, bytes), {"a", "b", "c"}],
[("a", "b"), (str, bytes), {"a", "b", "c"}],
[["a", "b"], (str, bytes), None],
[{1, 2}, int, {1, 2, 3}],
[{1, 2}, (int, float), {1, 2, 3}],
[1, int, {1: "a", 2: "b", 3: "c"}],
[{3.14, 42.0}, float, None],
[3.14, (int, float), None],
]
)
def test_valid_inputs(self, vals, val_type, valid_vals):
output = utils.validate_set_members(vals, val_type, valid_vals)
assert isinstance(output, set)
assert all(isinstance(val, val_type) for val in output)
@pytest.mark.parametrize(
"vals,val_type,valid_vals,error",
[
[{"a", "b"}, int, None, pytest.raises(TypeError)],
["a", int, None, pytest.raises(TypeError)],
[("a", "b"), (int, float), None, pytest.raises(TypeError)],
[{"a", "b"}, (str, bytes), {"x", "y", "z"}, pytest.raises(ValueError)],
[{"a", "x"}, (str, bytes), {"x", "y", "z"}, pytest.raises(ValueError)],
["a", (str, bytes), {"x", "y", "z"}, pytest.raises(ValueError)],
["a", (str, bytes), {"x": 24, "y": 25, "z": 26}, pytest.raises(ValueError)],
]
)
def test_invalid_inputs(self, vals, val_type, valid_vals, error):
with error:
_ = utils.validate_set_members(vals, val_type, valid_vals)
# TODO: uncomment this when we're only supporting PY3.8+
# def _func_pos_only_args(parg1, parg2, /):
# return (parg1, parg2)
# TODO: uncomment this when we're only supporting PY3.8+
# def _func_mix_args(parg, /, arg, *, kwarg):
# return (parg, arg, kwarg)
def _func_mix_args(arg, *, kwarg):
return (arg, kwarg)
def _func_kw_only_args(*, kwarg1, kwarg2):
return (kwarg1, kwarg2)
@pytest.mark.parametrize(
"func,kwargs,expected",
[
# (_func_pos_only_args, {"kwarg": "kwargval"}, {}),
(_func_mix_args, {"arg": "argval"}, {"arg": "argval"}),
(
_func_mix_args,
{"arg": "argval", "kwarg": "kwarval"},
{"arg": "argval", "kwarg": "kwarval"},
),
(
_func_mix_args,
{"arg": "argval", "kwarg": "kwargval", "foo": "bar"},
{"arg": "argval", "kwarg": "kwargval"},
),
(
_func_kw_only_args,
{"kwarg1": "kwarg1val", "kwarg2": "kwarg2val"},
{"kwarg1": "kwarg1val", "kwarg2": "kwarg2val"},
),
(
_func_kw_only_args,
{"kwarg1": "kwarg1val", "kwarg3": "kwarg3val"},
{"kwarg1": "kwarg1val"},
),
(_func_kw_only_args, {}, {}),
],
)
def test_get_kwargs_for_func(func, kwargs, expected):
assert utils.get_kwargs_for_func(func, kwargs) == expected
@pytest.mark.parametrize(
"text, n, pad, exp",
[
(
"testing 123",
1,
False,
('t', 'e', 's', 't', 'i', 'n', 'g', ' ', '1', '2', '3'),
),
(
"testing 123",
1,
True,
('t', 'e', 's', 't', 'i', 'n', 'g', ' ', '1', '2', '3'),
),
(
"testing 123",
2,
False,
('te', 'es', 'st', 'ti', 'in', 'ng', 'g ', ' 1', '12', '23'),
),
(
"testing 123",
2,
True,
('_t', 'te', 'es', 'st', 'ti', 'in', 'ng', 'g ', ' 1', '12', '23', '3_'),
),
]
)
def test_text_to_char_ngrams(text, n, pad, exp):
obs = utils.text_to_char_ngrams(text, n, pad=pad)
assert all(isinstance(cng, str) and len(cng) == n for cng in obs)
assert obs == exp
| 32.234375 | 88 | 0.491032 | import datetime
import pathlib
import pytest
from textacy import utils
@pytest.mark.parametrize(
"val,val_type,col_type,expected",
[
(None, int, list, None),
(1, int, list, [1]),
([1, 2], int, tuple, (1, 2)),
((1, 1.0), (int, float), set, {1, 1.0}),
],
)
def test_to_collection(val, val_type, col_type, expected):
assert utils.to_collection(val, val_type, col_type) == expected
class TestToUnicode:
@pytest.mark.parametrize("s", [b"bytes", "unicode", "úñîçødé"])
def test_valid(self, s):
assert isinstance(utils.to_unicode(s), str)
@pytest.mark.parametrize("s", [1, 2.0, ["foo", "bar"], {"foo": "bar"}])
def test_invalid(self, s):
with pytest.raises(TypeError):
_ = utils.to_unicode(s)
class TestToBytes:
@pytest.mark.parametrize("s", [b"bytes", "unicode", "úñîçødé"])
def test_valid(self, s):
assert isinstance(utils.to_bytes(s), bytes)
@pytest.mark.parametrize("s", [1, 2.0, ["foo", "bar"], {"foo": "bar"}])
def test_invalid(self, s):
with pytest.raises(TypeError):
_ = utils.to_bytes(s)
class TestToPath:
@pytest.mark.parametrize("path", [pathlib.Path("."), pathlib.Path.home()])
def test_path_input(self, path):
assert isinstance(utils.to_path(path), pathlib.Path)
@pytest.mark.parametrize("path", ["unicode", "úñîçødé"])
def test_str_input(self, path):
assert isinstance(utils.to_path(path), pathlib.Path)
@pytest.mark.parametrize("path", [1, 2.0, ["foo", "bar"], {"foo": "bar"}])
def test_invalid_input(self, path):
with pytest.raises(TypeError):
_ = utils.to_path(path)
class TestValidateAndClipRange:
@pytest.mark.parametrize(
"range_vals,full_range,val_type",
[
[("2001-01", "2002-01"), ("2000-01", "2003-01"), None],
[["2001-01", "2004-01"], ("2000-01", "2003-01"), None],
[("2001-01", "2002-01"), ["2000-01", "2003-01"], (str, bytes)],
[[-5, 5], [-10, 10], None],
[(-5, 5), (0, 10), None],
[(-5, 5), (-10, 10), int],
[(-5, 5), (-10, 10), (int, float)],
[(0, None), (-5, 5), None],
[(None, 0), (-5, 5), None],
],
)
def test_valid_inputs(self, range_vals, full_range, val_type):
output = utils.validate_and_clip_range(range_vals, full_range, val_type)
assert isinstance(output, tuple)
assert len(output) == 2
if range_vals[0] is None:
assert output[0] == full_range[0]
else:
assert output[0] == max(range_vals[0], full_range[0])
if range_vals[1] is None:
assert output[1] == full_range[1]
else:
assert output[1] == min(range_vals[1], full_range[1])
@pytest.mark.parametrize(
"range_vals,full_range,val_type,error",
[
["2001-01", ("2000-01", "2003-01"), None, pytest.raises(TypeError)],
[("2001-01", "2002-01"), "2000-01", None, pytest.raises(TypeError)],
[
{"2001-01", "2002-01"},
("2000-01", "2003-01"),
None,
pytest.raises(TypeError),
],
[
("2001-01", "2002-01"),
("2000-01", "2003-01"),
datetime.date,
pytest.raises(TypeError),
],
[0, [-10, 10], None, pytest.raises(TypeError)],
[(-5, 5), 0, None, pytest.raises(TypeError)],
[[-5, 5], [-10, 10], (str, bytes), pytest.raises(TypeError)],
[
("2001-01", "2002-01", "2003-01"),
("2000-01", "2003-01"),
None,
pytest.raises(ValueError),
],
[
("2001-01", "2002-01"),
["2000-01", "2002-01", "2004-01"],
None,
pytest.raises(ValueError),
],
[[0, 5, 10], (-10, 10), None, pytest.raises(ValueError)],
[(-5, 5), [-10, 0, 10], None, pytest.raises(ValueError)],
[(-5, 5), [-10, 0, 10], (str, bytes), pytest.raises(ValueError)],
],
)
def test_invalid_inputs(self, range_vals, full_range, val_type, error):
with error:
_ = utils.validate_and_clip_range(range_vals, full_range, val_type)
class TestValidateSetMembers:
@pytest.mark.parametrize(
"vals,val_type,valid_vals",
[
[{"a", "b"}, (str, bytes), {"a", "b", "c"}],
["a", (str, bytes), {"a", "b", "c"}],
[("a", "b"), (str, bytes), {"a", "b", "c"}],
[["a", "b"], (str, bytes), None],
[{1, 2}, int, {1, 2, 3}],
[{1, 2}, (int, float), {1, 2, 3}],
[1, int, {1: "a", 2: "b", 3: "c"}],
[{3.14, 42.0}, float, None],
[3.14, (int, float), None],
]
)
def test_valid_inputs(self, vals, val_type, valid_vals):
output = utils.validate_set_members(vals, val_type, valid_vals)
assert isinstance(output, set)
assert all(isinstance(val, val_type) for val in output)
@pytest.mark.parametrize(
"vals,val_type,valid_vals,error",
[
[{"a", "b"}, int, None, pytest.raises(TypeError)],
["a", int, None, pytest.raises(TypeError)],
[("a", "b"), (int, float), None, pytest.raises(TypeError)],
[{"a", "b"}, (str, bytes), {"x", "y", "z"}, pytest.raises(ValueError)],
[{"a", "x"}, (str, bytes), {"x", "y", "z"}, pytest.raises(ValueError)],
["a", (str, bytes), {"x", "y", "z"}, pytest.raises(ValueError)],
["a", (str, bytes), {"x": 24, "y": 25, "z": 26}, pytest.raises(ValueError)],
]
)
def test_invalid_inputs(self, vals, val_type, valid_vals, error):
with error:
_ = utils.validate_set_members(vals, val_type, valid_vals)
# def _func_pos_only_args(parg1, parg2, /):
# return (parg1, parg2)
# TODO: uncomment this when we're only supporting PY3.8+
def _func_mix_args(arg, *, kwarg):
return (arg, kwarg)
def _func_kw_only_args(*, kwarg1, kwarg2):
return (kwarg1, kwarg2)
@pytest.mark.parametrize(
"func,kwargs,expected",
[
(_func_mix_args, {"arg": "argval"}, {"arg": "argval"}),
(
_func_mix_args,
{"arg": "argval", "kwarg": "kwarval"},
{"arg": "argval", "kwarg": "kwarval"},
),
(
_func_mix_args,
{"arg": "argval", "kwarg": "kwargval", "foo": "bar"},
{"arg": "argval", "kwarg": "kwargval"},
),
(
_func_kw_only_args,
{"kwarg1": "kwarg1val", "kwarg2": "kwarg2val"},
{"kwarg1": "kwarg1val", "kwarg2": "kwarg2val"},
),
(
_func_kw_only_args,
{"kwarg1": "kwarg1val", "kwarg3": "kwarg3val"},
{"kwarg1": "kwarg1val"},
),
(_func_kw_only_args, {}, {}),
],
)
def test_get_kwargs_for_func(func, kwargs, expected):
assert utils.get_kwargs_for_func(func, kwargs) == expected
@pytest.mark.parametrize(
"text, n, pad, exp",
[
(
"testing 123",
1,
False,
('t', 'e', 's', 't', 'i', 'n', 'g', ' ', '1', '2', '3'),
),
(
"testing 123",
1,
True,
('t', 'e', 's', 't', 'i', 'n', 'g', ' ', '1', '2', '3'),
),
(
"testing 123",
2,
False,
('te', 'es', 'st', 'ti', 'in', 'ng', 'g ', ' 1', '12', '23'),
),
(
"testing 123",
2,
True,
('_t', 'te', 'es', 'st', 'ti', 'in', 'ng', 'g ', ' 1', '12', '23', '3_'),
),
]
)
def test_text_to_char_ngrams(text, n, pad, exp):
obs = utils.text_to_char_ngrams(text, n, pad=pad)
assert all(isinstance(cng, str) and len(cng) == n for cng in obs)
assert obs == exp
| true | true |
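The final parametrized test pins down text_to_char_ngrams, including the underscore padding; a minimal implementation consistent with those expected tuples (a sketch, not necessarily textacy's actual code):
def text_to_char_ngrams(text: str, n: int, *, pad: bool = False) -> tuple:
    # Pad with n - 1 underscores per side so edge characters appear in full n-grams.
    if pad:
        text = f"{'_' * (n - 1)}{text}{'_' * (n - 1)}"
    return tuple(text[i:i + n] for i in range(len(text) - n + 1))

assert text_to_char_ngrams("testing 123", 2, pad=True)[0] == "_t"  # matches the test above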
f72453a57dbfc168b184e62aede21ddeafb3650f | 2,970 | py | Python | cerebro/storage/local.py | Abhishek2304/Cerebro-System-Ray | 1e2f2ae291cd449573f87bb83fb2bda12e606b3a | ["Apache-2.0"] | 16 | 2020-05-09T03:55:38.000Z | 2022-02-27T01:06:09.000Z | cerebro/storage/local.py | Abhishek2304/Cerebro-System-Ray | 1e2f2ae291cd449573f87bb83fb2bda12e606b3a | ["Apache-2.0"] | 16 | 2020-04-20T20:47:10.000Z | 2021-12-02T05:11:09.000Z | cerebro/storage/local.py | Abhishek2304/Cerebro-System-Ray | 1e2f2ae291cd449573f87bb83fb2bda12e606b3a | ["Apache-2.0"] | 6 | 2020-06-08T01:27:03.000Z | 2021-12-02T12:06:44.000Z | # Copyright 2020 Supun Nakandala, Yuhao Zhang, and Arun Kumar. All Rights Reserved.
# Copyright 2019 Uber Technologies, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
import contextlib
import errno
import os
import pyarrow as pa
from .base import FilesystemStore
class LocalStore(FilesystemStore):
"""Uses the local filesystem as a store of intermediate data and training artifacts (also works with NFS mounted
remote storage).
:param prefix_path: Prefix path of the local directory (e.g., /user/test/cerebro).
:param train_path: (Optional) Path of the directory to store training data. If not specified will default to
<prefix_path>/train_data
:param val_path: (Optional) Path of the directory to store validation data. If not specified will default to
<prefix_path>/val_data
:param runs_path: (Optional) Path of the directory to store model checkpoints and log. If not specified will default
to <prefix_path>/runs
"""
FS_PREFIX = 'file://'
def __init__(self, prefix_path, train_path=None, val_path=None, runs_path=None):
self._fs = pa.LocalFileSystem()
super(LocalStore, self).__init__(prefix_path, train_path=train_path, val_path=val_path, runs_path=runs_path)
def path_prefix(self):
return self.FS_PREFIX
def get_filesystem(self):
return self._fs
def get_local_output_dir_fn(self, run_id):
run_path = self.get_localized_path(self.get_run_path(run_id))
@contextlib.contextmanager
def local_run_path():
if not os.path.exists(run_path):
try:
os.makedirs(run_path, mode=0o755)
except OSError as e:
# Race condition from workers on the same host: ignore
if e.errno != errno.EEXIST:
raise
yield run_path
return local_run_path
def sync_fn(self, run_id):
run_path = self.get_localized_path(self.get_run_path(run_id))
def fn(local_run_path):
# No-op for LocalStore since the `local_run_path` will be the same as the run path
assert run_path == local_run_path
return fn
@classmethod
def filesystem_prefix(cls):
return cls.FS_PREFIX
| 36.666667 | 120 | 0.676768 |
from __future__ import absolute_import
from __future__ import print_function
import contextlib
import errno
import os
import pyarrow as pa
from .base import FilesystemStore
class LocalStore(FilesystemStore):
FS_PREFIX = 'file://'
def __init__(self, prefix_path, train_path=None, val_path=None, runs_path=None):
self._fs = pa.LocalFileSystem()
super(LocalStore, self).__init__(prefix_path, train_path=train_path, val_path=val_path, runs_path=runs_path)
def path_prefix(self):
return self.FS_PREFIX
def get_filesystem(self):
return self._fs
def get_local_output_dir_fn(self, run_id):
run_path = self.get_localized_path(self.get_run_path(run_id))
@contextlib.contextmanager
def local_run_path():
if not os.path.exists(run_path):
try:
os.makedirs(run_path, mode=0o755)
except OSError as e:
if e.errno != errno.EEXIST:
raise
yield run_path
return local_run_path
def sync_fn(self, run_id):
run_path = self.get_localized_path(self.get_run_path(run_id))
def fn(local_run_path):
assert run_path == local_run_path
return fn
@classmethod
def filesystem_prefix(cls):
return cls.FS_PREFIX
| true | true |
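The EEXIST guard in get_local_output_dir_fn above tolerates workers on the same host racing to create one run directory; a standalone sketch of the pattern (on Python 3.2+, os.makedirs(path, exist_ok=True) is the one-line equivalent):
import errno
import os

def ensure_dir(path: str) -> str:
    """Create `path` if needed, tolerating a concurrent creator (EEXIST)."""
    try:
        os.makedirs(path, mode=0o755)
    except OSError as e:
        if e.errno != errno.EEXIST:  # anything other than "already exists" is real
            raise
    return path

ensure_dir("/tmp/cerebro-demo/run_0")  # hypothetical run path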
f7245409b3ef11681b481c7e78e544f12653dcc6 | 714 | py | Python | pyhfo/ui/adjust_spines.py | andersonbrisil/pyhfo | 0fdbe834442550117dc9d9c8f611989bb600db62 | ["MIT"] | null | null | null | pyhfo/ui/adjust_spines.py | andersonbrisil/pyhfo | 0fdbe834442550117dc9d9c8f611989bb600db62 | ["MIT"] | null | null | null | pyhfo/ui/adjust_spines.py | andersonbrisil/pyhfo | 0fdbe834442550117dc9d9c8f611989bb600db62 | ["MIT"] | null | null | null | # -*- coding: utf-8 -*-
"""
Function adjust spines
Created on Fri Apr 17 10:18:30 2015
@author: anderson
"""
def adjust_spines(ax, spines):
for loc, spine in ax.spines.items():
if loc in spines:
            spine.set_position(('outward', 2)) # outward by 2 points
spine.set_smart_bounds(False)
else:
spine.set_color('none') # don't draw spine
# turn off ticks where there is no spine
if 'left' in spines:
ax.yaxis.set_ticks_position('left')
else:
# no yaxis ticks
ax.yaxis.set_ticks([])
if 'bottom' in spines:
ax.xaxis.set_ticks_position('bottom')
else:
# no xaxis ticks
ax.xaxis.set_ticks([]) | 25.5 | 70 | 0.591036 |
def adjust_spines(ax, spines):
for loc, spine in ax.spines.items():
if loc in spines:
spine.set_position(('outward', 2))
spine.set_smart_bounds(False)
else:
spine.set_color('none')
# turn off ticks where there is no spine
if 'left' in spines:
ax.yaxis.set_ticks_position('left')
else:
# no yaxis ticks
ax.yaxis.set_ticks([])
if 'bottom' in spines:
ax.xaxis.set_ticks_position('bottom')
else:
# no xaxis ticks
ax.xaxis.set_ticks([]) | true | true |
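A usage sketch for adjust_spines, assuming the function above is importable as shown and a matplotlib release old enough to still provide Spine.set_smart_bounds:
import matplotlib.pyplot as plt
import numpy as np
from pyhfo.ui.adjust_spines import adjust_spines  # assumed import path

fig, ax = plt.subplots()
x = np.linspace(0, 2 * np.pi, 200)
ax.plot(x, np.sin(x))
adjust_spines(ax, ['left', 'bottom'])  # keep only left/bottom spines and their ticks
plt.show()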
f7245440fe7dba32ffeb3a85b4b83af243aba25b | 290 | py | Python | ceuclid.py | jprzywoski/faster-python | 44252bf0a746dd862d752efbe2012a8a404ec7bf | ["MIT"] | null | null | null | ceuclid.py | jprzywoski/faster-python | 44252bf0a746dd862d752efbe2012a8a404ec7bf | ["MIT"] | null | null | null | ceuclid.py | jprzywoski/faster-python | 44252bf0a746dd862d752efbe2012a8a404ec7bf | ["MIT"] | null | null | null | import ctypes
from numpy.ctypeslib import ndpointer
lib = ctypes.cdll.LoadLibrary('./libdist.so')
fn = lib.dist
fn.restype = ctypes.c_double
fn.argtypes = [
ndpointer(ctypes.c_double),
ndpointer(ctypes.c_double),
ctypes.c_size_t
]
def dist(x, y):
return fn(x, y, len(x))
| 18.125 | 45 | 0.7 | import ctypes
from numpy.ctypeslib import ndpointer
lib = ctypes.cdll.LoadLibrary('./libdist.so')
fn = lib.dist
fn.restype = ctypes.c_double
fn.argtypes = [
ndpointer(ctypes.c_double),
ndpointer(ctypes.c_double),
ctypes.c_size_t
]
def dist(x, y):
return fn(x, y, len(x))
| true | true |
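A usage sketch for the ctypes wrapper above; it assumes the module is importable as ceuclid and that libdist.so exports double dist(const double *x, const double *y, size_t n):
import numpy as np
from ceuclid import dist  # the wrapper defined above

# Contiguous float64 arrays satisfy the ndpointer(ctypes.c_double) contract.
x = np.ascontiguousarray(np.random.rand(1000))
y = np.ascontiguousarray(np.random.rand(1000))
print(dist(x, y))  # Euclidean distance computed by the C routine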
f724555c65e6b8b852f9d596dad3446f0fbf8099 | 448 | py | Python | chap_7/decay_plot.py | jieyanzhu/codes-effective-computation-in-physics | 0c99f2da9d462229e6b174a010d7c7b08af4482b | ["MIT"] | null | null | null | chap_7/decay_plot.py | jieyanzhu/codes-effective-computation-in-physics | 0c99f2da9d462229e6b174a010d7c7b08af4482b | ["MIT"] | 1 | 2021-12-23T10:09:01.000Z | 2021-12-23T12:06:25.000Z | chap_7/decay_plot.py | jieyanzhu/codes-effective-computation-in-physics | 0c99f2da9d462229e6b174a010d7c7b08af4482b | ["MIT"] | null | null | null | import numpy as np
# as in the previous example, load decays.csv into a NumPy array
decaydata = np.loadtxt('decays.csv', delimiter=',', skiprows=1)
# provide handles for the x and y columns
time = decaydata[:,0]
decays = decaydata[:,1]
# import the matplotlib plotting functionality
import pylab as plt
plt.plot(time, decays)
plt.xlabel('Time (s)')
plt.ylabel('Decays')
plt.title('Decays')
plt.grid(True)
plt.savefig("decays_matplotlib.png")
| 22.4 | 64 | 0.734375 | import numpy as np
decaydata = np.loadtxt('decays.csv', delimiter=',', skiprows=1)
time = decaydata[:,0]
decays = decaydata[:,1]
import pylab as plt
plt.plot(time, decays)
plt.xlabel('Time (s)')
plt.ylabel('Decays')
plt.title('Decays')
plt.grid(True)
plt.savefig("decays_matplotlib.png")
| true | true |
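To run the script above without the book's data file, a sketch that writes a compatible decays.csv (one header row that loadtxt skips, then time/decay columns; the curve itself is made up):
import numpy as np

t = np.linspace(0, 1000, 50)          # seconds
decays = 1000.0 * np.exp(-t / 300.0)  # hypothetical exponential decay
np.savetxt("decays.csv",
           np.column_stack([t, decays]),
           delimiter=",",
           header="time (s),decays",
           comments="")               # keep the header line uncommented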
f72455bce16e4d2b4c4851dc371a6ac3b783489a | 1,136 | py | Python | oautom/execution/lambda_execution.py | FabienArcellier/oautom | ed818a34ca726355b1227d3485052793e159b177 | ["MIT"] | null | null | null | oautom/execution/lambda_execution.py | FabienArcellier/oautom | ed818a34ca726355b1227d3485052793e159b177 | ["MIT"] | null | null | null | oautom/execution/lambda_execution.py | FabienArcellier/oautom | ed818a34ca726355b1227d3485052793e159b177 | ["MIT"] | null | null | null | # pylint: disable=useless-super-delegation
import json
from concurrent.futures import ThreadPoolExecutor, Future
import boto3
from oautom import get_logger
from oautom.execution.execution import Execution
class LambdaExecution(Execution):
def __init__(self, name: str, flow: 'Flow', lambda_function: str, payload: dict = {}):
super().__init__(name, flow)
self._future = None # type: Future
self._lambda_arn = lambda_function
self._payload = payload
def run(self):
super().run()
# self._logger.info(f"lambda: {self._lambda_arn}")
        # A `with` block here would join the worker on exit, making run() block until the lambda returns.
        executor = ThreadPoolExecutor(max_workers=1)
        self._future = executor.submit(_run_lambda, self._lambda_arn, self._payload)
        executor.shutdown(wait=False)
def check(self) -> bool:
return self._future.done()
def _run_lambda(lambda_function: str, payload: dict):
logger = get_logger()
logger.info(f"lambda: {lambda_function}")
client = boto3.client('lambda')
client.invoke(
FunctionName=lambda_function,
InvocationType='RequestResponse',
LogType='None',
Payload=json.dumps(payload),
)
| 29.128205 | 90 | 0.681338 |
import json
from concurrent.futures import ThreadPoolExecutor, Future
import boto3
from oautom import get_logger
from oautom.execution.execution import Execution
class LambdaExecution(Execution):
def __init__(self, name: str, flow: 'Flow', lambda_function: str, payload: dict = {}):
super().__init__(name, flow)
self._future = None
self._lambda_arn = lambda_function
self._payload = payload
def run(self):
super().run()
        executor = ThreadPoolExecutor(max_workers=1)
        self._future = executor.submit(_run_lambda, self._lambda_arn, self._payload)
        executor.shutdown(wait=False)
def check(self) -> bool:
return self._future.done()
def _run_lambda(lambda_function: str, payload: dict):
logger = get_logger()
logger.info(f"lambda: {lambda_function}")
client = boto3.client('lambda')
client.invoke(
FunctionName=lambda_function,
InvocationType='RequestResponse',
LogType='None',
Payload=json.dumps(payload),
)
| true | true |
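The run() fix above relies on ThreadPoolExecutor.shutdown(wait=False) letting already-submitted work finish in the background; a standalone sketch of the difference (a with-block would join the worker and block instead):
import time
from concurrent.futures import ThreadPoolExecutor

def slow_call():
    time.sleep(1.0)
    return "done"

executor = ThreadPoolExecutor(max_workers=1)
future = executor.submit(slow_call)
executor.shutdown(wait=False)  # worker thread keeps running; no join here
print(future.done())           # False: submission returned immediately
print(future.result())         # blocks until the call finishes -> "done"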
f72455eb2dee4eaf4803e76cccf48e4cb510f279 | 1,423 | py | Python | cam_lecture/scripts/edge_filter_compressed.py | yasutomo57jp/ros_lecture | 811afaded5a5780fa1291bd41196d80446da1e53 | ["MIT"] | 110 | 2018-11-13T15:04:35.000Z | 2022-03-27T20:48:03.000Z | cam_lecture/scripts/edge_filter_compressed.py | yasutomo57jp/ros_lecture | 811afaded5a5780fa1291bd41196d80446da1e53 | ["MIT"] | 4 | 2020-07-16T13:32:22.000Z | 2022-01-11T01:08:12.000Z | cam_lecture/scripts/edge_filter_compressed.py | yasutomo57jp/ros_lecture | 811afaded5a5780fa1291bd41196d80446da1e53 | ["MIT"] | 57 | 2019-07-02T23:43:17.000Z | 2022-03-27T20:47:28.000Z | #!/usr/bin/env python
import rospy
import sys
import cv2
from sensor_msgs.msg import Image, CompressedImage, CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
class cvBridgeDemo:
def __init__(self):
self.node_name = "cv_bridge_demo_compressed"
rospy.init_node(self.node_name)
rospy.on_shutdown(self.cleanup)
self.bridge = CvBridge()
self.image_sub = rospy.Subscriber("input_image", CompressedImage, self.image_callback, queue_size=1)
self.image_pub = rospy.Publisher('output_image', Image, queue_size=1)
def image_callback(self, ros_image_compressed):
try:
            # np.fromstring is deprecated for binary input; frombuffer is the drop-in replacement
            np_arr = np.frombuffer(ros_image_compressed.data, np.uint8)
            input_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
        except CvBridgeError as e:  # Python 3 exception syntax (also valid on 2.7)
            print(e)
output_image = self.process_image(input_image)
self.image_pub.publish(self.bridge.cv2_to_imgmsg(output_image, "mono8"))
cv2.imshow(self.node_name, output_image)
cv2.waitKey(1)
def process_image(self, frame):
grey = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
grey = cv2.blur(grey, (7, 7))
edges = cv2.Canny(grey, 15.0, 30.0)
return edges
def cleanup(self):
cv2.destroyAllWindows()
if __name__ == '__main__':
cvBridgeDemo()
rospy.spin() | 33.880952 | 108 | 0.650738 |
import rospy
import sys
import cv2
from sensor_msgs.msg import Image, CompressedImage, CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
class cvBridgeDemo:
def __init__(self):
self.node_name = "cv_bridge_demo_compressed"
rospy.init_node(self.node_name)
rospy.on_shutdown(self.cleanup)
self.bridge = CvBridge()
self.image_sub = rospy.Subscriber("input_image", CompressedImage, self.image_callback, queue_size=1)
self.image_pub = rospy.Publisher('output_image', Image, queue_size=1)
def image_callback(self, ros_image_compressed):
try:
            np_arr = np.frombuffer(ros_image_compressed.data, np.uint8)
            input_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
        except CvBridgeError as e:
            print(e)
output_image = self.process_image(input_image)
self.image_pub.publish(self.bridge.cv2_to_imgmsg(output_image, "mono8"))
cv2.imshow(self.node_name, output_image)
cv2.waitKey(1)
def process_image(self, frame):
grey = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
grey = cv2.blur(grey, (7, 7))
edges = cv2.Canny(grey, 15.0, 30.0)
return edges
def cleanup(self):
cv2.destroyAllWindows()
if __name__ == '__main__':
cvBridgeDemo()
rospy.spin() | false | true |
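The grey -> blur -> Canny pipeline in process_image above works the same outside ROS; a self-contained sketch on a synthetic frame:
import cv2
import numpy as np

frame = np.zeros((120, 120, 3), dtype=np.uint8)
cv2.rectangle(frame, (30, 30), (90, 90), (255, 255, 255), -1)  # synthetic white square

grey = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
grey = cv2.blur(grey, (7, 7))
edges = cv2.Canny(grey, 15.0, 30.0)  # same thresholds as the node above
print(edges.max())                   # 255 where the square's outline was detected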
f724577d79ac37b03a6ecb734534f38b37edce0a | 14,449 | py | Python | skimage/measure/tests/test_regionprops.py | jjhelmus/scikit-image | b9b5fde0821fe8bcece2528b30d012c65c64ad6f | ["BSD-3-Clause"] | 2 | 2017-03-30T11:22:11.000Z | 2019-03-03T05:18:01.000Z | skimage/measure/tests/test_regionprops.py | jjhelmus/scikit-image | b9b5fde0821fe8bcece2528b30d012c65c64ad6f | ["BSD-3-Clause"] | null | null | null | skimage/measure/tests/test_regionprops.py | jjhelmus/scikit-image | b9b5fde0821fe8bcece2528b30d012c65c64ad6f | ["BSD-3-Clause"] | 1 | 2019-12-17T14:53:28.000Z | 2019-12-17T14:53:28.000Z | from numpy.testing import assert_array_equal, assert_almost_equal, \
assert_array_almost_equal, assert_raises, assert_equal
import numpy as np
import math
from skimage.measure._regionprops import (regionprops, PROPS, perimeter,
_parse_docs)
SAMPLE = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1],
[0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1]]
)
INTENSITY_SAMPLE = SAMPLE.copy()
INTENSITY_SAMPLE[1, 9:11] = 2
SAMPLE_3D = np.zeros((6, 6, 6), dtype=np.uint8)
SAMPLE_3D[1:3, 1:3, 1:3] = 1
SAMPLE_3D[3, 2, 2] = 1
INTENSITY_SAMPLE_3D = SAMPLE_3D.copy()
def test_all_props():
region = regionprops(SAMPLE, INTENSITY_SAMPLE)[0]
for prop in PROPS:
assert_almost_equal(region[prop], getattr(region, PROPS[prop]))
def test_all_props_3d():
region = regionprops(SAMPLE_3D, INTENSITY_SAMPLE_3D)[0]
for prop in PROPS:
try:
assert_almost_equal(region[prop], getattr(region, PROPS[prop]))
except NotImplementedError:
pass
def test_dtype():
regionprops(np.zeros((10, 10), dtype=np.int))
regionprops(np.zeros((10, 10), dtype=np.uint))
assert_raises((TypeError), regionprops,
np.zeros((10, 10), dtype=np.float))
assert_raises((TypeError), regionprops,
np.zeros((10, 10), dtype=np.double))
def test_ndim():
regionprops(np.zeros((10, 10), dtype=np.int))
regionprops(np.zeros((10, 10, 1), dtype=np.int))
regionprops(np.zeros((10, 10, 1, 1), dtype=np.int))
regionprops(np.zeros((10, 10, 10), dtype=np.int))
assert_raises(TypeError, regionprops, np.zeros((10, 10, 10, 2), dtype=np.int))
def test_area():
area = regionprops(SAMPLE)[0].area
assert area == np.sum(SAMPLE)
area = regionprops(SAMPLE_3D)[0].area
assert area == np.sum(SAMPLE_3D)
def test_bbox():
bbox = regionprops(SAMPLE)[0].bbox
assert_array_almost_equal(bbox, (0, 0, SAMPLE.shape[0], SAMPLE.shape[1]))
SAMPLE_mod = SAMPLE.copy()
SAMPLE_mod[:, -1] = 0
bbox = regionprops(SAMPLE_mod)[0].bbox
assert_array_almost_equal(bbox, (0, 0, SAMPLE.shape[0], SAMPLE.shape[1]-1))
bbox = regionprops(SAMPLE_3D)[0].bbox
assert_array_almost_equal(bbox, (1, 1, 1, 4, 3, 3))
def test_moments_central():
mu = regionprops(SAMPLE)[0].moments_central
# determined with OpenCV
assert_almost_equal(mu[0,2], 436.00000000000045)
# different from OpenCV results, bug in OpenCV
assert_almost_equal(mu[0,3], -737.333333333333)
assert_almost_equal(mu[1,1], -87.33333333333303)
assert_almost_equal(mu[1,2], -127.5555555555593)
assert_almost_equal(mu[2,0], 1259.7777777777774)
assert_almost_equal(mu[2,1], 2000.296296296291)
assert_almost_equal(mu[3,0], -760.0246913580195)
def test_centroid():
centroid = regionprops(SAMPLE)[0].centroid
# determined with MATLAB
assert_array_almost_equal(centroid, (5.66666666666666, 9.444444444444444))
def test_convex_area():
area = regionprops(SAMPLE)[0].convex_area
# determined with MATLAB
assert area == 124
def test_convex_image():
img = regionprops(SAMPLE)[0].convex_image
# determined with MATLAB
ref = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
)
assert_array_equal(img, ref)
def test_coordinates():
sample = np.zeros((10, 10), dtype=np.int8)
coords = np.array([[3, 2], [3, 3], [3, 4]])
sample[coords[:, 0], coords[:, 1]] = 1
prop_coords = regionprops(sample)[0].coords
assert_array_equal(prop_coords, coords)
sample = np.zeros((6, 6, 6), dtype=np.int8)
coords = np.array([[1, 1, 1], [1, 2, 1], [1, 3, 1]])
sample[coords[:, 0], coords[:, 1], coords[:, 2]] = 1
prop_coords = regionprops(sample)[0].coords
assert_array_equal(prop_coords, coords)
def test_eccentricity():
eps = regionprops(SAMPLE)[0].eccentricity
assert_almost_equal(eps, 0.814629313427)
img = np.zeros((5, 5), dtype=np.int)
img[2, 2] = 1
eps = regionprops(img)[0].eccentricity
assert_almost_equal(eps, 0)
def test_equiv_diameter():
diameter = regionprops(SAMPLE)[0].equivalent_diameter
# determined with MATLAB
assert_almost_equal(diameter, 9.57461472963)
def test_euler_number():
en = regionprops(SAMPLE)[0].euler_number
assert en == 1
SAMPLE_mod = SAMPLE.copy()
SAMPLE_mod[7, -3] = 0
en = regionprops(SAMPLE_mod)[0].euler_number
assert en == 0
def test_extent():
extent = regionprops(SAMPLE)[0].extent
assert_almost_equal(extent, 0.4)
def test_moments_hu():
hu = regionprops(SAMPLE)[0].moments_hu
ref = np.array([
3.27117627e-01,
2.63869194e-02,
2.35390060e-02,
1.23151193e-03,
1.38882330e-06,
-2.72586158e-05,
6.48350653e-06
])
# bug in OpenCV caused in Central Moments calculation?
assert_array_almost_equal(hu, ref)
def test_image():
img = regionprops(SAMPLE)[0].image
assert_array_equal(img, SAMPLE)
img = regionprops(SAMPLE_3D)[0].image
assert_array_equal(img, SAMPLE_3D[1:4, 1:3, 1:3])
def test_label():
label = regionprops(SAMPLE)[0].label
assert_array_equal(label, 1)
label = regionprops(SAMPLE_3D)[0].label
assert_array_equal(label, 1)
def test_filled_area():
area = regionprops(SAMPLE)[0].filled_area
assert area == np.sum(SAMPLE)
SAMPLE_mod = SAMPLE.copy()
SAMPLE_mod[7, -3] = 0
area = regionprops(SAMPLE_mod)[0].filled_area
assert area == np.sum(SAMPLE)
def test_filled_image():
img = regionprops(SAMPLE)[0].filled_image
assert_array_equal(img, SAMPLE)
def test_major_axis_length():
length = regionprops(SAMPLE)[0].major_axis_length
# MATLAB has different interpretation of ellipse than found in literature,
# here implemented as found in literature
assert_almost_equal(length, 16.7924234999)
def test_max_intensity():
intensity = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].max_intensity
assert_almost_equal(intensity, 2)
def test_mean_intensity():
intensity = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].mean_intensity
assert_almost_equal(intensity, 1.02777777777777)
def test_min_intensity():
intensity = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].min_intensity
assert_almost_equal(intensity, 1)
def test_minor_axis_length():
length = regionprops(SAMPLE)[0].minor_axis_length
# MATLAB has different interpretation of ellipse than found in literature,
# here implemented as found in literature
assert_almost_equal(length, 9.739302807263)
def test_moments():
m = regionprops(SAMPLE)[0].moments
# determined with OpenCV
assert_almost_equal(m[0,0], 72.0)
assert_almost_equal(m[0,1], 408.0)
assert_almost_equal(m[0,2], 2748.0)
assert_almost_equal(m[0,3], 19776.0)
assert_almost_equal(m[1,0], 680.0)
assert_almost_equal(m[1,1], 3766.0)
assert_almost_equal(m[1,2], 24836.0)
assert_almost_equal(m[2,0], 7682.0)
assert_almost_equal(m[2,1], 43882.0)
assert_almost_equal(m[3,0], 95588.0)
def test_moments_normalized():
nu = regionprops(SAMPLE)[0].moments_normalized
# determined with OpenCV
assert_almost_equal(nu[0,2], 0.08410493827160502)
assert_almost_equal(nu[1,1], -0.016846707818929982)
assert_almost_equal(nu[1,2], -0.002899800614433943)
assert_almost_equal(nu[2,0], 0.24301268861454037)
assert_almost_equal(nu[2,1], 0.045473992910668816)
assert_almost_equal(nu[3,0], -0.017278118992041805)
def test_orientation():
orientation = regionprops(SAMPLE)[0].orientation
# determined with MATLAB
assert_almost_equal(orientation, 0.10446844651921)
# test correct quadrant determination
orientation2 = regionprops(SAMPLE.T)[0].orientation
assert_almost_equal(orientation2, math.pi / 2 - orientation)
# test diagonal regions
diag = np.eye(10, dtype=int)
orientation_diag = regionprops(diag)[0].orientation
assert_almost_equal(orientation_diag, -math.pi / 4)
orientation_diag = regionprops(np.flipud(diag))[0].orientation
assert_almost_equal(orientation_diag, math.pi / 4)
orientation_diag = regionprops(np.fliplr(diag))[0].orientation
assert_almost_equal(orientation_diag, math.pi / 4)
orientation_diag = regionprops(np.fliplr(np.flipud(diag)))[0].orientation
assert_almost_equal(orientation_diag, -math.pi / 4)
def test_perimeter():
per = regionprops(SAMPLE)[0].perimeter
assert_almost_equal(per, 55.2487373415)
per = perimeter(SAMPLE.astype('double'), neighbourhood=8)
assert_almost_equal(per, 46.8284271247)
def test_solidity():
solidity = regionprops(SAMPLE)[0].solidity
# determined with MATLAB
assert_almost_equal(solidity, 0.580645161290323)
def test_weighted_moments_central():
wmu = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments_central
ref = np.array(
[[ 7.4000000000e+01, -2.1316282073e-13, 4.7837837838e+02,
-7.5943608473e+02],
[ 3.7303493627e-14, -8.7837837838e+01, -1.4801314828e+02,
-1.2714707125e+03],
[ 1.2602837838e+03, 2.1571526662e+03, 6.6989799420e+03,
1.5304076361e+04],
[ -7.6561796932e+02, -4.2385971907e+03, -9.9501164076e+03,
-3.3156729271e+04]]
)
np.set_printoptions(precision=10)
assert_array_almost_equal(wmu, ref)
def test_weighted_centroid():
centroid = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_centroid
assert_array_almost_equal(centroid, (5.540540540540, 9.445945945945))
def test_weighted_moments_hu():
whu = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments_hu
ref = np.array([
3.1750587329e-01,
2.1417517159e-02,
2.3609322038e-02,
1.2565683360e-03,
8.3014209421e-07,
-3.5073773473e-05,
6.7936409056e-06
])
assert_array_almost_equal(whu, ref)
def test_weighted_moments():
wm = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments
ref = np.array(
[[ 7.4000000000e+01, 4.1000000000e+02, 2.7500000000e+03,
1.9778000000e+04],
[ 6.9900000000e+02, 3.7850000000e+03, 2.4855000000e+04,
1.7500100000e+05],
[ 7.8630000000e+03, 4.4063000000e+04, 2.9347700000e+05,
2.0810510000e+06],
[ 9.7317000000e+04, 5.7256700000e+05, 3.9007170000e+06,
2.8078871000e+07]]
)
assert_array_almost_equal(wm, ref)
def test_weighted_moments_normalized():
wnu = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments_normalized
ref = np.array(
[[ np.nan, np.nan, 0.0873590903, -0.0161217406],
[ np.nan, -0.0160405109, -0.0031421072, -0.0031376984],
[ 0.230146783, 0.0457932622, 0.0165315478, 0.0043903193],
[-0.0162529732, -0.0104598869, -0.0028544152, -0.0011057191]]
)
assert_array_almost_equal(wnu, ref)
def test_label_sequence():
a = np.empty((2, 2), dtype=np.int)
a[:, :] = 2
ps = regionprops(a)
assert len(ps) == 1
assert ps[0].label == 2
def test_pure_background():
a = np.zeros((2, 2), dtype=np.int)
ps = regionprops(a)
assert len(ps) == 0
def test_invalid():
ps = regionprops(SAMPLE)
def get_intensity_image():
ps[0].intensity_image
assert_raises(AttributeError, get_intensity_image)
def test_invalid_size():
wrong_intensity_sample = np.array([[1], [1]])
assert_raises(ValueError, regionprops, SAMPLE, wrong_intensity_sample)
def test_equals():
arr = np.zeros((100, 100), dtype=np.int)
arr[0:25, 0:25] = 1
arr[50:99, 50:99] = 2
regions = regionprops(arr)
r1 = regions[0]
regions = regionprops(arr)
r2 = regions[0]
r3 = regions[1]
assert_equal(r1 == r2, True, "Same regionprops are not equal")
assert_equal(r1 != r3, True, "Different regionprops are equal")
def test_iterate_all_props():
region = regionprops(SAMPLE)[0]
p0 = dict((p, region[p]) for p in region)
region = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE)[0]
p1 = dict((p, region[p]) for p in region)
assert len(p0) < len(p1)
def test_cache():
region = regionprops(SAMPLE)[0]
f0 = region.filled_image
region._label_image[:10] = 1
f1 = region.filled_image
# Changed underlying image, but cache keeps result the same
assert_array_equal(f0, f1)
# Now invalidate cache
region._cache_active = False
f1 = region.filled_image
assert np.any(f0 != f1)
def test_docstrings_and_props():
region = regionprops(SAMPLE)[0]
docs = _parse_docs()
props = [m for m in dir(region) if not m.startswith('_')]
nr_docs_parsed = len(docs)
nr_props = len(props)
assert_equal(nr_docs_parsed, nr_props)
ds = docs['weighted_moments_normalized']
assert 'iteration' not in ds
assert len(ds.split('\n')) > 3
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
| 31.479303 | 82 | 0.637207 | from numpy.testing import assert_array_equal, assert_almost_equal, \
assert_array_almost_equal, assert_raises, assert_equal
import numpy as np
import math
from skimage.measure._regionprops import (regionprops, PROPS, perimeter,
_parse_docs)
SAMPLE = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1],
[0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1]]
)
INTENSITY_SAMPLE = SAMPLE.copy()
INTENSITY_SAMPLE[1, 9:11] = 2
SAMPLE_3D = np.zeros((6, 6, 6), dtype=np.uint8)
SAMPLE_3D[1:3, 1:3, 1:3] = 1
SAMPLE_3D[3, 2, 2] = 1
INTENSITY_SAMPLE_3D = SAMPLE_3D.copy()
def test_all_props():
region = regionprops(SAMPLE, INTENSITY_SAMPLE)[0]
for prop in PROPS:
assert_almost_equal(region[prop], getattr(region, PROPS[prop]))
def test_all_props_3d():
region = regionprops(SAMPLE_3D, INTENSITY_SAMPLE_3D)[0]
for prop in PROPS:
try:
assert_almost_equal(region[prop], getattr(region, PROPS[prop]))
except NotImplementedError:
pass
def test_dtype():
regionprops(np.zeros((10, 10), dtype=np.int))
regionprops(np.zeros((10, 10), dtype=np.uint))
assert_raises((TypeError), regionprops,
np.zeros((10, 10), dtype=np.float))
assert_raises((TypeError), regionprops,
np.zeros((10, 10), dtype=np.double))
def test_ndim():
regionprops(np.zeros((10, 10), dtype=np.int))
regionprops(np.zeros((10, 10, 1), dtype=np.int))
regionprops(np.zeros((10, 10, 1, 1), dtype=np.int))
regionprops(np.zeros((10, 10, 10), dtype=np.int))
assert_raises(TypeError, regionprops, np.zeros((10, 10, 10, 2), dtype=np.int))
def test_area():
area = regionprops(SAMPLE)[0].area
assert area == np.sum(SAMPLE)
area = regionprops(SAMPLE_3D)[0].area
assert area == np.sum(SAMPLE_3D)
def test_bbox():
bbox = regionprops(SAMPLE)[0].bbox
assert_array_almost_equal(bbox, (0, 0, SAMPLE.shape[0], SAMPLE.shape[1]))
SAMPLE_mod = SAMPLE.copy()
SAMPLE_mod[:, -1] = 0
bbox = regionprops(SAMPLE_mod)[0].bbox
assert_array_almost_equal(bbox, (0, 0, SAMPLE.shape[0], SAMPLE.shape[1]-1))
bbox = regionprops(SAMPLE_3D)[0].bbox
assert_array_almost_equal(bbox, (1, 1, 1, 4, 3, 3))
def test_moments_central():
mu = regionprops(SAMPLE)[0].moments_central
assert_almost_equal(mu[0,2], 436.00000000000045)
assert_almost_equal(mu[0,3], -737.333333333333)
assert_almost_equal(mu[1,1], -87.33333333333303)
assert_almost_equal(mu[1,2], -127.5555555555593)
assert_almost_equal(mu[2,0], 1259.7777777777774)
assert_almost_equal(mu[2,1], 2000.296296296291)
assert_almost_equal(mu[3,0], -760.0246913580195)
def test_centroid():
centroid = regionprops(SAMPLE)[0].centroid
assert_array_almost_equal(centroid, (5.66666666666666, 9.444444444444444))
def test_convex_area():
area = regionprops(SAMPLE)[0].convex_area
assert area == 124
def test_convex_image():
img = regionprops(SAMPLE)[0].convex_image
ref = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
)
assert_array_equal(img, ref)
def test_coordinates():
sample = np.zeros((10, 10), dtype=np.int8)
coords = np.array([[3, 2], [3, 3], [3, 4]])
sample[coords[:, 0], coords[:, 1]] = 1
prop_coords = regionprops(sample)[0].coords
assert_array_equal(prop_coords, coords)
sample = np.zeros((6, 6, 6), dtype=np.int8)
coords = np.array([[1, 1, 1], [1, 2, 1], [1, 3, 1]])
sample[coords[:, 0], coords[:, 1], coords[:, 2]] = 1
prop_coords = regionprops(sample)[0].coords
assert_array_equal(prop_coords, coords)
def test_eccentricity():
eps = regionprops(SAMPLE)[0].eccentricity
assert_almost_equal(eps, 0.814629313427)
img = np.zeros((5, 5), dtype=np.int)
img[2, 2] = 1
eps = regionprops(img)[0].eccentricity
assert_almost_equal(eps, 0)
def test_equiv_diameter():
diameter = regionprops(SAMPLE)[0].equivalent_diameter
assert_almost_equal(diameter, 9.57461472963)
def test_euler_number():
en = regionprops(SAMPLE)[0].euler_number
assert en == 1
SAMPLE_mod = SAMPLE.copy()
SAMPLE_mod[7, -3] = 0
en = regionprops(SAMPLE_mod)[0].euler_number
assert en == 0
def test_extent():
extent = regionprops(SAMPLE)[0].extent
assert_almost_equal(extent, 0.4)
def test_moments_hu():
hu = regionprops(SAMPLE)[0].moments_hu
ref = np.array([
3.27117627e-01,
2.63869194e-02,
2.35390060e-02,
1.23151193e-03,
1.38882330e-06,
-2.72586158e-05,
6.48350653e-06
])
assert_array_almost_equal(hu, ref)
def test_image():
img = regionprops(SAMPLE)[0].image
assert_array_equal(img, SAMPLE)
img = regionprops(SAMPLE_3D)[0].image
assert_array_equal(img, SAMPLE_3D[1:4, 1:3, 1:3])
def test_label():
label = regionprops(SAMPLE)[0].label
assert_array_equal(label, 1)
label = regionprops(SAMPLE_3D)[0].label
assert_array_equal(label, 1)
def test_filled_area():
area = regionprops(SAMPLE)[0].filled_area
assert area == np.sum(SAMPLE)
SAMPLE_mod = SAMPLE.copy()
SAMPLE_mod[7, -3] = 0
area = regionprops(SAMPLE_mod)[0].filled_area
assert area == np.sum(SAMPLE)
def test_filled_image():
img = regionprops(SAMPLE)[0].filled_image
assert_array_equal(img, SAMPLE)
def test_major_axis_length():
length = regionprops(SAMPLE)[0].major_axis_length
assert_almost_equal(length, 16.7924234999)
def test_max_intensity():
intensity = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].max_intensity
assert_almost_equal(intensity, 2)
def test_mean_intensity():
intensity = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].mean_intensity
assert_almost_equal(intensity, 1.02777777777777)
def test_min_intensity():
intensity = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].min_intensity
assert_almost_equal(intensity, 1)
def test_minor_axis_length():
length = regionprops(SAMPLE)[0].minor_axis_length
assert_almost_equal(length, 9.739302807263)
def test_moments():
m = regionprops(SAMPLE)[0].moments
assert_almost_equal(m[0,0], 72.0)
assert_almost_equal(m[0,1], 408.0)
assert_almost_equal(m[0,2], 2748.0)
assert_almost_equal(m[0,3], 19776.0)
assert_almost_equal(m[1,0], 680.0)
assert_almost_equal(m[1,1], 3766.0)
assert_almost_equal(m[1,2], 24836.0)
assert_almost_equal(m[2,0], 7682.0)
assert_almost_equal(m[2,1], 43882.0)
assert_almost_equal(m[3,0], 95588.0)
def test_moments_normalized():
nu = regionprops(SAMPLE)[0].moments_normalized
assert_almost_equal(nu[0,2], 0.08410493827160502)
assert_almost_equal(nu[1,1], -0.016846707818929982)
assert_almost_equal(nu[1,2], -0.002899800614433943)
assert_almost_equal(nu[2,0], 0.24301268861454037)
assert_almost_equal(nu[2,1], 0.045473992910668816)
assert_almost_equal(nu[3,0], -0.017278118992041805)
def test_orientation():
orientation = regionprops(SAMPLE)[0].orientation
assert_almost_equal(orientation, 0.10446844651921)
orientation2 = regionprops(SAMPLE.T)[0].orientation
assert_almost_equal(orientation2, math.pi / 2 - orientation)
diag = np.eye(10, dtype=int)
orientation_diag = regionprops(diag)[0].orientation
assert_almost_equal(orientation_diag, -math.pi / 4)
orientation_diag = regionprops(np.flipud(diag))[0].orientation
assert_almost_equal(orientation_diag, math.pi / 4)
orientation_diag = regionprops(np.fliplr(diag))[0].orientation
assert_almost_equal(orientation_diag, math.pi / 4)
orientation_diag = regionprops(np.fliplr(np.flipud(diag)))[0].orientation
assert_almost_equal(orientation_diag, -math.pi / 4)
def test_perimeter():
per = regionprops(SAMPLE)[0].perimeter
assert_almost_equal(per, 55.2487373415)
per = perimeter(SAMPLE.astype('double'), neighbourhood=8)
assert_almost_equal(per, 46.8284271247)
def test_solidity():
solidity = regionprops(SAMPLE)[0].solidity
assert_almost_equal(solidity, 0.580645161290323)
def test_weighted_moments_central():
wmu = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments_central
ref = np.array(
[[ 7.4000000000e+01, -2.1316282073e-13, 4.7837837838e+02,
-7.5943608473e+02],
[ 3.7303493627e-14, -8.7837837838e+01, -1.4801314828e+02,
-1.2714707125e+03],
[ 1.2602837838e+03, 2.1571526662e+03, 6.6989799420e+03,
1.5304076361e+04],
[ -7.6561796932e+02, -4.2385971907e+03, -9.9501164076e+03,
-3.3156729271e+04]]
)
np.set_printoptions(precision=10)
assert_array_almost_equal(wmu, ref)
def test_weighted_centroid():
centroid = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_centroid
assert_array_almost_equal(centroid, (5.540540540540, 9.445945945945))
def test_weighted_moments_hu():
whu = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments_hu
ref = np.array([
3.1750587329e-01,
2.1417517159e-02,
2.3609322038e-02,
1.2565683360e-03,
8.3014209421e-07,
-3.5073773473e-05,
6.7936409056e-06
])
assert_array_almost_equal(whu, ref)
def test_weighted_moments():
wm = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments
ref = np.array(
[[ 7.4000000000e+01, 4.1000000000e+02, 2.7500000000e+03,
1.9778000000e+04],
[ 6.9900000000e+02, 3.7850000000e+03, 2.4855000000e+04,
1.7500100000e+05],
[ 7.8630000000e+03, 4.4063000000e+04, 2.9347700000e+05,
2.0810510000e+06],
[ 9.7317000000e+04, 5.7256700000e+05, 3.9007170000e+06,
2.8078871000e+07]]
)
assert_array_almost_equal(wm, ref)
def test_weighted_moments_normalized():
wnu = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE
)[0].weighted_moments_normalized
ref = np.array(
[[ np.nan, np.nan, 0.0873590903, -0.0161217406],
[ np.nan, -0.0160405109, -0.0031421072, -0.0031376984],
[ 0.230146783, 0.0457932622, 0.0165315478, 0.0043903193],
[-0.0162529732, -0.0104598869, -0.0028544152, -0.0011057191]]
)
assert_array_almost_equal(wnu, ref)
def test_label_sequence():
a = np.empty((2, 2), dtype=np.int)
a[:, :] = 2
ps = regionprops(a)
assert len(ps) == 1
assert ps[0].label == 2
def test_pure_background():
a = np.zeros((2, 2), dtype=np.int)
ps = regionprops(a)
assert len(ps) == 0
def test_invalid():
ps = regionprops(SAMPLE)
def get_intensity_image():
ps[0].intensity_image
assert_raises(AttributeError, get_intensity_image)
def test_invalid_size():
wrong_intensity_sample = np.array([[1], [1]])
assert_raises(ValueError, regionprops, SAMPLE, wrong_intensity_sample)
def test_equals():
arr = np.zeros((100, 100), dtype=np.int)
arr[0:25, 0:25] = 1
arr[50:99, 50:99] = 2
regions = regionprops(arr)
r1 = regions[0]
regions = regionprops(arr)
r2 = regions[0]
r3 = regions[1]
assert_equal(r1 == r2, True, "Same regionprops are not equal")
assert_equal(r1 != r3, True, "Different regionprops are equal")
def test_iterate_all_props():
region = regionprops(SAMPLE)[0]
p0 = dict((p, region[p]) for p in region)
region = regionprops(SAMPLE, intensity_image=INTENSITY_SAMPLE)[0]
p1 = dict((p, region[p]) for p in region)
assert len(p0) < len(p1)
def test_cache():
region = regionprops(SAMPLE)[0]
f0 = region.filled_image
region._label_image[:10] = 1
f1 = region.filled_image
assert_array_equal(f0, f1)
region._cache_active = False
f1 = region.filled_image
assert np.any(f0 != f1)
def test_docstrings_and_props():
region = regionprops(SAMPLE)[0]
docs = _parse_docs()
props = [m for m in dir(region) if not m.startswith('_')]
nr_docs_parsed = len(docs)
nr_props = len(props)
assert_equal(nr_docs_parsed, nr_props)
ds = docs['weighted_moments_normalized']
assert 'iteration' not in ds
assert len(ds.split('\n')) > 3
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
| true | true |
f724579a93aae45369d068c1ff4226dc640ec527 | 1,153 | py | Python | Easy21/plot_cuts.py | vuk119/RL | 2f5309bfff719b2965060492a19d008ed8382856 | [
"MIT"
] | null | null | null | Easy21/plot_cuts.py | vuk119/RL | 2f5309bfff719b2965060492a19d008ed8382856 | [
"MIT"
] | null | null | null | Easy21/plot_cuts.py | vuk119/RL | 2f5309bfff719b2965060492a19d008ed8382856 | [
"MIT"
] | null | null | null | """
Some useful plot functions
"""
import matplotlib.pyplot as plt
import numpy as np
def matrix_surf(m, xlimits=None, ylimits=None, **kwargs):
if xlimits is None:
xlimits = [0, m.shape[0]]
if ylimits is None:
ylimits = [0, m.shape[1]]
Y, X = np.meshgrid(np.arange(ylimits[0], ylimits[1]), np.arange(xlimits[0], xlimits[1]))
fig = plt.figure()
ax = fig.add_subplot(111,projection='3d',**kwargs)
ax.plot_surface(X,Y,m)
plt.show()
def matrix_scatter(m):
X=[]
Y=[]
Z=[]
for i in range(m.shape[0]):
for j in range(m.shape[1]):
X.append(i)
Y.append(j)
Z.append(m[i,j])
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(X, Y, Z)
plt.show()
if __name__ == '__main__':
    # Quick visual check: a small matrix with a peak in one corner.
    mat = np.zeros((6, 5))
    mat[0, 0] = 5
    mat[0, 1] = mat[1, 0] = 4
    mat[1, 1] = mat[1, 2] = mat[2, 1] = 3
    mat[0, 2] = mat[2, 0] = mat[0, 3] = mat[3, 0] = 3
    matrix_surf(mat, xlabel='X AXIS', ylabel='Y AXIS', zlabel='Z',
                xticks=range(10))
| 18.901639 | 92 | 0.548135 |
import matplotlib.pyplot as plt
import numpy as np
def matrix_surf(m, xlimits=None, ylimits=None, **kwargs):
if xlimits is None:
xlimits = [0, m.shape[0]]
if ylimits is None:
ylimits = [0, m.shape[1]]
Y, X = np.meshgrid(np.arange(ylimits[0], ylimits[1]), np.arange(xlimits[0], xlimits[1]))
fig = plt.figure()
ax = fig.add_subplot(111,projection='3d',**kwargs)
ax.plot_surface(X,Y,m)
plt.show()
def matrix_scatter(m):
X=[]
Y=[]
Z=[]
for i in range(m.shape[0]):
for j in range(m.shape[1]):
X.append(i)
Y.append(j)
Z.append(m[i,j])
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(X, Y, Z)
plt.show()
| true | true |
f724596b02b40510e329925892cf1f8457e42c59 | 2,489 | py | Python | progs/Eval/docReader.py | feddy/Argumentation-Mining | ed2915552d373bdfebf92d4c1d0a356d37bd1f7d | [
"Apache-2.0"
] | 57 | 2017-04-24T21:53:24.000Z | 2022-03-29T11:12:13.000Z | progs/Eval/docReader.py | feddy/Argumentation-Mining | ed2915552d373bdfebf92d4c1d0a356d37bd1f7d | [
"Apache-2.0"
] | 4 | 2017-08-22T07:13:31.000Z | 2020-09-02T05:47:52.000Z | progs/Eval/docReader.py | feddy/Argumentation-Mining | ed2915552d373bdfebf92d4c1d0a356d37bd1f7d | [
"Apache-2.0"
] | 22 | 2017-10-01T06:58:19.000Z | 2021-11-04T00:59:46.000Z | #! /usr/bin/python
import sys
def readDocs(fn):
docs=[]
doc=[]
for line in open(fn):
line = line.strip()
if line=="":
if doc!=[]: docs.append(doc)
doc=[]
else:
doc.append(line)
if doc!=[]:
docs.append(doc)
return docs
def readDocsFine(fn,field=5):
docs=[]
doc=[[]]
lastLabel = None
for line in open(fn):
line = line.strip()
if line=="":
if doc!=[[]]: docs.append(doc)
doc=[[]]
lastLabel = None
else:
x = line.split("\t")
label = x[field]
# print label
if label.startswith("B-"): # and lastLabel!="O" and lastLabel:
if doc[-1]!=[]:
doc.append([])
elif label.startswith("O") and lastLabel!="O" and lastLabel:
if doc[-1]!=[]:
doc.append([])
doc[-1].append(line)
lastLabel = label[0]
if doc!=[[]]:
docs.append(doc)
return docs
def readDocsFine2(fn,field):
docs=[]
doc=[[]]
argTypes = []
atype = []
lastLabel = None
for line in open(fn):
line = line.strip()
if line=="":
if doc!=[[]]:
docs.append(doc)
argTypes.append(atype)
doc=[[]]
atype = []
lastLabel = None
else:
x = line.split("\t")
#print x
label = x[field]
if label.startswith("B-"): # and lastLabel!="O" and lastLabel:
atype.append(label.split(":")[0])
if doc[-1]!=[]:
doc.append([])
elif label.startswith("O") and lastLabel!="O" and lastLabel:
atype.append(None)
if doc[-1]!=[]:
doc.append([])
elif label.startswith("O") and lastLabel!="O":
atype.append(None)
doc[-1].append(line)
lastLabel = label[0]
if doc!=[[]]:
docs.append(doc)
argTypes.append(atype)
return docs,argTypes
if __name__ == "__main__":
import random
docs = readDocs(sys.argv[1])
random.shuffle(docs)
n = int(sys.argv[2])
for doc in docs[:n]:
for line in doc:
x = line.split("\t")
x[0] = x[0].split("_")[-1]
print("\t".join(x))
        print()
for doc in docs[n:]:
for line in doc:
x = line.split("\t")
x[0] = x[0].split("_")[-1]
sys.stderr.write("\t".join(x)+"\n")
sys.stderr.write("\n")
    sys.exit(1)
    # NOTE: the demo below is unreachable unless sys.exit(1) above is removed.
    docs = readDocsFine(sys.argv[1])
    n = int(sys.argv[2])
    print(len(docs[n]))
    print(docs[n])
    for comp in docs[n]:
        for line in comp:
            print(line)
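# --- Editor sketch (not in the original): the input format the readers above
# expect -- tab-separated token lines with a BIO label in one column (index 5
# by default for readDocsFine), documents separated by blank lines. The column
# layout below is illustrative only. Defined after the CLI block, so it is
# only reachable when this module is imported.
def _demo_readers():
    import os
    import tempfile
    lines = ["essay1_1\tThe\tO\tO\tO\tO",
             "essay1_2\tclaim\tO\tO\tO\tB-Claim",
             "essay1_3\tholds\tO\tO\tO\tI-Claim",
             "",
             "essay2_1\tAnother\tO\tO\tO\tO"]
    with tempfile.NamedTemporaryFile("w", suffix=".conll", delete=False) as f:
        f.write("\n".join(lines))
        name = f.name
    docs = readDocs(name)        # -> 2 documents
    fine = readDocsFine(name)    # -> documents split into O / B-I segments
    os.remove(name)
    return docs, fine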
| 22.026549 | 72 | 0.505424 |
import sys
def readDocs(fn):
docs=[]
doc=[]
for line in open(fn):
line = line.strip()
if line=="":
if doc!=[]: docs.append(doc)
doc=[]
else:
doc.append(line)
if doc!=[]:
docs.append(doc)
return docs
def readDocsFine(fn,field=5):
docs=[]
doc=[[]]
lastLabel = None
for line in open(fn):
line = line.strip()
if line=="":
if doc!=[[]]: docs.append(doc)
doc=[[]]
lastLabel = None
else:
x = line.split("\t")
label = x[field]
if label.startswith("B-"):
if doc[-1]!=[]:
doc.append([])
elif label.startswith("O") and lastLabel!="O" and lastLabel:
if doc[-1]!=[]:
doc.append([])
doc[-1].append(line)
lastLabel = label[0]
if doc!=[[]]:
docs.append(doc)
return docs
def readDocsFine2(fn,field):
docs=[]
doc=[[]]
argTypes = []
atype = []
lastLabel = None
for line in open(fn):
line = line.strip()
if line=="":
if doc!=[[]]:
docs.append(doc)
argTypes.append(atype)
doc=[[]]
atype = []
lastLabel = None
else:
x = line.split("\t")
label = x[field]
if label.startswith("B-"):
atype.append(label.split(":")[0])
if doc[-1]!=[]:
doc.append([])
elif label.startswith("O") and lastLabel!="O" and lastLabel:
atype.append(None)
if doc[-1]!=[]:
doc.append([])
elif label.startswith("O") and lastLabel!="O":
atype.append(None)
doc[-1].append(line)
lastLabel = label[0]
if doc!=[[]]:
docs.append(doc)
argTypes.append(atype)
return docs,argTypes
if __name__ == "__main__":
import random
docs = readDocs(sys.argv[1])
random.shuffle(docs)
n = int(sys.argv[2])
for doc in docs[:n]:
for line in doc:
x = line.split("\t")
x[0] = x[0].split("_")[-1]
print("\t".join(x))
    print()
for doc in docs[n:]:
for line in doc:
x = line.split("\t")
x[0] = x[0].split("_")[-1]
sys.stderr.write("\t".join(x)+"\n")
sys.stderr.write("\n")
sys.exit(1)
docs=readDocsFine(sys.argv[1])
n=int(sys.argv[2])
    print(len(docs[n]))
    print(docs[n])
    for comp in docs[n]:
        for line in comp:
            print(line)
| false | true |
f7245a27daac0be722083cf7badfe71aa73117d0 | 110 | py | Python | torsionfit/tests/test_import.py | ChayaSt/torsionfit | 0b810b6da4a930b13c3ab8f8b700c6834824173b | [
"MIT"
] | 14 | 2015-10-09T15:46:09.000Z | 2020-11-25T15:30:28.000Z | torsionfit/tests/test_import.py | ChayaSt/torsionfit | 0b810b6da4a930b13c3ab8f8b700c6834824173b | [
"MIT"
] | 25 | 2015-08-28T02:09:08.000Z | 2019-08-06T19:29:41.000Z | torsionfit/tests/test_import.py | ChayaSt/torsionfit | 0b810b6da4a930b13c3ab8f8b700c6834824173b | [
"MIT"
] | 3 | 2015-05-14T19:34:03.000Z | 2015-07-27T21:46:36.000Z | #!/usr/bin/python
def test_import():
"""
Testing import of torsionfit.
"""
import torsionfit
| 13.75 | 33 | 0.609091 |
def test_import():
import torsionfit
| true | true |
f7245a5a8dda4747976414d44dfdefe86c9c2717 | 1,861 | py | Python | pyvoqc/cirq/voqc_optimization.py | akshajgaur/pyvoqc | 6352d64542be5fed72e7cae941d4a2a7db012a4f | [
"MIT"
] | 1 | 2021-11-17T10:50:50.000Z | 2021-11-17T10:50:50.000Z | pyvoqc/cirq/voqc_optimization.py | akshajgaur/pyvoqc | 6352d64542be5fed72e7cae941d4a2a7db012a4f | [
"MIT"
] | 5 | 2021-03-14T20:13:25.000Z | 2021-04-10T01:15:05.000Z | pyvoqc/cirq/voqc_optimization.py | akshajgaur/pyvoqc | 6352d64542be5fed72e7cae941d4a2a7db012a4f | [
"MIT"
] | 2 | 2021-03-13T16:16:34.000Z | 2022-01-27T19:28:15.000Z | from cirq import circuits, ops, protocols
import cirq
from cirq.contrib.qasm_import import circuit_from_qasm, qasm
import re
import os
from cirq import decompose
from cirq.circuits import Circuit
from pyvoqc.formatting.format_from_qasm import format_from_qasm
from pyvoqc.formatting.rzq_to_rz import rzq_to_rz
from pyvoqc.voqc import VOQC
from pyvoqc.exceptions import InvalidVOQCFunction,InvalidVOQCGate
from pyvoqc.cirq.decompose_cirq_gates import *
class CqVOQC:
def __init__(self, func = None):
self.functions = ["optimize", "not_propagation", "cancel_single_qubit_gates", "cancel_two_qubit_gates", "hadamard_reduction", "merge_rotations"]
self.func = func if func else ["optimize"]
for i in range(len(self.func)):
if ((self.func[i] in self.functions) == False):
raise InvalidVOQCFunction(str(self.func[i]), self.functions)
def optimize_circuit(self, circuit: circuits.Circuit):
#Write qasm file from circuit
circuit = Circuit(decompose(circuit, intercepting_decomposer=decompose_library,keep=need_to_keep))
qasm_str = cirq.qasm(circuit)
f = open("temp.qasm", "w")
f.write(qasm_str)
f.close()
#Call VOQC optimizations from input list and go from rzq to rz
        self.function_call("temp.qasm")
rzq_to_rz("temp2.qasm")
#Get Cirq Circuit from qasm file
with open("temp2.qasm", "r") as f:
c = f.read()
circ = circuit_from_qasm(c)
#Remove temporary files
os.remove("temp.qasm")
os.remove("temp2.qasm")
return circ
def function_call(self,fname_in):
a = VOQC(fname_in, False)
for i in range(len(self.func)):
call = getattr(a,self.func[i])
call()
return a.write("temp2.qasm")
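# --- Editor sketch (not part of the original module): applying the pass to a
# small circuit. Assumes the VOQC library behind pyvoqc is installed; the
# qubits and gates are arbitrary.
if __name__ == "__main__":
    q0, q1 = cirq.LineQubit.range(2)
    demo = Circuit([cirq.H(q0), cirq.CNOT(q0, q1), cirq.T(q1)])
    print(CqVOQC(["optimize"]).optimize_circuit(demo))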
| 36.490196 | 152 | 0.661472 | from cirq import circuits, ops, protocols
import cirq
from cirq.contrib.qasm_import import circuit_from_qasm, qasm
import re
import os
from cirq import decompose
from cirq.circuits import Circuit
from pyvoqc.formatting.format_from_qasm import format_from_qasm
from pyvoqc.formatting.rzq_to_rz import rzq_to_rz
from pyvoqc.voqc import VOQC
from pyvoqc.exceptions import InvalidVOQCFunction,InvalidVOQCGate
from pyvoqc.cirq.decompose_cirq_gates import *
class CqVOQC:
def __init__(self, func = None):
self.functions = ["optimize", "not_propagation", "cancel_single_qubit_gates", "cancel_two_qubit_gates", "hadamard_reduction", "merge_rotations"]
self.func = func if func else ["optimize"]
for i in range(len(self.func)):
if ((self.func[i] in self.functions) == False):
raise InvalidVOQCFunction(str(self.func[i]), self.functions)
def optimize_circuit(self, circuit: circuits.Circuit):
circuit = Circuit(decompose(circuit, intercepting_decomposer=decompose_library,keep=need_to_keep))
qasm_str = cirq.qasm(circuit)
f = open("temp.qasm", "w")
f.write(qasm_str)
f.close()
t = self.function_call("temp.qasm")
rzq_to_rz("temp2.qasm")
with open("temp2.qasm", "r") as f:
c = f.read()
circ = circuit_from_qasm(c)
os.remove("temp.qasm")
os.remove("temp2.qasm")
return circ
def function_call(self,fname_in):
a = VOQC(fname_in, False)
for i in range(len(self.func)):
call = getattr(a,self.func[i])
call()
return a.write("temp2.qasm")
| true | true |
f7245abaa8a5cb38551388a2aee2cf80e1403e41 | 12,195 | py | Python | Scripts/plot_ProfileVar_Monthly_FDR.py | zmlabe/StratoVari | c5549f54482a2b05e89bded3e3b0b3c9faa686f3 | [
"MIT"
] | 4 | 2019-11-23T19:44:21.000Z | 2020-02-20T16:54:45.000Z | Scripts/plot_ProfileVar_Monthly_FDR.py | zmlabe/StratoVari | c5549f54482a2b05e89bded3e3b0b3c9faa686f3 | [
"MIT"
] | null | null | null | Scripts/plot_ProfileVar_Monthly_FDR.py | zmlabe/StratoVari | c5549f54482a2b05e89bded3e3b0b3c9faa686f3 | [
"MIT"
] | 2 | 2019-06-21T19:27:55.000Z | 2021-02-12T19:13:22.000Z | """
Plot vertical profiles of PAMIP data for each month from November to April
using the ensemble mean (300 members)
Notes
-----
Author : Zachary Labe
Date : 26 June 2019
"""
### Import modules
import numpy as np
import matplotlib.pyplot as plt
import datetime
import read_MonthlyData as MO
import statsmodels.stats.multitest as fdr
import cmocean
import itertools
### Define directories
directorydata = '/seley/zlabe/simu/'
directoryfigure = '/home/zlabe/Desktop/STRATOVARI/'
#directoryfigure = '/home/zlabe/Documents/Research/SITperturb/Figures/'
### Define time
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
titletime = currentmn + '/' + currentdy + '/' + currentyr
print('\n' '----Plotting Monthly Vertical Profiles- %s----' % titletime)
### Alott time series (300 ensemble members)
year1 = 1701
year2 = 2000
years = np.arange(year1,year2+1,1)
###############################################################################
###############################################################################
###############################################################################
### Call arguments
varnames = ['U','GEOP','TEMP','V','EGR']
def calc_indttestfdr(varx,vary):
"""
Function calculates statistical difference for 2 independent
sample t-test
Parameters
----------
varx : 3d array
vary : 3d array
Returns
-------
stat = calculated t-statistic
pvalue = two-tailed p-value
Usage
-----
    stat,pvalue = calc_indttestfdr(varx,vary)
"""
    print('\n>>> Using calc_indttestfdr function!')
### Import modules
import scipy.stats as sts
### 2-independent sample t-test
stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit')
    print('*Completed: Finished calc_indttestfdr function!')
return stat,pvalue
######################
def readDataPeriods(varnames,sliceq):
### Call function for 4d variable data
lat,lon,lev,varfuture = MO.readExperiAll(varnames,'Future','profile')
lat,lon,lev,varpast = MO.readExperiAll(varnames,'Past','profile')
### Select ensemble mean period
if sliceq == 'Mean':
varfuture = varfuture[:,:,:,:,:]
varpast = varpast[:,:,:,:,:]
elif sliceq == 'A':
varfuture = varfuture[:100,:,:,:,:]
varpast = varpast[:100,:,:,:,:]
elif sliceq == 'B':
varfuture = varfuture[100:200,:,:,:,:]
varpast = varpast[100:200,:,:,:,:]
elif sliceq == 'C':
varfuture = varfuture[200:,:,:,:,:]
varpast = varpast[200:,:,:,:,:]
### Create 2d array of latitude and longitude
lon2,lat2 = np.meshgrid(lon,lat)
### Remove missing data
varfuture[np.where(varfuture <= -1e10)] = np.nan
varpast[np.where(varpast <= -1e10)] = np.nan
### Rearrange months (N,D,J,F,M,A)
varfuturem = np.append(varfuture[:,-2:,:,:,:],varfuture[:,:4,:,:,:],
axis=1)
varpastm = np.append(varpast[:,-2:,:,:,:],varpast[:,:4,:,:,:],axis=1)
### Calculate zonal means
varfuturemz = np.nanmean(varfuturem,axis=4)
varpastmz = np.nanmean(varpastm,axis=4)
### Calculate anomalies
anompi = varfuturemz - varpastmz
### Calculate ensemble mean
anompim = np.nanmean(anompi,axis=0)
zdiffruns = anompim
### Calculate climatologies
zclimo = np.nanmean(varpastmz,axis=0)
### Calculate significance for each month
stat_past = np.empty((varpastm.shape[1],len(lev),len(lat)))
pvalue_past = np.empty((varpastm.shape[1],len(lev),len(lat)))
for i in range(varpastm.shape[1]):
stat_past[i],pvalue_past[i] = calc_indttestfdr(varfuturemz[:,i,:,:],
varpastmz[:,i,:,:])
### Ravel into month x all p values
prunsr = np.reshape(pvalue_past,
(pvalue_past.shape[0],pvalue_past.shape[1] \
* pvalue_past.shape[2]))
### Calculate false discovery rate
prunsq = np.empty((prunsr.shape))
prunsq.fill(np.nan)
prunsqq = np.empty((prunsr.shape[1]))
prunsqq.fill(np.nan)
for i in range(prunsr.shape[0]):
### Check for nans before correction!!
mask = np.isfinite(prunsr[i,:])
prunsrr = prunsr[i,:]
score,prunsqq[mask] = fdr.fdrcorrection(prunsrr[mask],alpha=0.05,
method='indep')
prunsq[i,:] = prunsqq
### Reshape into month x lat x lon
pruns = np.reshape(prunsq,(pvalue_past.shape))
### Mask variables by their adjusted p-values
pruns[np.where(pruns >= 0.05)] = np.nan
pruns[np.where(pruns < 0.05)] = 1.
pruns[np.where(np.isnan(pruns))] = 0.
return zdiffruns,zclimo,pruns,lat,lon,lev
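# --- Editor sketch (not part of the original script): the FDR step above in
# miniature. statsmodels' fdrcorrection returns a reject mask and adjusted
# p-values; NaNs must be masked out first, exactly as in readDataPeriods.
def _fdr_demo():
    pvals = np.array([0.001, 0.20, np.nan, 0.03, 0.40])
    adjusted = np.full(pvals.shape, np.nan)
    mask = np.isfinite(pvals)
    reject, adjusted[mask] = fdr.fdrcorrection(pvals[mask], alpha=0.05,
                                               method='indep')
    return reject, adjusted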
###########################################################################
###########################################################################
###########################################################################
### Read in data
for v in range(len(varnames)):
diffm,climom,pvalm,lat,lon,lev = readDataPeriods(varnames[v],'Mean')
diffa,climoa,pvala,lat,lon,lev = readDataPeriods(varnames[v],'A')
diffb,climob,pvalb,lat,lon,lev = readDataPeriods(varnames[v],'B')
diffc,climoc,pvalc,lat,lon,lev = readDataPeriods(varnames[v],'C')
varn = list(itertools.chain(*[diffm,diffa,diffb,diffc]))
zclimo = list(itertools.chain(*[climom,climoa,climob,climoc]))
pvarn = list(itertools.chain(*[pvalm,pvala,pvalb,pvalc]))
### Plot Variables
plt.rc('text',usetex=True)
plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
### Set limits for contours and colorbars
if varnames[v] == 'U':
limit = np.arange(-2,2.1,0.1)
barlim = np.arange(-2,3,1)
elif varnames[v] == 'TEMP':
limit = np.arange(-4,4.1,0.2)
barlim = np.arange(-4,5,1)
elif varnames[v] == 'GEOP':
limit = np.arange(-60,61,2)
barlim = np.arange(-60,61,30)
elif varnames[v] == 'V':
limit = np.arange(-0.2,0.21,0.02)
barlim = np.arange(-0.2,0.3,0.1)
elif varnames[v] == 'EGR':
limit = np.arange(-0.08,0.081,0.005)
barlim = np.arange(-0.08,0.09,0.04)
zscale = np.array([1000,700,500,300,200,
100,50,30,10])
latq,levq = np.meshgrid(lat,lev)
fig = plt.figure()
for i in range(len(varn)):
ax1 = plt.subplot(4,6,i+1)
ax1.spines['top'].set_color('dimgrey')
ax1.spines['right'].set_color('dimgrey')
ax1.spines['bottom'].set_color('dimgrey')
ax1.spines['left'].set_color('dimgrey')
ax1.spines['left'].set_linewidth(2)
ax1.spines['bottom'].set_linewidth(2)
ax1.spines['right'].set_linewidth(2)
ax1.spines['top'].set_linewidth(2)
ax1.tick_params(axis='y',direction='out',which='major',pad=3,
width=2,color='dimgrey')
ax1.tick_params(axis='x',direction='out',which='major',pad=3,
width=2,color='dimgrey')
cs = plt.contourf(lat,lev,varn[i]*pvarn[i],limit,extend='both')
if varnames[v] == 'U':
cs2 = plt.contour(lat,lev,zclimo[i],np.arange(-20,101,5),
linewidths=0.5,colors='dimgrey')
plt.gca().invert_yaxis()
        plt.yscale('log')  # the 'nonposy' kwarg was removed in Matplotlib 3.5; 'clip' is the default
plt.xticks(np.arange(0,96,30),map(str,np.arange(0,91,30)),fontsize=5)
plt.yticks(zscale,map(str,zscale),ha='right',fontsize=5)
plt.minorticks_off()
plt.xlim([0,90])
plt.ylim([1000,10])
        if any([i==0,i==6,i==12,i==18]):
            ax1.tick_params(labelleft=True)
        else:
            ax1.tick_params(labelleft=False)
        if i < 18:
            ax1.tick_params(labelbottom=False)
if any([i==0,i==6,i==12]):
ax1.tick_params(axis='y',direction='out',which='major',pad=3,
width=2,color='dimgrey')
ax1.tick_params(axis='x',direction='out',which='major',pad=3,
width=0,color='dimgrey')
else:
if i < 24 and i != 18:
ax1.tick_params(axis='y',direction='out',which='major',pad=3,
width=0,color='dimgrey')
if i < 18:
ax1.tick_params(axis='y',direction='out',which='major',
pad=3,width=0,color='dimgrey')
ax1.tick_params(axis='x',direction='out',which='major',
pad=3,width=0,color='dimgrey')
if varnames[v] == 'U':
cmap = cmocean.cm.balance
cs.set_cmap(cmap)
elif varnames[v] == 'TEMP':
cmap = cmocean.cm.balance
cs.set_cmap(cmap)
elif varnames[v] == 'GEOP':
cmap = cmocean.cm.balance
cs.set_cmap(cmap)
elif varnames[v] == 'V':
cmap = cmocean.cm.balance
cs.set_cmap(cmap)
elif varnames[v] == 'EGR':
cmap = cmocean.cm.diff
cs.set_cmap(cmap)
labelmonths = [r'NOV',r'DEC',r'JAN',r'FEB',r'MAR',r'APR']
if i < 6:
ax1.annotate(r'\textbf{%s}' % labelmonths[i],
xy=(0, 0),xytext=(0.5,1.13),xycoords='axes fraction',
fontsize=13,color='dimgrey',rotation=0,
ha='center',va='center')
if i==0:
plt.annotate(r'\textbf{Mean}',
xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
fontsize=15,color='k',rotation=90,
ha='center',va='center')
elif i==6:
plt.annotate(r'\textbf{A}',
xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
fontsize=15,color='k',rotation=90,
ha='center',va='center')
elif i==12:
plt.annotate(r'\textbf{B}',
xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
fontsize=15,color='k',rotation=90,
ha='center',va='center')
elif i==18:
plt.annotate(r'\textbf{C}',
xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
fontsize=15,color='k',rotation=90,
ha='center',va='center')
cbar_ax = fig.add_axes([0.312,0.07,0.4,0.02])
cbar = fig.colorbar(cs,cax=cbar_ax,orientation='horizontal',
extend='both',extendfrac=0.07,drawedges=False)
if varnames[v] == 'U':
cbar.set_label(r'\textbf{m/s}',fontsize=9,color='dimgray',
labelpad=0)
elif varnames[v] == 'TEMP':
cbar.set_label(r'\textbf{$^\circ$C}',fontsize=9,color='dimgray',
labelpad=0)
elif varnames[v] == 'GEOP':
cbar.set_label(r'\textbf{m}',fontsize=9,color='dimgray',
labelpad=0)
elif varnames[v] == 'V':
cbar.set_label(r'\textbf{m/s}',fontsize=9,color='dimgray',
labelpad=0)
elif varnames[v] == 'EGR':
cbar.set_label(r'\textbf{1/day}',fontsize=9,color='dimgray',
labelpad=0)
cbar.set_ticks(barlim)
cbar.set_ticklabels(list(map(str,barlim)))
cbar.ax.tick_params(axis='x', size=.01)
cbar.outline.set_edgecolor('dimgrey')
cbar.outline.set_linewidth(0.5)
cbar.ax.tick_params(labelsize=6)
    plt.annotate(r'\textbf{Latitude ($^{\circ}$N)}',
xy=(0, 0),xytext=(0.515,0.12),xycoords='figure fraction',
fontsize=6,color='k',rotation=0,
ha='center',va='center')
plt.subplots_adjust(hspace=0.1,bottom=0.17,top=0.93,wspace=0.1)
plt.savefig(directoryfigure + '%s_MonthlyProfiles_100yr_FDR.png' % varnames[v],
dpi=300)
print('Completed: Script done!')
| 36.8429 | 83 | 0.526117 |
import numpy as np
import matplotlib.pyplot as plt
import datetime
import read_MonthlyData as MO
import statsmodels.stats.multitest as fdr
import cmocean
import itertools
directorydata = '/seley/zlabe/simu/'
directoryfigure = '/home/zlabe/Desktop/STRATOVARI/'
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
titletime = currentmn + '/' + currentdy + '/' + currentyr
print('\n' '----Plotting Monthly Vertical Profiles- %s----' % titletime)
| true | true |
f7245c8c7d5a9be02f2a22ebc20f3d4370dc8234 | 1,216 | py | Python | package.py | rittikaadhikari/stock-recommendation | 1f14276a955301b1c6fa1c00bd88b00cf5668d8c | [
"MIT"
] | null | null | null | package.py | rittikaadhikari/stock-recommendation | 1f14276a955301b1c6fa1c00bd88b00cf5668d8c | [
"MIT"
] | null | null | null | package.py | rittikaadhikari/stock-recommendation | 1f14276a955301b1c6fa1c00bd88b00cf5668d8c | [
"MIT"
] | null | null | null | # Inspired by npm's package.json file
name = 'hisa'
version = '0.1.0'
release = '0.1.0'
description = 'A stock market predictor and model builder'
long_description = ['README.md']
keywords = ['neural', 'network', 'machine', 'deep',
'learning', 'tensorflow', 'stock', 'market', 'prediction']
authors = [
{ 'name': 'Rittika Adhikari', 'email': 'rittika.adhikari@gmail.com' },
{ 'name': 'Sahil Modi', 'email': 'sm34524@gmail.com'},
{ 'name': 'Utkarsh Awasthi', 'email': 'navamawasthi@gmail.com'}
]
maintainers = [
{ 'name': 'Rittika Adhikari', 'email': 'rittika.adhikari@gmail.com' },
{ 'name': 'Sahil Modi', 'email': 'sm34524@gmail.com'},
{ 'name': 'Utkarsh Awasthi', 'email': 'navamawasthi@gmail.com'}
]
license = 'MIT'
modules = [
'hisa',
'hisa.config',
'hisa._util',
'hisa.capsule',
'hisa.learn',
'hisa.learn.models',
'hisa.learn.sentiment',
]
github_username = 'rittikaadhikari'
github_repository = 'hisa'
github_url = '{baseurl}/{username}/{repository}'.format(
baseurl = 'https://github.com',
username = github_username,
repository = github_repository)
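# --- Editor sketch (assumption, not part of the original project): a setup.py
# could consume this module npm-style. The setuptools field mapping below is
# illustrative only.
# from setuptools import setup
# import package
# setup(name=package.name,
#       version=package.version,
#       description=package.description,
#       keywords=package.keywords,
#       license=package.license,
#       packages=package.modules,
#       url=package.github_url)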
| 34.742857 | 74 | 0.591283 |
name = 'hisa'
version = '0.1.0'
release = '0.1.0'
description = 'A stock market predictor and model builder'
long_description = ['README.md']
keywords = ['neural', 'network', 'machine', 'deep',
'learning', 'tensorflow', 'stock', 'market', 'prediction']
authors = [
{ 'name': 'Rittika Adhikari', 'email': 'rittika.adhikari@gmail.com' },
{ 'name': 'Sahil Modi', 'email': 'sm34524@gmail.com'},
{ 'name': 'Utkarsh Awasthi', 'email': 'navamawasthi@gmail.com'}
]
maintainers = [
{ 'name': 'Rittika Adhikari', 'email': 'rittika.adhikari@gmail.com' },
{ 'name': 'Sahil Modi', 'email': 'sm34524@gmail.com'},
{ 'name': 'Utkarsh Awasthi', 'email': 'navamawasthi@gmail.com'}
]
license = 'MIT'
modules = [
'hisa',
'hisa.config',
'hisa._util',
'hisa.capsule',
'hisa.learn',
'hisa.learn.models',
'hisa.learn.sentiment',
]
github_username = 'rittikaadhikari'
github_repository = 'hisa'
github_url = '{baseurl}/{username}/{repository}'.format(
baseurl = 'https://github.com',
username = github_username,
repository = github_repository)
| true | true |
f7245d00c96a3042401b96defdbc1bd8590b3469 | 425 | py | Python | raspberrypi/blinkgpio24.py | jbrucepayne/pitoys | 49a60dd62f57e1bf5f463fa11dbf3a070ef152f1 | [
"MIT"
] | null | null | null | raspberrypi/blinkgpio24.py | jbrucepayne/pitoys | 49a60dd62f57e1bf5f463fa11dbf3a070ef152f1 | [
"MIT"
] | null | null | null | raspberrypi/blinkgpio24.py | jbrucepayne/pitoys | 49a60dd62f57e1bf5f463fa11dbf3a070ef152f1 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import RPi.GPIO as GPIO
import datetime
import time
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(24, GPIO.OUT)
print "Starting to blink GPIO 24" + datetime.datetime.now().isoformat()
while True:
print "GPIO On : " + datetime.datetime.now().isoformat()
GPIO.output(24, True)
time.sleep(2)
print "GPIO Off: " + datetime.datetime.now().isoformat()
GPIO.output(24, False)
time.sleep(2)
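# --- Editor note: on current Raspberry Pi OS images the same blink can be
# written with gpiozero, which manages pin setup/cleanup itself. Sketch only;
# assumes the gpiozero package is installed.
# from gpiozero import LED
# from signal import pause
# led = LED(24)
# led.blink(on_time=2, off_time=2)
# pause()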
| 26.5625 | 71 | 0.72 |
import RPi.GPIO as GPIO
import datetime
import time
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(24, GPIO.OUT)
print "Starting to blink GPIO 24" + datetime.datetime.now().isoformat()
while True:
print "GPIO On : " + datetime.datetime.now().isoformat()
GPIO.output(24, True)
time.sleep(2)
print "GPIO Off: " + datetime.datetime.now().isoformat()
GPIO.output(24, False)
time.sleep(2)
| false | true |
f7245ff14c37b2aedccbcae8d5af768476c65b57 | 587 | py | Python | setup.py | dimitri-yatsenko/adamacs | c83ce744d207fb5fa3e7069a15cff4a52b9dcf52 | [
"MIT"
] | null | null | null | setup.py | dimitri-yatsenko/adamacs | c83ce744d207fb5fa3e7069a15cff4a52b9dcf52 | [
"MIT"
] | null | null | null | setup.py | dimitri-yatsenko/adamacs | c83ce744d207fb5fa3e7069a15cff4a52b9dcf52 | [
"MIT"
] | null | null | null | from setuptools import setup
with open('README.md') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
setup(
name='adamacs',
version='0.0.1',
description='Architectures for Data Management and Computational Support.',
long_description=readme,
author='Daniel Müller-Komorowska',
author_email='danielmuellermsc@gmail.com',
url='https://github.com/SFB1089/adamacs.git',
license=license,
packages=['adamacs'],
install_requires=[
'numpy',
'pandas',
'matplotlib',
'scipy'
])
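# --- Editor note: with this setup.py, an editable development install is
# `pip install -e .` from the repository root.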
| 22.576923 | 79 | 0.623509 | from setuptools import setup
with open('README.md') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
setup(
name='adamacs',
version='0.0.1',
description='Architectures for Data Management and Computational Support.',
long_description=readme,
author='Daniel Müller-Komorowska',
author_email='danielmuellermsc@gmail.com',
url='https://github.com/SFB1089/adamacs.git',
license=license,
packages=['adamacs'],
install_requires=[
'numpy',
'pandas',
'matplotlib',
'scipy'
])
| true | true |
f7246097ee00e1071047af06705d6ed6469d67cb | 7,066 | py | Python | tensorflow/contrib/keras/python/keras/models_test.py | DEVESHTARASIA/tensorflow | d3edb8c60ed4fd831d62833ed22f5c23486c561c | [
"Apache-2.0"
] | 384 | 2017-02-21T18:38:04.000Z | 2022-02-22T07:30:25.000Z | tensorflow/contrib/keras/python/keras/models_test.py | DEVESHTARASIA/tensorflow | d3edb8c60ed4fd831d62833ed22f5c23486c561c | [
"Apache-2.0"
] | 15 | 2017-03-01T20:18:43.000Z | 2020-05-07T10:33:51.000Z | tensorflow/contrib/keras/python/keras/models_test.py | DEVESHTARASIA/tensorflow | d3edb8c60ed4fd831d62833ed22f5c23486c561c | [
"Apache-2.0"
] | 81 | 2017-02-21T19:31:19.000Z | 2022-02-22T07:30:24.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.python.platform import test
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
class TestModelSaving(test.TestCase):
def test_sequential_model_saving(self):
if h5py is None:
return # Skip test if models cannot be saved.
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.RepeatVector(3))
model.add(keras.layers.TimeDistributed(keras.layers.Dense(3)))
model.compile(loss=keras.losses.MSE,
optimizer=keras.optimizers.RMSprop(lr=0.0001),
metrics=[keras.metrics.categorical_accuracy],
sample_weight_mode='temporal')
x = np.random.random((1, 3))
y = np.random.random((1, 3, 3))
model.train_on_batch(x, y)
out = model.predict(x)
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
new_model = keras.models.load_model(fname)
os.remove(fname)
out2 = new_model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
# test that new updates are the same with both models
x = np.random.random((1, 3))
y = np.random.random((1, 3, 3))
model.train_on_batch(x, y)
new_model.train_on_batch(x, y)
out = model.predict(x)
out2 = new_model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
def test_sequential_model_saving_2(self):
if h5py is None:
return # Skip test if models cannot be saved.
with self.test_session():
# test with custom optimizer, loss
class CustomOp(keras.optimizers.RMSprop):
pass
def custom_loss(y_true, y_pred):
return keras.losses.mse(y_true, y_pred)
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss=custom_loss, optimizer=CustomOp(), metrics=['acc'])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)
out = model.predict(x)
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(
fname,
custom_objects={'CustomOp': CustomOp,
'custom_loss': custom_loss})
os.remove(fname)
out2 = model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
def test_functional_model_saving(self):
if h5py is None:
return # Skip test if models cannot be saved.
with self.test_session():
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
output = keras.layers.Dense(3)(x)
model = keras.models.Model(inputs, output)
model.compile(loss=keras.losses.MSE,
optimizer=keras.optimizers.RMSprop(lr=0.0001),
metrics=[keras.metrics.categorical_accuracy])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)
out = model.predict(x)
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
out2 = model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
def test_saving_without_compilation(self):
if h5py is None:
return # Skip test if models cannot be saved.
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
def test_saving_right_after_compilation(self):
if h5py is None:
return # Skip test if models cannot be saved.
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
model.model._make_train_function()
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
def test_saving_lambda_numpy_array_arguments(self):
if h5py is None:
return # Skip test if models cannot be saved.
mean = np.random.random((4, 2, 3))
std = np.abs(np.random.random((4, 2, 3))) + 1e-5
inputs = keras.layers.Input(shape=(4, 2, 3))
output = keras.layers.Lambda(lambda image, mu, std: (image - mu) / std,
arguments={'mu': mean, 'std': std})(inputs)
model = keras.models.Model(inputs, output)
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
self.assertAllClose(mean, model.layers[1].arguments['mu'])
self.assertAllClose(std, model.layers[1].arguments['std'])
class TestSequential(test.TestCase):
"""Most Sequential model API tests are covered in `training_test.py`.
"""
def test_sequential_pop(self):
num_hidden = 5
input_dim = 3
batch_size = 5
num_classes = 2
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(num_hidden, input_dim=input_dim))
model.add(keras.layers.Dense(num_classes))
model.compile(loss='mse', optimizer='sgd')
x = np.random.random((batch_size, input_dim))
y = np.random.random((batch_size, num_classes))
model.fit(x, y, epochs=1)
model.pop()
self.assertEqual(len(model.layers), 1)
self.assertEqual(model.output_shape, (None, num_hidden))
model.compile(loss='mse', optimizer='sgd')
y = np.random.random((batch_size, num_hidden))
model.fit(x, y, epochs=1)
if __name__ == '__main__':
test.main()
| 32.865116 | 80 | 0.649731 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.python.platform import test
try:
import h5py
except ImportError:
h5py = None
class TestModelSaving(test.TestCase):
def test_sequential_model_saving(self):
if h5py is None:
return
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.RepeatVector(3))
model.add(keras.layers.TimeDistributed(keras.layers.Dense(3)))
model.compile(loss=keras.losses.MSE,
optimizer=keras.optimizers.RMSprop(lr=0.0001),
metrics=[keras.metrics.categorical_accuracy],
sample_weight_mode='temporal')
x = np.random.random((1, 3))
y = np.random.random((1, 3, 3))
model.train_on_batch(x, y)
out = model.predict(x)
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
new_model = keras.models.load_model(fname)
os.remove(fname)
out2 = new_model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
x = np.random.random((1, 3))
y = np.random.random((1, 3, 3))
model.train_on_batch(x, y)
new_model.train_on_batch(x, y)
out = model.predict(x)
out2 = new_model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
def test_sequential_model_saving_2(self):
if h5py is None:
return
with self.test_session():
class CustomOp(keras.optimizers.RMSprop):
pass
def custom_loss(y_true, y_pred):
return keras.losses.mse(y_true, y_pred)
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss=custom_loss, optimizer=CustomOp(), metrics=['acc'])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)
out = model.predict(x)
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(
fname,
custom_objects={'CustomOp': CustomOp,
'custom_loss': custom_loss})
os.remove(fname)
out2 = model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
def test_functional_model_saving(self):
if h5py is None:
return
with self.test_session():
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
output = keras.layers.Dense(3)(x)
model = keras.models.Model(inputs, output)
model.compile(loss=keras.losses.MSE,
optimizer=keras.optimizers.RMSprop(lr=0.0001),
metrics=[keras.metrics.categorical_accuracy])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)
out = model.predict(x)
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
out2 = model.predict(x)
self.assertAllClose(out, out2, atol=1e-05)
def test_saving_without_compilation(self):
if h5py is None:
return
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
def test_saving_right_after_compilation(self):
if h5py is None:
return
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
model.model._make_train_function()
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
def test_saving_lambda_numpy_array_arguments(self):
if h5py is None:
return
mean = np.random.random((4, 2, 3))
std = np.abs(np.random.random((4, 2, 3))) + 1e-5
inputs = keras.layers.Input(shape=(4, 2, 3))
output = keras.layers.Lambda(lambda image, mu, std: (image - mu) / std,
arguments={'mu': mean, 'std': std})(inputs)
model = keras.models.Model(inputs, output)
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
_, fname = tempfile.mkstemp('.h5')
keras.models.save_model(model, fname)
model = keras.models.load_model(fname)
os.remove(fname)
self.assertAllClose(mean, model.layers[1].arguments['mu'])
self.assertAllClose(std, model.layers[1].arguments['std'])
class TestSequential(test.TestCase):
def test_sequential_pop(self):
num_hidden = 5
input_dim = 3
batch_size = 5
num_classes = 2
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(num_hidden, input_dim=input_dim))
model.add(keras.layers.Dense(num_classes))
model.compile(loss='mse', optimizer='sgd')
x = np.random.random((batch_size, input_dim))
y = np.random.random((batch_size, num_classes))
model.fit(x, y, epochs=1)
model.pop()
self.assertEqual(len(model.layers), 1)
self.assertEqual(model.output_shape, (None, num_hidden))
model.compile(loss='mse', optimizer='sgd')
y = np.random.random((batch_size, num_hidden))
model.fit(x, y, epochs=1)
if __name__ == '__main__':
test.main()
| true | true |
f72460e93b5cc9a2ba178d43a0407e7595709d8c | 399 | py | Python | run.py | palazzem/gello | 19fe9e4aa8de485dd829a87047ec64f89b5fa7ee | [
"Apache-2.0"
] | 44 | 2018-03-28T14:22:23.000Z | 2022-03-15T07:25:06.000Z | run.py | palazzem/gello | 19fe9e4aa8de485dd829a87047ec64f89b5fa7ee | [
"Apache-2.0"
] | 44 | 2018-03-28T14:19:03.000Z | 2022-02-16T10:24:57.000Z | run.py | palazzem/gello | 19fe9e4aa8de485dd829a87047ec64f89b5fa7ee | [
"Apache-2.0"
] | 12 | 2018-03-28T14:15:43.000Z | 2021-07-19T17:33:20.000Z | # -*- coding: utf-8 -*-
#
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache 2 License.
#
# This product includes software developed at Datadog
# (https://www.datadoghq.com/).
#
# Copyright 2018 Datadog, Inc.
#
"""run.py
Run the application locally by running:
`python run.py`
"""
from app import app
if __name__ == '__main__':
app.run()
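# --- Editor note (assumption that `app` is a Flask application): host, port
# and debug mode can be passed explicitly during development, e.g.
# app.run(host="127.0.0.1", port=5000, debug=True)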
| 16.625 | 78 | 0.689223 |
from app import app
if __name__ == '__main__':
app.run()
| true | true |
f7246108e996bd99236ce11f115ac45903250e54 | 399 | py | Python | code/poc/emergency.py | a10pepo/parrot_ar_drone | af3c15379772a7e86082957776ed8c39193170ec | [
"MIT"
] | null | null | null | code/poc/emergency.py | a10pepo/parrot_ar_drone | af3c15379772a7e86082957776ed8c39193170ec | [
"MIT"
] | 3 | 2021-06-08T20:51:39.000Z | 2022-03-12T00:13:08.000Z | code/poc/emergency.py | a10pepo/parrot_ar_drone | af3c15379772a7e86082957776ed8c39193170ec | [
"MIT"
] | 1 | 2020-02-03T16:24:38.000Z | 2020-02-03T16:24:38.000Z | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 18 18:19:15 2019
@author: pepo
"""
import libardrone
import pygame
from time import sleep
import time
import cv2
drone = libardrone.ARDrone()
def operation(duration):
    # parameter renamed from `sleep`, which shadowed the time.sleep import
    t1 = time.time()
    t2 = t1
    while t2 - t1 < duration:
        drone.turn_left()
        t2 = time.time()
def main():
drone.land()
if __name__ == '__main__':
main() | 12.090909 | 35 | 0.606516 |
import libardrone
import pygame
from time import sleep
import time
import cv2
drone = libardrone.ARDrone()
def operation(duration):
    t1 = time.time()
    t2 = t1
    while t2 - t1 < duration:
        drone.turn_left()
        t2 = time.time()
def main():
drone.land()
if __name__ == '__main__':
main() | true | true |
f724637d889f2486a08417116366b6d3968dc1b4 | 503 | py | Python | ports/gprs_a9/examples/example_31_ssl.py | sebi5361/micropython | 6c054cd124bc6229bee127128264dc0829dea53c | [
"MIT"
] | 79 | 2019-02-07T09:04:50.000Z | 2022-02-20T06:54:44.000Z | ports/gprs_a9/examples/example_31_ssl.py | sebi5361/micropython | 6c054cd124bc6229bee127128264dc0829dea53c | [
"MIT"
] | 100 | 2019-05-16T09:25:23.000Z | 2021-09-20T07:46:54.000Z | ports/gprs_a9/examples/example_31_ssl.py | sebi5361/micropython | 6c054cd124bc6229bee127128264dc0829dea53c | [
"MIT"
] | 25 | 2019-03-20T08:16:57.000Z | 2022-03-11T17:59:36.000Z | # Micropython a9g example
# Source: https://github.com/pulkin/micropython
# Author: pulkin
# Demonstrates how to wrap sockets into ssl tunnel
import cellular
import socket
import ssl
cellular.gprs("internet", "", "")
print("IP", socket.get_local_ip())
host = "httpstat.us"
port = 443
s = socket.socket()
s.connect((host, port))
s = ssl.wrap_socket(s)
message = "GET /200 HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n"
s.write(message.format(host))
print(s.read(256))
s.close()
cellular.gprs(False)
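# --- Editor sketch (not in the original example): for bodies longer than 256
# bytes, read until the peer closes the connection before calling close(), e.g.
# chunks = []
# while True:
#     data = s.read(256)
#     if not data:
#         break
#     chunks.append(data)
# print(b"".join(chunks))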
| 23.952381 | 70 | 0.719682 |
import cellular
import socket
import ssl
cellular.gprs("internet", "", "")
print("IP", socket.get_local_ip())
host = "httpstat.us"
port = 443
s = socket.socket()
s.connect((host, port))
s = ssl.wrap_socket(s)
message = "GET /200 HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n"
s.write(message.format(host))
print(s.read(256))
s.close()
cellular.gprs(False)
| true | true |
f7246452a8b54521f9de02de28a010c82e5a7bdd | 675 | py | Python | script/dbus_mock.py | lingxiao-Zhu/electron | 2d85b1f8f527d55f884904dbfdde50ee66a49830 | [
"MIT"
] | 88,283 | 2016-04-04T19:29:13.000Z | 2022-03-31T23:33:33.000Z | script/dbus_mock.py | lingxiao-Zhu/electron | 2d85b1f8f527d55f884904dbfdde50ee66a49830 | [
"MIT"
] | 27,327 | 2016-04-04T19:38:58.000Z | 2022-03-31T22:34:10.000Z | script/dbus_mock.py | lingxiao-Zhu/electron | 2d85b1f8f527d55f884904dbfdde50ee66a49830 | [
"MIT"
] | 15,972 | 2016-04-04T19:32:06.000Z | 2022-03-31T08:54:00.000Z | #!/usr/bin/env python
import os
import subprocess
import sys
from dbusmock import DBusTestCase
from lib.config import is_verbose_mode
def stop():
DBusTestCase.stop_dbus(DBusTestCase.system_bus_pid)
DBusTestCase.stop_dbus(DBusTestCase.session_bus_pid)
def start():
log = sys.stdout if is_verbose_mode() else open(os.devnull, 'w')
DBusTestCase.start_system_bus()
DBusTestCase.spawn_server_template('logind', None, log)
DBusTestCase.start_session_bus()
DBusTestCase.spawn_server_template('notification_daemon', None, log)
if __name__ == '__main__':
start()
try:
subprocess.check_call(sys.argv[1:])
finally:
stop()
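# --- Editor note: the guard above makes this module a wrapper that starts
# mock logind/notification D-Bus services, runs an arbitrary command, then
# tears the mocks down, e.g. (hypothetical command):
#     python script/dbus_mock.py python -m pytest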
| 22.5 | 72 | 0.734815 |
import os
import subprocess
import sys
from dbusmock import DBusTestCase
from lib.config import is_verbose_mode
def stop():
DBusTestCase.stop_dbus(DBusTestCase.system_bus_pid)
DBusTestCase.stop_dbus(DBusTestCase.session_bus_pid)
def start():
log = sys.stdout if is_verbose_mode() else open(os.devnull, 'w')
DBusTestCase.start_system_bus()
DBusTestCase.spawn_server_template('logind', None, log)
DBusTestCase.start_session_bus()
DBusTestCase.spawn_server_template('notification_daemon', None, log)
if __name__ == '__main__':
start()
try:
subprocess.check_call(sys.argv[1:])
finally:
stop()
| true | true |
f7246744af0401037d527244f3f9bd85914a3e69 | 3,357 | py | Python | train_scripts/lognet.py | franciscovargas/TeviotDataScienceGame | 77bdffb87b2a25490ca175b3a2f96459067e4ccb | [
"MIT"
] | null | null | null | train_scripts/lognet.py | franciscovargas/TeviotDataScienceGame | 77bdffb87b2a25490ca175b3a2f96459067e4ccb | [
"MIT"
] | null | null | null | train_scripts/lognet.py | franciscovargas/TeviotDataScienceGame | 77bdffb87b2a25490ca175b3a2f96459067e4ccb | [
"MIT"
] | null | null | null | from keras.layers.convolutional import Convolution2D
from keras.layers.convolutional import MaxPooling2D
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.optimizers import SGD, Adam
import numpy as np
import theano as th
from keras.utils.np_utils import to_categorical
from keras.regularizers import l2
import pickle as pkl  # cPickle is Python 2 only; pickle is the Python 3 replacement
from dutils import subtools
from keras.preprocessing.image import ImageDataGenerator
LoG = np.array([[0, 1,0],
[1,-4,1],
[0, 1,0]])
Lox = np.array([[-1, 0,1],
[-2, 0,2],
[-1, 0,1]])
Loy = np.array([[1, 2,1],
[0, 0,0],
[-1, -2,-1]])
sha = np.array([[0, -1,0],
[-1,5,-1],
[0, -1,0]])
avg = (1/25.0)*np.ones((5,5))
weight = np.array([LoG for c in range(3)])
# weight = np.array([LoG, Lox, Loy])
# weights.swapaxes(0,-1)
weights = np.array([weight for c in range(10)])
weight1 = np.array([avg for c in range(10)])
# weights.swapaxes(0,-1)
weights1 = np.array([weight1 for c in range(10)])
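# --- Editor sketch: what the fixed LoG kernel computes on a single channel.
# Uses scipy (an assumption; the original script does not import it) on a toy
# image with one bright pixel -- the response is the kernel centred on that
# pixel, which is why this untrained first layer already acts as an edge/blob
# detector.
# from scipy.signal import convolve2d
# toy = np.zeros((5, 5)); toy[2, 2] = 1.0
# response = convolve2d(toy, LoG, mode='same')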
"""
CNN Layer signature
Convolution1D(nb_filter,
filter_length,
init='uniform',
activation='linear',
weights=None,
border_mode='valid',
subsample_length=1,
W_regularizer=None,
b_regularizer=None,
activity_regularizer=None,
W_constraint=None, b_constraint=None, bias=True, input_dim=None, input_length=None)
"""
model = Sequential()
model.add(Convolution2D(nb_filter=10,border_mode='valid',nb_row=3,nb_col=3 ,
input_shape=(3,64,64),
weights=[weights, np.array([0]*10) ]) )
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(nb_filter=32,border_mode='valid',
nb_row=5, nb_col=5,init="glorot_normal"))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
# model.add(Convolution2D(nb_filter=10,border_mode='valid',
# nb_row=5, nb_col=5,W_regularizer=l2(0.01)))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
# MLP
model.add(Dense(200, W_regularizer=l2(0.01), init="glorot_normal" ))
model.add(Activation('relu'))
model.add(Dropout(0.5))
#Classification Layer
model.add(Dense(4))
model.add(Activation('softmax'))
# compile model
sgd = SGD(lr=0.25)
adam = Adam(lr=0.1)
model.compile(optimizer='rmsprop',
loss='categorical_crossentropy',
metrics=['accuracy'])
print "loading train"
trainX = pkl.load(open("../data/pkl/trainX.pkl"))
print "done loading train"
trainX=trainX.transpose(0,3,1,2)
trainY = pkl.load(open("../data/pkl/trainY.pkl"))
datagen = ImageDataGenerator(
horizontal_flip=True, rotation_range=5, zoom_range=0.2,vertical_flip=True)
datagen.fit(trainX)
print "GENERATED"
generator = datagen.flow(trainX, to_categorical(trainY-1,4) , batch_size=32)
model.fit_generator(generator,
samples_per_epoch=len(trainX), nb_epoch=155)
testX = pkl.load(open("../data/pkl/testX.pkl", "rb"))
testX=testX.transpose(0,3,1,2)
results = np.argmax(model.predict(testX),axis=-1) +1
subtools.create_submision(results,sub=233)
| 24.683824 | 97 | 0.634793 | from keras.layers.convolutional import Convolution2D
from keras.layers.convolutional import MaxPooling2D
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.optimizers import SGD, Adam
import numpy as np
import theano as th
from keras.utils.np_utils import to_categorical
from keras.regularizers import l2
import pickle as pkl
from dutils import subtools
from keras.preprocessing.image import ImageDataGenerator
LoG = np.array([[0, 1,0],
[1,-4,1],
[0, 1,0]])
Lox = np.array([[-1, 0,1],
[-2, 0,2],
[-1, 0,1]])
Loy = np.array([[1, 2,1],
[0, 0,0],
[-1, -2,-1]])
sha = np.array([[0, -1,0],
[-1,5,-1],
[0, -1,0]])
avg = (1/25.0)*np.ones((5,5))
weight = np.array([LoG for c in range(3)])
weights = np.array([weight for c in range(10)])
weight1 = np.array([avg for c in range(10)])
weights1 = np.array([weight1 for c in range(10)])
"""
CNN Layer signature
Convolution1D(nb_filter,
filter_length,
init='uniform',
activation='linear',
weights=None,
border_mode='valid',
subsample_length=1,
W_regularizer=None,
b_regularizer=None,
activity_regularizer=None,
W_constraint=None, b_constraint=None, bias=True, input_dim=None, input_length=None)
"""
model = Sequential()
model.add(Convolution2D(nb_filter=10,border_mode='valid',nb_row=3,nb_col=3 ,
input_shape=(3,64,64),
weights=[weights, np.array([0]*10) ]) )
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(nb_filter=32,border_mode='valid',
nb_row=5, nb_col=5,init="glorot_normal"))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(200, W_regularizer=l2(0.01), init="glorot_normal" ))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(4))
model.add(Activation('softmax'))
sgd = SGD(lr=0.25)
adam = Adam(lr=0.1)
model.compile(optimizer='rmsprop',
loss='categorical_crossentropy',
metrics=['accuracy'])
print "loading train"
trainX = pkl.load(open("../data/pkl/trainX.pkl"))
print "done loading train"
trainX=trainX.transpose(0,3,1,2)
trainY = pkl.load(open("../data/pkl/trainY.pkl"))
datagen = ImageDataGenerator(
horizontal_flip=True, rotation_range=5, zoom_range=0.2,vertical_flip=True)
datagen.fit(trainX)
print "GENERATED"
generator = datagen.flow(trainX, to_categorical(trainY-1,4) , batch_size=32)
model.fit_generator(generator,
samples_per_epoch=len(trainX), nb_epoch=155)
testX = pkl.load(open("../data/pkl/testX.pkl", "rb"))
testX=testX.transpose(0,3,1,2)
results = np.argmax(model.predict(testX),axis=-1) +1
subtools.create_submision(results,sub=233)
| false | true |
f724678f5e7ade231a5e84c6bdf82ae0868ec843 | 40,659 | py | Python | applications/admin/languages/he.py | misl6/web2py | 4191d4c48c37c66cc7eb293b610a6b6e86870571 | [
"BSD-3-Clause"
] | 1 | 2019-09-05T03:54:51.000Z | 2019-09-05T03:54:51.000Z | applications/admin/languages/he.py | misl6/web2py | 4191d4c48c37c66cc7eb293b610a6b6e86870571 | [
"BSD-3-Clause"
] | null | null | null | applications/admin/languages/he.py | misl6/web2py | 4191d4c48c37c66cc7eb293b610a6b6e86870571 | [
"BSD-3-Clause"
] | 1 | 2019-09-05T03:54:52.000Z | 2019-09-05T03:54:52.000Z | # -*- coding: utf-8 -*-
{
'!langcode!': 'he-il',
'!langname!': 'עברית',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"עדכן" הוא ביטוי אופציונאלי, כגון "field1=newvalue". אינך יוכל להשתמש בjoin, בעת שימוש ב"עדכן" או "מחק".',
'"User Exception" debug mode. ': '"User Exception" debug mode. ',
'%s': '%s',
'%s %%{row} deleted': '%s רשומות נמחקו',
'%s %%{row} updated': '%s רשומות עודכנו',
'%s selected': '%s selected',
'%s students registered': '%s students registered',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(requires internet access)': '(requires internet access)',
'(requires internet access, experimental)': '(requires internet access, experimental)',
'(something like "it-it")': '(למשל "it-it")',
'(version %s)': '(version %s)',
'?': '?',
'@markmin\x01(**%.0d MB**)': '(**%.0d MB**)',
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(file **gluon/contrib/plural_rules/%s.py** is not found)',
'@markmin\x01**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}',
'@markmin\x01**%(items)s** items, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** items, **%(bytes)s** %%{byte(bytes)}',
'@markmin\x01**not available** (requires the Python [[Pympler https://pypi.python.org/pypi/Pympler popup]] library)': '**not available** (requires the Python [[Pympler https://pypi.python.org/pypi/Pympler popup]] library)',
'@markmin\x01``**not available**``:red (requires the Python [[Pympler https://pypi.python.org/pypi/Pympler popup]] library)': '``**not available**``:red (requires the Python [[Pympler https://pypi.python.org/pypi/Pympler popup]] library)',
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'@markmin\x01Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'@markmin\x01DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'@markmin\x01Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})': 'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})',
"@markmin\x01Mercurial Version Control System Interface[[NEWLINE]]for application '%s'": "Mercurial Version Control System Interface[[NEWLINE]]for application '%s'",
'@markmin\x01Number of entries: **%s**': 'Number of entries: **%s**',
'@markmin\x01Please [[refresh %s]] this page to see if a breakpoint was hit and debug interaction is required.': 'Please [[refresh %s]] this page to see if a breakpoint was hit and debug interaction is required.',
'@markmin\x01RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
'@markmin\x01You need to set up and reach a [[breakpoint %s]] to use the debugger!': 'You need to set up and reach a [[breakpoint %s]] to use the debugger!',
'A new password was emailed to you': 'A new password was emailed to you',
'A new version of web2py is available: %s': 'גירסא חדשה של web2py זמינה: %s',
'Abort': 'Abort',
'About': 'אודות',
'About application': 'אודות אפליקציה',
'Accept Terms': 'Accept Terms',
'Add breakpoint': 'Add breakpoint',
'additional code for your application': 'קוד נוסף עבור האפליקציה שלך',
'Additional code for your application': 'Additional code for your application',
'Admin design page': 'Admin design page',
'admin disabled because no admin password': 'ממשק המנהל מנוטרל כי לא הוגדרה סיסמת מנהל',
'admin disabled because not supported on google app engine': 'ממשק המנהל נוטרל, כי אין תמיכה בGoogle app engine',
'admin disabled because too many invalid login attempts': 'admin disabled because too many invalid login attempts',
'admin disabled because unable to access password file': 'ממשק מנהל נוטרל, כי לא ניתן לגשת לקובץ הסיסמאות',
'Admin is disabled because insecure channel': 'ממשק האדמין נוטרל בשל גישה לא מאובטחת',
'Admin language': 'Admin language',
'Admin versioning page': 'Admin versioning page',
'administrative interface': 'administrative interface',
'Administrator Password:': 'סיסמת מנהל',
'and rename it (required):': 'ושנה את שמו (חובה):',
'and rename it:': 'ושנה את שמו:',
'App does not exist or you are not authorized': 'App does not exist or you are not authorized',
'appadmin': 'מנהל מסד הנתונים',
'appadmin is disabled because insecure channel': 'מנהל מסד הנתונים נוטרל בשל ערוץ לא מאובטח',
'Application': 'Application',
'application "%s" uninstalled': 'אפליקציה "%s" הוסרה',
'Application cannot be generated in demo mode': 'Application cannot be generated in demo mode',
'application compiled': 'אפליקציה קומפלה',
'Application exists already': 'Application exists already',
'application is compiled and cannot be designed': 'לא ניתן לערוך אפליקציה מקומפלת',
'Application name:': 'Application name:',
'Application updated via git pull': 'Application updated via git pull',
'Apply changes': 'Apply changes',
'are not used': 'are not used',
'are not used yet': 'are not used yet',
'Are you sure you want to delete file "%s"?': 'האם אתה בטוח שברצונך למחוק את הקובץ "%s"?',
'Are you sure you want to delete plugin "%s"?': 'האם אתה בטוח שברצונך למחוק את התוסף "%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"?': 'האם אתה בטוח שברצונך להסיר את האפליקציה "%s"?',
'Are you sure you want to upgrade web2py now?': 'האם אתה בטוח שאתה רוצה לשדרג את web2py עכשיו?',
'Are you sure?': 'Are you sure?',
'arguments': 'פרמטרים',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'ATTENTION:': 'ATTENTION:',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'לתשומת ליבך: ניתן להתחבר רק בערוץ מאובטח (HTTPS) או מ-localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'לתשומת ליבך: אין לערוך מספר בדיקות במקביל, שכן הן עשויות להפריע זו לזו',
'ATTENTION: you cannot edit the running application!': 'לתשומת ליבך: לא ניתן לערוך אפליקציה בזמן הרצתה',
'Authentication code': 'Authentication code',
'Autocomplete Python Code': 'Autocomplete Python Code',
'Available databases and tables': 'מסדי נתונים וטבלאות זמינים',
'Available Databases and Tables': 'Available Databases and Tables',
'back': 'אחורה',
'Back to the plugins list': 'Back to the plugins list',
'Back to wizard': 'Back to wizard',
'Basics': 'Basics',
'Begin': 'Begin',
'breakpoint': 'breakpoint',
'Breakpoints': 'Breakpoints',
'breakpoints': 'breakpoints',
'Bulk Register': 'Bulk Register',
'Bulk Student Registration': 'Bulk Student Registration',
'Cache': 'Cache',
'cache': 'מטמון',
'Cache Cleared': 'Cache Cleared',
'Cache Keys': 'Cache Keys',
'cache, errors and sessions cleaned': 'מטמון, שגיאות וסשן נוקו',
'can be a git repo': 'can be a git repo',
'Cancel': 'Cancel',
'Cannot be empty': 'אינו יכול להישאר ריק',
'Cannot compile: there are errors in your app:': 'לא ניתן לקמפל: ישנן שגיאות באפליקציה שלך:',
'cannot create file': 'לא מצליח ליצור קובץ',
'cannot upload file "%(filename)s"': 'לא הצלחתי להעלות את הקובץ "%(filename)s"',
'Change Admin Password': 'Change Admin Password',
'Change admin password': 'שנה סיסמת מנהל',
'change editor settings': 'change editor settings',
'Change password': 'Change password',
'Changelog': 'Changelog',
'check all': 'סמן הכל',
'Check for upgrades': 'Check for upgrades',
'Check to delete': 'סמן כדי למחוק',
'Checking for upgrades...': 'מחפש עדכונים...',
'Clean': 'נקה',
'Clear': 'Clear',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Click on the link %(link)s to reset your password': 'Click on the link %(link)s to reset your password',
'Click row to expand traceback': 'Click row to expand traceback',
'Click row to view a ticket': 'Click row to view a ticket',
'click to check for upgrades': 'לחץ כדי לחפש עדכונים',
'Client IP': 'Client IP',
'code': 'קוד',
'Code listing': 'Code listing',
'collapse/expand all': 'collapse/expand all',
'Command': 'Command',
'Comment:': 'Comment:',
'Commit': 'Commit',
'Commit form': 'Commit form',
'Committed files': 'Committed files',
'Compile': 'קמפל',
'Compile (all or nothing)': 'Compile (all or nothing)',
'Compile (skip failed views)': 'Compile (skip failed views)',
'compiled application removed': 'אפליקציה מקומפלת הוסרה',
'Condition': 'Condition',
'continue': 'continue',
'Controllers': 'בקרים',
'controllers': 'בקרים',
'Count': 'Count',
'Create': 'צור',
'create file with filename:': 'צור קובץ בשם:',
'create new application:': 'צור אפליקציה חדשה:',
'Create new simple application': 'צור אפליקציה פשוטה חדשה',
'Create/Upload': 'Create/Upload',
'created by': 'נוצר ע"י',
'Created by:': 'Created by:',
'Created On': 'Created On',
'Created on:': 'Created on:',
'crontab': 'משימות מתוזמנות',
'Current request': 'בקשה נוכחית',
'Current response': 'מענה נוכחי',
'Current session': 'סשן זה',
'currently running': 'currently running',
'currently saved or': 'נשמר כעת או',
'data uploaded': 'המידע הועלה',
'Database': 'Database',
'database': 'מסד נתונים',
'Database %s select': 'Database %s select',
'database %s select': 'מסד הנתונים %s נבחר',
'Database administration': 'Database administration',
'database administration': 'ניהול מסד נתונים',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date and Time': 'תאריך ושעה',
'db': 'מסד נתונים',
'Debug': 'Debug',
'defines tables': 'מגדיר טבלאות',
'Delete': 'מחק',
'delete': 'מחק',
'delete all checked': 'מחק את כל המסומנים',
'delete plugin': 'מחק תוסף',
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
'Delete:': 'מחק:',
'deleted after first hit': 'deleted after first hit',
'Demo': 'Demo',
'Deploy': 'Deploy',
'Deploy on Google App Engine': 'העלה ל Google App Engine',
'Deploy to OpenShift': 'Deploy to OpenShift',
'Deploy to pythonanywhere': 'Deploy to pythonanywhere',
'Deploy to PythonAnywhere': 'Deploy to PythonAnywhere',
'Deployment form': 'Deployment form',
'Deployment Interface': 'Deployment Interface',
'Description': 'Description',
'Description:': 'Description:',
'design': 'עיצוב',
'Detailed traceback description': 'Detailed traceback description',
'details': 'details',
'direction: ltr': 'direction: rtl',
'directory not found': 'directory not found',
'Disable': 'Disable',
'Disabled': 'Disabled',
'disabled in demo mode': 'disabled in demo mode',
'disabled in GAE mode': 'disabled in GAE mode',
'disabled in multi user mode': 'disabled in multi user mode',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Display line numbers': 'Display line numbers',
'DO NOT use the "Pack compiled" feature.': 'DO NOT use the "Pack compiled" feature.',
'docs': 'docs',
'Docs': 'Docs',
'done!': 'הסתיים!',
'Downgrade': 'Downgrade',
'Download .w2p': 'Download .w2p',
'Download as .exe': 'Download as .exe',
'download layouts': 'download layouts',
'Download layouts from repository': 'Download layouts from repository',
'download plugins': 'download plugins',
'Download plugins from repository': 'Download plugins from repository',
'E-mail': 'E-mail',
'EDIT': 'ערוך!',
'Edit': 'ערוך',
'edit all': 'edit all',
'Edit application': 'ערוך אפליקציה',
'edit controller': 'ערוך בקר',
'edit controller:': 'edit controller:',
'Edit current record': 'ערוך רשומה נוכחית',
'edit views:': 'ערוך קבצי תצוגה:',
'Editing %s': 'Editing %s',
'Editing file "%s"': 'עורך את הקובץ "%s"',
'Editing Language file': 'עורך את קובץ השפה',
'Editing Plural Forms File': 'Editing Plural Forms File',
'Editor': 'Editor',
'Email Address': 'Email Address',
'Email sent': 'Email sent',
'Email verification': 'Email verification',
'Email verified': 'Email verified',
'Enable': 'Enable',
'Enable Close-Tag': 'Enable Close-Tag',
'Enable Code Folding': 'Enable Code Folding',
'Enterprise Web Framework': 'סביבת הפיתוח לרשת',
'Error': 'Error',
'Error logs for "%(app)s"': 'דו"ח שגיאות עבור אפליקציה "%(app)s"',
'Error snapshot': 'Error snapshot',
'Error ticket': 'Error ticket',
'Errors': 'שגיאות',
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
'Exception %s': 'Exception %s',
'Exception instance attributes': 'נתוני החריגה',
'Exit Fullscreen': 'Exit Fullscreen',
'Expand Abbreviation (html files only)': 'Expand Abbreviation (html files only)',
'export as csv file': 'יצא לקובץ csv',
'Exports:': 'Exports:',
'exposes': 'חושף את',
'exposes:': 'exposes:',
'extends': 'הרחבה של',
'failed to compile file because:': 'failed to compile file because:',
'failed to reload module because:': 'נכשל בטעינה חוזרת של מודול בגלל:',
'File': 'File',
'file "%(filename)s" created': 'הקובץ "%(filename)s" נוצר',
'file "%(filename)s" deleted': 'הקובץ "%(filename)s" נמחק',
'file "%(filename)s" uploaded': 'הקובץ "%(filename)s" הועלה',
'file "%s" of %s restored': 'הקובץ "%s" of %s שוחזר',
'file changed on disk': 'קובץ שונה על גבי הדיסק',
'file does not exist': 'קובץ לא נמצא',
'file not found': 'file not found',
'file saved on %(time)s': 'הקובץ נשמר בשעה %(time)s',
'file saved on %s': 'הקובץ נשמר ב%s',
'filename': 'filename',
'Filename': 'Filename',
'Files added': 'Files added',
'filter': 'filter',
'Find Next': 'Find Next',
'Find Previous': 'Find Previous',
'First name': 'First name',
'Form has errors': 'Form has errors',
'Frames': 'Frames',
'Function disabled': 'Function disabled',
'Functions with no doctests will result in [passed] tests.': 'פונקציות שלא הוגדר להן doctest ירשמו כבדיקות ש[עברו בהצלחה].',
'GAE Email': 'GAE Email',
'GAE Output': 'GAE Output',
'GAE Password': 'GAE Password',
'Generate': 'Generate',
'Git Pull': 'Git Pull',
'Git Push': 'Git Push',
'Globals##debug': 'Globals##debug',
'go!': 'go!',
'Google App Engine Deployment Interface': 'Google App Engine Deployment Interface',
'Google Application Id': 'Google Application Id',
'Goto': 'Goto',
'graph model': 'graph model',
'Graph Model': 'Graph Model',
'Group %(group_id)s created': 'Group %(group_id)s created',
'Group %(group_id)s deleted': 'Group %(group_id)s deleted',
'Group ID': 'Group ID',
'Group uniquely assigned to user %(id)s': 'Group uniquely assigned to user %(id)s',
'Help': 'עזרה',
'here': 'here',
'Hide/Show Translated strings': 'Hide/Show Translated strings',
'Highlight current line': 'Highlight current line',
'Hits': 'Hits',
'Home': 'Home',
'honored only if the expression evaluates to true': 'honored only if the expression evaluates to true',
'htmledit': 'עורך ויזואלי',
'If start the downgrade, be patient, it may take a while to rollback': 'If you start the downgrade, be patient; it may take a while to roll back',
'If start the upgrade, be patient, it may take a while to download': 'If you start the upgrade, be patient; it may take a while to download',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'אם בדו"ח לעיל מופיע מספר דו"ח שגיאה, זה מצביע על שגיאה בבקר, עוד לפני שניתן היה להריץ את ה-doctest. לרוב מדובר בשגיאת הזחה, או שגיאה שאינה בקוד של הפונקציה.\r\nכותרת ירוקה מצביעה על כך שכל הבדיקות (אם הוגדרו) עברו בהצלחה. במקרה זה תוצאות הבדיקה אינן מוצגות.',
'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.': 'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.',
'import': 'import',
'Import/Export': 'יבא\\יצא',
'In development, use the default Rocket webserver that is currently supported by this debugger.': 'In development, use the default Rocket webserver that is currently supported by this debugger.',
'includes': 'מכיל',
'Incorrect code. {0} more attempt(s) remaining.': 'Incorrect code. {0} more attempt(s) remaining.',
'Indent with tabs': 'Indent with tabs',
'insert new': 'הכנס נוסף',
'insert new %s': 'הכנס %s נוסף',
'inspect attributes': 'inspect attributes',
'Install': 'התקן',
'Installation of %(plugin)s for %(app)s': 'Installation of %(plugin)s for %(app)s',
'Installed applications': 'אפליקציות מותקנות',
'Insufficient privileges': 'Insufficient privileges',
'Interaction at %s line %s': 'Interaction at %s line %s',
'Interactive console': 'Interactive console',
'internal error': 'שגיאה פנימית',
'internal error: %s': 'internal error: %s',
'Internal State': 'מצב פנימי',
'Invalid action': 'הוראה לא קיימת',
'Invalid application name': 'Invalid application name',
'invalid circular reference': 'invalid circular reference',
'Invalid email': 'Invalid email',
'Invalid git repository specified.': 'Invalid git repository specified.',
'Invalid key': 'Invalid key',
'Invalid login': 'Invalid login',
'invalid password': 'סיסמא שגויה',
'Invalid password': 'Invalid password',
'invalid password.': 'invalid password.',
'Invalid Query': 'שאילתה לא תקינה',
'invalid request': 'בקשה לא תקינה',
'Invalid request': 'Invalid request',
'Invalid reset password': 'Invalid reset password',
'invalid table names (auth_* tables already defined)': 'invalid table names (auth_* tables already defined)',
'invalid ticket': 'דו"ח שגיאה לא קיים',
'Invalid user': 'Invalid user',
'Invalid username': 'Invalid username',
'Invitation to join %(site)s': 'Invitation to join %(site)s',
'Key': 'Key',
'Key verified': 'Key verified',
'Keyboard shortcuts': 'Keyboard shortcuts',
'kill process': 'kill process',
'language file "%(filename)s" created/updated': 'קובץ השפה "%(filename)s" נוצר\\עודכן',
'Language files (static strings) updated': 'קבצי השפה (מחרוזות סטאטיות) עודכנו',
'languages': 'שפות',
'Languages': 'שפות',
'Last name': 'Last name',
'Last Revision': 'Last Revision',
'Last saved on:': 'לאחרונה נשמר בתאריך:',
'License for': 'רשיון עבור',
'License:': 'License:',
'Line Nr': 'Line Nr',
'Line number': 'Line number',
'lists by exception': 'lists by exception',
'lists by ticket': 'lists by ticket',
'Loading...': 'Loading...',
'loading...': 'טוען...',
'Local Apps': 'Local Apps',
'locals': 'locals',
'Locals##debug': 'Locals##debug',
'Log In': 'Log In',
'Logged in': 'Logged in',
'Logged out': 'Logged out',
'Login': 'התחבר',
'login': 'התחבר',
'Login disabled by administrator': 'Login disabled by administrator',
'Login successful': 'Login successful',
'Login to the Administrative Interface': 'התחבר לממשק המנהל',
'Login/Register': 'Login/Register',
'Logout': 'התנתק',
'lost password': 'lost password',
'Main Menu': 'Main Menu',
'Manage': 'Manage',
'Manage %(action)s': 'Manage %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Admin Users/Students': 'Manage Admin Users/Students',
'Manage Cache': 'Manage Cache',
'Manage Students': 'Manage Students',
'Memberships': 'Memberships',
'merge': 'מזג',
'Models': 'מבני נתונים',
'models': 'מבני נתונים',
'Modified On': 'Modified On',
'Modules': 'מודולים',
'modules': 'מודולים',
'Multi User Mode': 'Multi User Mode',
'Name': 'Name',
'new application "%s" created': 'האפליקציה "%s" נוצרה',
'new application "%s" imported': 'new application "%s" imported',
'New Application Wizard': 'New Application Wizard',
'New application wizard': 'New application wizard',
'New password': 'New password',
'new plugin installed': 'פלאגין חדש הותקן',
'New plugin installed: %s': 'New plugin installed: %s',
'New Record': 'רשומה חדשה',
'new record inserted': 'הרשומה נוספה',
'New simple application': 'New simple application',
'next': 'next',
'next %s rows': 'next %s rows',
'next 100 rows': '100 הרשומות הבאות',
'NO': 'לא',
'no changes': 'no changes',
'No databases in this application': 'אין מסדי נתונים לאפליקציה זו',
'No Interaction yet': 'No Interaction yet',
'no match': 'לא נמצאה התאמה',
'no package selected': 'no package selected',
'no permission to uninstall "%s"': 'no permission to uninstall "%s"',
'Node:': 'Node:',
'Not Authorized': 'Not Authorized',
'Not supported': 'Not supported',
'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a cooresponding ssh key configured in the openshift account.': 'Note: If you receive an error with github status code of 128, ensure the system and account you are deploying from has a corresponding ssh key configured in the openshift account.',
'Object or table name': 'Object or table name',
'Old password': 'Old password',
"On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.": "On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.",
'Open new app in new window': 'Open new app in new window',
'OpenShift Deployment Interface': 'OpenShift Deployment Interface',
'OpenShift Output': 'OpenShift Output',
'or alternatively': 'or alternatively',
'Or Get from URL:': 'Or Get from URL:',
'or import from csv file': 'או יבא מקובץ csv',
'or provide app url:': 'או ספק כתובת url של אפליקציה',
'Origin': 'Origin',
'Original/Translation': 'מקור\\תרגום',
'Overview': 'Overview',
'Overwrite installed app': 'התקן על גבי אפליקציה מותקנת',
'Pack all': 'ארוז הכל',
'Pack compiled': 'ארוז מקומפל',
'Pack custom': 'Pack custom',
'pack plugin': 'ארוז תוסף',
'PAM authenticated user, cannot change password here': 'שינוי סיסמא באמצעות PAM אינו יכול להתבצע כאן',
'Password': 'Password',
'password changed': 'סיסמא שונתה',
'Password changed': 'Password changed',
"Password fields don't match": "Password fields don't match",
'Password reset': 'Password reset',
'Password retrieve': 'Password retrieve',
'Past revisions': 'Past revisions',
'Path to appcfg.py': 'Path to appcfg.py',
'Path to local openshift repo root.': 'Path to local openshift repo root.',
'Peeking at file': 'מעיין בקובץ',
'Permission': 'Permission',
'Permissions': 'Permissions',
'Please': 'Please',
'please input your password again': 'please input your password again',
'Please wait, giving pythonanywhere a moment...': 'Please wait, giving pythonanywhere a moment...',
'plugin "%(plugin)s" deleted': 'תוסף "%(plugin)s" נמחק',
'Plugin "%s" in application': 'פלאגין "%s" של אפליקציה',
'plugin not specified': 'plugin not specified',
'Plugin page': 'Plugin page',
'plugins': 'plugins',
'Plugins': 'תוספים',
'Plural Form #%s': 'Plural Form #%s',
'Plural-Forms:': 'Plural-Forms:',
'Powered by': 'מופעל ע"י',
'Preferences saved correctly': 'Preferences saved correctly',
'Preferences saved on session only': 'Preferences saved on session only',
'previous %s rows': 'previous %s rows',
'previous 100 rows': '100 הרשומות הקודמות',
'Private files': 'Private files',
'private files': 'private files',
'Profile updated': 'Profile updated',
'Project Progress': 'Project Progress',
'Pull': 'Pull',
'Pull failed, certain files could not be checked out. Check logs for details.': 'Pull failed, certain files could not be checked out. Check logs for details.',
'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.': 'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.',
'Push': 'Push',
'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.': 'Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.',
'pygraphviz library not found': 'pygraphviz library not found',
'PythonAnywhere Apps': 'PythonAnywhere Apps',
'PythonAnywhere Password': 'PythonAnywhere Password',
'Query:': 'שאילתה:',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Rapid Search': 'Rapid Search',
'Record': 'Record',
'record': 'רשומה',
'Record %(id)s created': 'Record %(id)s created',
'Record %(id)s deleted': 'Record %(id)s deleted',
'Record %(id)s read': 'Record %(id)s read',
'Record %(id)s updated': 'Record %(id)s updated',
'Record Created': 'Record Created',
'Record Deleted': 'Record Deleted',
'record does not exist': 'הרשומה אינה קיימת',
'record id': 'מזהה רשומה',
'Record id': 'Record id',
'Record ID': 'Record ID',
'Record Updated': 'Record Updated',
'refresh': 'refresh',
'register': 'register',
'Registration identifier': 'Registration identifier',
'Registration is pending approval': 'Registration is pending approval',
'Registration key': 'Registration key',
'Registration needs verification': 'Registration needs verification',
'Registration successful': 'Registration successful',
'Reload routes': 'Reload routes',
'Remember me (for 30 days)': 'Remember me (for 30 days)',
'Remove compiled': 'הסר מקומפל',
'Removed Breakpoint on %s at line %s': 'Removed Breakpoint on %s at line %s',
'Replace': 'Replace',
'Replace All': 'Replace All',
'Repository (%s)': 'Repository (%s)',
'request': 'request',
'Request reset password': 'Request reset password',
'requires distutils, but not installed': 'requires distutils, but not installed',
'requires python-git, but not installed': 'requires python-git, but not installed',
'Reset Password key': 'Reset Password key',
'Resolve Conflict file': 'פתור קונפליקט בקובץ',
'response': 'response',
'restart': 'restart',
'restore': 'שחזר',
'return': 'return',
'Revert': 'Revert',
'revert': 'חזור לגירסא קודמת',
'reverted to revision %s': 'reverted to revision %s',
'Revision %s': 'Revision %s',
'Revision:': 'Revision:',
'Role': 'Role',
'Roles': 'Roles',
'Rows in Table': 'Rows in Table',
'Rows in table': 'רשומות בטבלה',
'Rows selected': 'רשומות נבחרו',
'rules are not defined': 'rules are not defined',
'Run tests': 'Run tests',
'Run tests in this file': 'Run tests in this file',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
'Running on %s': 'Running on %s',
'Save': 'Save',
'Save file:': 'Save file:',
'Save file: %s': 'Save file: %s',
'Save model as...': 'Save model as...',
'Save via Ajax': 'Save via Ajax',
'Saved file hash:': 'גיבוב הקובץ השמור:',
'Screenshot %s': 'Screenshot %s',
'Search': 'Search',
'Select Files to Package': 'Select Files to Package',
'selected': 'נבחרו',
'session': 'session',
'session expired': 'תם הסשן',
'Session saved correctly': 'Session saved correctly',
'Session saved on session only': 'Session saved on session only',
'Set Breakpoint on %s at line %s: %s': 'Set Breakpoint on %s at line %s: %s',
'shell': 'שורת פקודה',
'Showing %s to %s of %s %s found': 'Showing %s to %s of %s %s found',
'Sign Up': 'Sign Up',
'Singular Form': 'Singular Form',
'Site': 'אתר',
'Size of cache:': 'Size of cache:',
'skip to generate': 'skip to generate',
'some files could not be removed': 'לא ניתן היה להסיר חלק מהקבצים',
'Something went wrong please wait a few minutes before retrying': 'Something went wrong please wait a few minutes before retrying',
'Sorry, could not find mercurial installed': 'Sorry, could not find mercurial installed',
'source : db': 'source : db',
'source : filesystem': 'source : filesystem',
'Start a new app': 'Start a new app',
'Start searching': 'Start searching',
'Start wizard': 'Start wizard',
'state': 'מצב',
'Static': 'Static',
'static': 'קבצים סטאטיים',
'Static files': 'קבצים סטאטיים',
'Statistics': 'Statistics',
'Step': 'Step',
'step': 'step',
'stop': 'stop',
'submit': 'שלח',
'Submit': 'Submit',
'successful': 'successful',
'Sure you want to delete this object?': 'האם אתה בטוח שברצונך למחוק אובייקט זה?',
'switch to : db': 'switch to : db',
'switch to : filesystem': 'switch to : filesystem',
'Tab width (# characters)': 'Tab width (# characters)',
'table': 'טבלה',
'Table': 'Table',
'Temporary': 'Temporary',
'test': 'בדיקות',
'Testing application': 'בודק את האפליקציה',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"שאליתה" היא תנאי כגון "db1.table1.filed1=\'value\'" ביטוי כמו db.table1.field1=db.table2.field1 יחולל join',
'The app exists, was created by wizard, continue to overwrite!': 'The app exists, was created by wizard, continue to overwrite!',
'The app exists, was NOT created by wizard, continue to overwrite!': 'The app exists, was NOT created by wizard, continue to overwrite!',
'the application logic, each URL path is mapped in one exposed function in the controller': 'הלוגיקה של האפליקציה, כל url ממופה לפונקציה חשופה בבקר',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'the data representation, define database tables and sets': 'ייצוג המידע, בו מוגדרים טבלאות ומבנים',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'the presentations layer, views are also known as templates': 'שכבת התצוגה, המכונה גם template',
'Theme': 'Theme',
'There are no controllers': 'אין בקרים',
'There are no models': 'אין מבני נתונים',
'There are no modules': 'אין מודולים',
'There are no plugins': 'There are no plugins',
'There are no private files': 'There are no private files',
'There are no static files': 'אין קבצים סטאטיים',
'There are no translators': 'There are no translators',
'There are no translators, only default language is supported': 'אין תרגומים. רק שפת ברירת המחדל נתמכת',
'There are no views': 'אין קבצי תצוגה',
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
'these files are served without processing, your images go here': 'אלו הם קבצים הנשלחים מהשרת ללא עיבוד. הכנס את התמונות כאן',
'This code was emailed to you and is required for login.': 'This code was emailed to you and is required for login.',
"This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.": "This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.",
'This email already has an account': 'This email already has an account',
'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk',
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
'This is the %(filename)s template': 'זוהי תבנית הקובץ %(filename)s',
"This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.": "This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.",
'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.': 'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take a long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.',
'this page to see if a breakpoint was hit and debug interaction is required.': 'this page to see if a breakpoint was hit and debug interaction is required.',
'This will pull changes from the remote repo for application "%s"?': 'This will pull changes from the remote repo for application "%s"?',
'This will push changes to the remote repo for application "%s".': 'This will push changes to the remote repo for application "%s".',
'Ticket': 'דו"ח שגיאה',
'Ticket ID': 'Ticket ID',
'Ticket Missing': 'Ticket Missing',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Timestamp',
'TM': 'סימן מסחרי',
'to previous version.': 'לגירסא הקודמת.',
'To create a plugin, name a file/folder plugin_[name]': 'כדי ליצור תוסף, קרא לקובץ או ספריה בשם לפי התבנית plugin_[name]',
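# The plugin naming convention above, spelled out with a hypothetical plugin name:
#     models/plugin_comments.py
#     controllers/plugin_comments.py
#     static/plugin_comments/...
# web2py groups every file or folder matching plugin_[name] into that plugin, so it
# can be packed and installed as a unit from the admin interface.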
'To emulate a breakpoint programatically, write:': 'To emulate a breakpoint programmatically, write:',
'to use the debugger!': 'to use the debugger!',
'toggle breakpoint': 'toggle breakpoint',
'Toggle comment': 'Toggle comment',
'Toggle Fullscreen': 'Toggle Fullscreen',
'Traceback': 'Traceback',
'translation strings for the application': 'מחרוזות תרגום עבור האפליקציה',
'Translation strings for the application': 'Translation strings for the application',
'try': 'נסה',
'try something like': 'נסה משהו כמו',
'Try the mobile interface': 'Try the mobile interface',
'try view': 'try view',
'Two-step Login Authentication Code': 'Two-step Login Authentication Code',
'Type PDB debugger command in here and hit Return (Enter) to execute it.': 'Type PDB debugger command in here and hit Return (Enter) to execute it.',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
'Unable to check for upgrades': 'לא ניתן היה לבדוק אם יש שדרוגים',
'unable to create application "%s"': 'נכשל ביצירת האפליקציה "%s"',
'unable to delete file "%(filename)s"': 'נכשל במחיקת הקובץ "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'נכשל במחיקת התוסף "%(plugin)s"',
'Unable to determine the line number!': 'Unable to determine the line number!',
'Unable to download app because:': 'לא ניתן היה להוריד את האפליקציה כי:',
'Unable to download because': 'לא הצלחתי להוריד כי',
'unable to download layout': 'unable to download layout',
'unable to download plugin: %s': 'unable to download plugin: %s',
'Unable to download the list of plugins': 'Unable to download the list of plugins',
'unable to install plugin "%s"': 'unable to install plugin "%s"',
'unable to parse csv file': 'לא הצלחתי לנתח את הקלט של קובץ csv',
'Unable to send email': 'Unable to send email',
'unable to uninstall "%s"': 'לא ניתן להסיר את "%s"',
'unable to upgrade because "%s"': 'לא ניתן היה לשדרג כי "%s"',
'uncheck all': 'הסר סימון מהכל',
'Uninstall': 'הסר התקנה',
'Unsupported webserver working mode: %s': 'Unsupported webserver working mode: %s',
'update': 'עדכן',
'update all languages': 'עדכן את כלל קבצי השפה',
'Update:': 'עדכן:',
'Upgrade': 'Upgrade',
'upgrade now': 'upgrade now',
'upgrade now to %s': 'upgrade now to %s',
'upgrade web2py now': 'שדרג את web2py עכשיו',
'upload': 'upload',
'Upload': 'Upload',
'Upload & install packed application': 'העלה והתקן אפליקציה ארוזה',
'Upload a package:': 'Upload a package:',
'Upload and install packed application': 'Upload and install packed application',
'upload application:': 'העלה אפליקציה:',
'upload file:': 'העלה קובץ:',
'upload plugin file:': 'העלה קובץ תוסף:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'השתמש ב (...)&(...) עבור תנאי AND, (...)|(...) עבור תנאי OR ו~(...) עבור תנאי NOT ליצירת שאילתות מורכבות',
'Use an url:': 'Use an url:',
'User': 'User',
'User %(id)s is impersonating %(other_id)s': 'User %(id)s is impersonating %(other_id)s',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Password changed': 'User %(id)s Password changed',
'User %(id)s Password reset': 'User %(id)s Password reset',
'User %(id)s Password retrieved': 'User %(id)s Password retrieved',
'User %(id)s Profile updated': 'User %(id)s Profile updated',
'User %(id)s Registered': 'User %(id)s Registered',
'User %(id)s Username retrieved': 'User %(id)s Username retrieved',
'User %(id)s Verification email sent': 'User %(id)s Verification email sent',
'User %(id)s verified registration key': 'User %(id)s verified registration key',
'User ID': 'User ID',
'Username': 'Username',
'Username already taken': 'Username already taken',
'Username retrieve': 'Username retrieve',
'Users': 'Users',
'Using the shell may lock the database to other users of this app.': 'Using the shell may lock the database to other users of this app.',
'variables': 'משתנים',
'Verify Password': 'Verify Password',
'Version': 'גירסא',
'Versioning': 'Versioning',
'versioning': 'מנגנון גירסאות',
'view': 'הצג',
'Views': 'תצוגות',
'views': 'תצוגות',
'Warning!': 'Warning!',
'WARNING:': 'WARNING:',
'WARNING: The following views could not be compiled:': 'WARNING: The following views could not be compiled:',
'Web Framework': 'Web Framework',
'web2py Admin Password': 'web2py Admin Password',
'web2py apps to deploy': 'web2py apps to deploy',
'web2py Debugger': 'web2py Debugger',
'web2py downgrade': 'web2py downgrade',
'web2py is up to date': 'web2py מותקנת בגירסתה האחרונה',
'web2py online debugger': 'web2py online debugger',
'web2py Recent Tweets': 'ציוצים אחרונים של web2py',
'web2py upgrade': 'web2py upgrade',
'web2py upgraded; please restart it': 'web2py שודרגה; נא אתחל אותה',
'Welcome %(username)s! Click on the link %(link)s to verify your email': 'Welcome %(username)s! Click on the link %(link)s to verify your email',
'Working...': 'Working...',
'WSGI reference name': 'WSGI reference name',
'YES': 'כן',
'Yes': 'Yes',
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button',
'You can inspect variables using the console below': 'You can inspect variables using the console below',
'You have been invited to join %(site)s, click %(link)s to complete the process': 'You have been invited to join %(site)s, click %(link)s to complete the process',
'You have one more login attempt before you are locked out': 'You have one more login attempt before you are locked out',
'You need to set up and reach a': 'You need to set up and reach a',
'You only need these if you have already registered': 'You only need these if you have already registered',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Your application will be blocked until you click an action button (next, step, continue, etc.)',
'Your password is: %(password)s': 'Your password is: %(password)s',
'Your temporary login code is {0}': 'Your temporary login code is {0}',
'Your username is: %(username)s': 'Your username is: %(username)s',
'Your username was emailed to you': 'Your username was emailed to you',
}
'Record Deleted': 'Record Deleted',
'record does not exist': 'הרשומה אינה קיימת',
'record id': 'מזהה רשומה',
'Record id': 'Record id',
'Record ID': 'Record ID',
'Record Updated': 'Record Updated',
'refresh': 'refresh',
'register': 'register',
'Registration identifier': 'Registration identifier',
'Registration is pending approval': 'Registration is pending approval',
'Registration key': 'Registration key',
'Registration needs verification': 'Registration needs verification',
'Registration successful': 'Registration successful',
'Reload routes': 'Reload routes',
'Remember me (for 30 days)': 'Remember me (for 30 days)',
'Remove compiled': 'הסר מקומפל',
'Removed Breakpoint on %s at line %s': 'Removed Breakpoint on %s at line %s',
'Replace': 'Replace',
'Replace All': 'Replace All',
'Repository (%s)': 'Repository (%s)',
'request': 'request',
'Request reset password': 'Request reset password',
'requires distutils, but not installed': 'requires distutils, but not installed',
'requires python-git, but not installed': 'requires python-git, but not installed',
'Reset Password key': 'Reset Password key',
'Resolve Conflict file': 'הסר קובץ היוצר קונפליקט',
'response': 'response',
'restart': 'restart',
'restore': 'שחזר',
'return': 'return',
'Revert': 'Revert',
'revert': 'חזור לגירסא קודמת',
'reverted to revision %s': 'reverted to revision %s',
'Revision %s': 'Revision %s',
'Revision:': 'Revision:',
'Role': 'Role',
'Roles': 'Roles',
'Rows in Table': 'Rows in Table',
'Rows in table': 'רשומות בטבלה',
'Rows selected': 'רשומות נבחרו',
'rules are not defined': 'rules are not defined',
'Run tests': 'Run tests',
'Run tests in this file': 'Run tests in this file',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
'Running on %s': 'Running on %s',
'Save': 'Save',
'Save file:': 'Save file:',
'Save file: %s': 'Save file: %s',
'Save model as...': 'Save model as...',
'Save via Ajax': 'Save via Ajax',
'Saved file hash:': 'גיבוב הקובץ השמור:',
'Screenshot %s': 'Screenshot %s',
'Search': 'Search',
'Select Files to Package': 'Select Files to Package',
'selected': 'נבחרו',
'session': 'session',
'session expired': 'תם הסשן',
'Session saved correctly': 'Session saved correctly',
'Session saved on session only': 'Session saved on session only',
'Set Breakpoint on %s at line %s: %s': 'Set Breakpoint on %s at line %s: %s',
'shell': 'שורת פקודה',
'Showing %s to %s of %s %s found': 'Showing %s to %s of %s %s found',
'Sign Up': 'Sign Up',
'Singular Form': 'Singular Form',
'Site': 'אתר',
'Size of cache:': 'Size of cache:',
'skip to generate': 'skip to generate',
'some files could not be removed': 'לא ניתן היה להסיר חלק מהקבצים',
'Something went wrong please wait a few minutes before retrying': 'Something went wrong please wait a few minutes before retrying',
'Sorry, could not find mercurial installed': 'Sorry, could not find mercurial installed',
'source : db': 'source : db',
'source : filesystem': 'source : filesystem',
'Start a new app': 'Start a new app',
'Start searching': 'Start searching',
'Start wizard': 'start wizard',
'state': 'מצב',
'Static': 'Static',
'static': 'קבצים סטאטיים',
'Static files': 'קבצים סטאטיים',
'Statistics': 'Statistics',
'Step': 'Step',
'step': 'step',
'stop': 'stop',
'submit': 'שלח',
'Submit': 'Submit',
'successful': 'successful',
'Sure you want to delete this object?': 'האם אתה בטוח שברצונך למחוק אובייקט זה?',
'switch to : db': 'switch to : db',
'switch to : filesystem': 'switch to : filesystem',
'Tab width (# characters)': 'Tab width (# characters)',
'table': 'טבלה',
'Table': 'Table',
'Temporary': 'Temporary',
'test': 'בדיקות',
'Testing application': 'בודק את האפליקציה',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"שאליתה" היא תנאי כגון "db1.table1.filed1=\'value\'" ביטוי כמו db.table1.field1=db.table2.field1 יחולל join',
'The app exists, was created by wizard, continue to overwrite!': 'The app exists, was created by wizard, continue to overwrite!',
'The app exists, was NOT created by wizard, continue to overwrite!': 'The app exists, was NOT created by wizard, continue to overwrite!',
'the application logic, each URL path is mapped in one exposed function in the controller': 'הלוגיקה של האפליקציה, כל url ממופה לפונקציה חשופה בבקר',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'the data representation, define database tables and sets': 'ייצוג המידע, בו מוגדרים טבלאות ומבנים',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'the presentations layer, views are also known as templates': 'שכבת התצוגה, המכונה גם template',
'Theme': 'Theme',
'There are no controllers': 'אין בקרים',
'There are no models': 'אין מבני נתונים',
'There are no modules': 'אין מודולים',
'There are no plugins': 'There are no plugins',
'There are no private files': 'There are no private files',
'There are no static files': 'אין קבצים סטאטיים',
'There are no translators': 'There are no translators',
'There are no translators, only default language is supported': 'אין תרגומים. רק שפת ברירת המחדל נתמכת',
'There are no views': 'אין קבצי תצוגה',
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
'these files are served without processing, your images go here': 'אלו הם קבצים הנשלחים מהשרת ללא עיבוד. הכנס את התמונות כאן',
'This code was emailed to you and is required for login.': 'This code was emailed to you and is required for login.',
"This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.": "This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.",
'This email already has an account': 'This email already has an account',
'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk',
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
'This is the %(filename)s template': 'זוהי תבנית הקובץ %(filename)s ',
"This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.": "This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.",
'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.': 'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.',
'this page to see if a breakpoint was hit and debug interaction is required.': 'this page to see if a breakpoint was hit and debug interaction is required.',
'This will pull changes from the remote repo for application "%s"?': 'This will pull changes from the remote repo for application "%s"?',
'This will push changes to the remote repo for application "%s".': 'This will push changes to the remote repo for application "%s".',
'Ticket': 'דו"ח שגיאה',
'Ticket ID': 'Ticket ID',
'Ticket Missing': 'Ticket Missing',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Timestamp',
'TM': 'סימן רשום',
'to previous version.': 'אין גירסא קודמת',
'To create a plugin, name a file/folder plugin_[name]': 'כדי ליצור תוסף, קרא לקובץ או סיפריה בשם לפי התבנית plugin_[name]',
'To emulate a breakpoint programatically, write:': 'To emulate a breakpoint programatically, write:',
'to use the debugger!': 'to use the debugger!',
'toggle breakpoint': 'toggle breakpoint',
'Toggle comment': 'Toggle comment',
'Toggle Fullscreen': 'Toggle Fullscreen',
'Traceback': 'Traceback',
'translation strings for the application': 'מחרוזות תרגום עבור האפליקציה',
'Translation strings for the application': 'Translation strings for the application',
'try': 'נסה',
'try something like': 'נסה משהו כמו',
'Try the mobile interface': 'Try the mobile interface',
'try view': 'try view',
'Two-step Login Authentication Code': 'Two-step Login Authentication Code',
'Type PDB debugger command in here and hit Return (Enter) to execute it.': 'Type PDB debugger command in here and hit Return (Enter) to execute it.',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
'Unable to check for upgrades': 'לא ניתן היה לבדוק אם יש שדרוגים',
'unable to create application "%s"': 'נכשל ביצירת האפליקציה "%s"',
'unable to delete file "%(filename)s"': 'נכשל במחיקת הקובץ "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'נכשל במחיקת התוסף "%(plugin)s"',
'Unable to determine the line number!': 'Unable to determine the line number!',
'Unable to download app because:': 'לא ניתן היה להוריד את האפליקציה כי:',
'Unable to download because': 'לא הצלחתי להוריד כי',
'unable to download layout': 'unable to download layout',
'unable to download plugin: %s': 'unable to download plugin: %s',
'Unable to download the list of plugins': 'Unable to download the list of plugins',
'unable to install plugin "%s"': 'unable to install plugin "%s"',
'unable to parse csv file': 'לא הצלחתי לנתח את הקלט של קובץ csv',
'Unable to send email': 'Unable to send email',
'unable to uninstall "%s"': 'לא ניתן להסיר את "%s"',
'unable to upgrade because "%s"': 'לא ניתן היה לשדרג כי "%s"',
'uncheck all': 'הסר סימון מהכל',
'Uninstall': 'הסר התקנה',
'Unsupported webserver working mode: %s': 'Unsupported webserver working mode: %s',
'update': 'עדכן',
'update all languages': 'עדכן את כלל קיבצי השפה',
'Update:': 'עדכן:',
'Upgrade': 'Upgrade',
'upgrade now': 'upgrade now',
'upgrade now to %s': 'upgrade now to %s',
'upgrade web2py now': 'שדרג את web2py עכשיו',
'upload': 'upload',
'Upload': 'Upload',
'Upload & install packed application': 'העלה והתקן אפליקציה ארוזה',
'Upload a package:': 'Upload a package:',
'Upload and install packed application': 'Upload and install packed application',
'upload application:': 'העלה אפליקציה:',
'upload file:': 'העלה קובץ:',
'upload plugin file:': 'העלה קובץ תוסף:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'השתמש ב (...)&(...) עבור תנאי AND, (...)|(...) עבור תנאי OR ו~(...) עבור תנאי NOT ליצירת שאילתות מורכבות',
'Use an url:': 'Use an url:',
'User': 'User',
'User %(id)s is impersonating %(other_id)s': 'User %(id)s is impersonating %(other_id)s',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Password changed': 'User %(id)s Password changed',
'User %(id)s Password reset': 'User %(id)s Password reset',
'User %(id)s Password retrieved': 'User %(id)s Password retrieved',
'User %(id)s Profile updated': 'User %(id)s Profile updated',
'User %(id)s Registered': 'User %(id)s Registered',
'User %(id)s Username retrieved': 'User %(id)s Username retrieved',
'User %(id)s Verification email sent': 'User %(id)s Verification email sent',
'User %(id)s verified registration key': 'User %(id)s verified registration key',
'User ID': 'User ID',
'Username': 'Username',
'Username already taken': 'Username already taken',
'Username retrieve': 'Username retrieve',
'Users': 'Users',
'Using the shell may lock the database to other users of this app.': 'Using the shell may lock the database to other users of this app.',
'variables': 'משתנים',
'Verify Password': 'Verify Password',
'Version': 'גירסא',
'Versioning': 'Versioning',
'versioning': 'מנגנון גירסאות',
'view': 'הצג',
'Views': 'מראה',
'views': 'מראה',
'Warning!': 'Warning!',
'WARNING:': 'WARNING:',
'WARNING: The following views could not be compiled:': 'WARNING: The following views could not be compiled:',
'Web Framework': 'Web Framework',
'web2py Admin Password': 'web2py Admin Password',
'web2py apps to deploy': 'web2py apps to deploy',
'web2py Debugger': 'web2py Debugger',
'web2py downgrade': 'web2py downgrade',
'web2py is up to date': 'web2py מותקנת בגירסתה האחרונה',
'web2py online debugger': 'web2py online debugger',
'web2py Recent Tweets': 'ציוצים אחרונים של web2py',
'web2py upgrade': 'web2py upgrade',
'web2py upgraded; please restart it': 'web2py שודרגה; נא אתחל אותה',
'Welcome %(username)s! Click on the link %(link)s to verify your email': 'Welcome %(username)s! Click on the link %(link)s to verify your email',
'Working...': 'Working...',
'WSGI reference name': 'WSGI reference name',
'YES': 'כן',
'Yes': 'Yes',
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button',
'You can inspect variables using the console below': 'You can inspect variables using the console below',
'You have been invited to join %(site)s, click %(link)s to complete the process': 'You have been invited to join %(site)s, click %(link)s to complete the process',
'You have one more login attempt before you are locked out': 'You have one more login attempt before you are locked out',
'You need to set up and reach a': 'You need to set up and reach a',
'You only need these if you have already registered': 'You only need these if you have already registered',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Your application will be blocked until you click an action button (next, step, continue, etc.)',
'Your password is: %(password)s': 'Your password is: %(password)s',
'Your temporary login code is {0}': 'Your temporary login code is {0}',
'Your username is: %(username)s': 'Your username is: %(username)s',
'Your username was emailed to you': 'Your username was emailed to you',
}
# --- file: google/ads/google_ads/v1/proto/services/asset_service_pb2_grpc.py (repo: jiulongw/google-ads-python, license: Apache-2.0) ---

# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.ads.google_ads.v1.proto.resources import asset_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_asset__pb2
from google.ads.google_ads.v1.proto.services import asset_service_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2
class AssetServiceStub(object):
"""Proto file describing the Asset service.
    Service to manage assets. Asset types that can be created with AssetService are
YoutubeVideoAsset, MediaBundleAsset and ImageAsset. TextAsset should be
created with Ad inline.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetAsset = channel.unary_unary(
'/google.ads.googleads.v1.services.AssetService/GetAsset',
request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.GetAssetRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_asset__pb2.Asset.FromString,
)
self.MutateAssets = channel.unary_unary(
'/google.ads.googleads.v1.services.AssetService/MutateAssets',
request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.MutateAssetsRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.MutateAssetsResponse.FromString,
)
class AssetServiceServicer(object):
"""Proto file describing the Asset service.
    Service to manage assets. Asset types that can be created with AssetService are
YoutubeVideoAsset, MediaBundleAsset and ImageAsset. TextAsset should be
created with Ad inline.
"""
def GetAsset(self, request, context):
"""Returns the requested asset in full detail.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def MutateAssets(self, request, context):
"""Creates assets. Operation statuses are returned.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_AssetServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetAsset': grpc.unary_unary_rpc_method_handler(
servicer.GetAsset,
request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.GetAssetRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_asset__pb2.Asset.SerializeToString,
),
'MutateAssets': grpc.unary_unary_rpc_method_handler(
servicer.MutateAssets,
request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.MutateAssetsRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.MutateAssetsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.ads.googleads.v1.services.AssetService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
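# --- Editor's usage sketch (appended; not part of the generated file above) ---
# A minimal, hypothetical wiring of the generated pieces: serve the base
# servicer and call it through the stub. The port, worker count, and the empty
# GetAssetRequest are illustrative assumptions, not values from the source.
if __name__ == '__main__':
    from concurrent import futures

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    add_AssetServiceServicer_to_server(AssetServiceServicer(), server)
    server.add_insecure_port('[::]:50051')  # assumed local, unauthenticated port
    server.start()

    channel = grpc.insecure_channel('localhost:50051')
    stub = AssetServiceStub(channel)
    request = google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_asset__service__pb2.GetAssetRequest()
    try:
        stub.GetAsset(request)
    except grpc.RpcError as exc:
        print(exc.code())  # the base servicer answers StatusCode.UNIMPLEMENTED

    server.stop(None)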
# --- file: uhd_restpy/testplatform/sessions/ixnetwork/impairment/link/link.py (repo: OpenIxia/ixnetwork_restpy, license: MIT) ---

# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class Link(Base):
"""List of impairment links. Each link consists of a pair of ports.
The Link class encapsulates a list of link resources that are managed by the system.
A list of resources can be retrieved from the server using the Link.find() method.
"""
__slots__ = ()
_SDM_NAME = 'link'
_SDM_ATT_MAP = {
'ForwardingInterruption': 'forwardingInterruption',
'Name': 'name',
'RxPortName': 'rxPortName',
'TxPortName': 'txPortName',
}
_SDM_ENUM_MAP = {
}
def __init__(self, parent, list_op=False):
super(Link, self).__init__(parent, list_op)
@property
def LosLof(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.impairment.link.loslof.loslof.LosLof): An instance of the LosLof class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.impairment.link.loslof.loslof import LosLof
if self._properties.get('LosLof', None) is not None:
return self._properties.get('LosLof')
else:
return LosLof(self)._select()
@property
def ForwardingInterruption(self):
# type: () -> bool
"""
Returns
-------
- bool: Emulate a link fault. Drop all packets received.
"""
return self._get_attribute(self._SDM_ATT_MAP['ForwardingInterruption'])
@ForwardingInterruption.setter
def ForwardingInterruption(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['ForwardingInterruption'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: The name of the link: receiving port -> transmitting port.
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@property
def RxPortName(self):
# type: () -> str
"""
Returns
-------
- str: The name of the receiving port.
"""
return self._get_attribute(self._SDM_ATT_MAP['RxPortName'])
@property
def TxPortName(self):
# type: () -> str
"""
Returns
-------
- str: The name of the transmitting port.
"""
return self._get_attribute(self._SDM_ATT_MAP['TxPortName'])
def update(self, ForwardingInterruption=None):
# type: (bool) -> Link
"""Updates link resource on the server.
Args
----
- ForwardingInterruption (bool): Emulate a link fault. Drop all packets received.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ForwardingInterruption=None):
# type: (bool) -> Link
"""Adds a new link resource on the json, only valid with config assistant
Args
----
- ForwardingInterruption (bool): Emulate a link fault. Drop all packets received.
Returns
-------
- self: This instance with all currently retrieved link resources using find and the newly added link resources available through an iterator or index
Raises
------
- Exception: if this function is not being used with config assistance
"""
return self._add_xpath(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(self, ForwardingInterruption=None, Name=None, RxPortName=None, TxPortName=None):
# type: (bool, str, str, str) -> Link
"""Finds and retrieves link resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve link resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all link resources from the server.
Args
----
- ForwardingInterruption (bool): Emulate a link fault. Drop all packets received.
- Name (str): The name of the link: receiving port -> transmitting port.
- RxPortName (str): The name of the receiving port.
- TxPortName (str): The name of the transmitting port.
Returns
-------
- self: This instance with matching link resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of link data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the link resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
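# --- Editor's usage sketch (appended; not part of the module above) ---
# Illustrates the find/update pattern this class exposes, using only the
# attributes defined above. The `ixnetwork` root node is assumed to come from
# an established uhd_restpy session, and the port name is a placeholder.
def _demo_emulate_link_fault(ixnetwork):
    # find() filters server-side with regex; ^...$ forces an exact match
    for link in ixnetwork.Impairment.Link.find(RxPortName='^Port1$'):
        print('%s -> %s' % (link.RxPortName, link.TxPortName))
        link.ForwardingInterruption = True  # drop all packets on this link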
# --- file: plugins/modules/oracle_profile.py (repo: blaf-cgi/ansible-oracle-modules, license: MIT) ---

#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: oracle_profile
short_description: Manage profiles in an Oracle database
description:
- Manage profiles in an Oracle database
version_added: "0.8.0"
options:
name:
description:
- The name of the profile
required: true
default: None
aliases: ['profile']
state:
description:
- The intended state of the profile.
default: present
choices: ['present','absent']
attribute_name:
description:
- The attribute name (e.g PASSWORD_REUSE_TIME)
default: None
aliases: ['an']
attribute_value:
description:
- The attribute value (e.g 10)
default: None
aliases: ['av']
    user:
        description:
            - The DB username
        required: false
        default: sys
        aliases: ['un', 'username']
password:
description:
- The password for the DB user
required: false
default: None
aliases: ['pw']
service_name:
description:
            - The service_name to connect to the database.
required: false
aliases: ['sn']
hostname:
description:
- The host of the database if using dbms_profile
required: false
default: localhost
aliases: ['host']
port:
description:
- The listener port to connect to the database if using dbms_profile
required: false
default: 1521
oracle_home:
description:
- The GI ORACLE_HOME
required: false
default: None
aliases: ['oh']
notes:
- cx_Oracle needs to be installed
requirements: [ "cx_Oracle" ]
author: Mikael Sandström, oravirt@gmail.com, @oravirt
'''
EXAMPLES = '''
# Create a profile
- hosts: dbserver
vars:
oracle_home: /u01/app/oracle/12.2.0.1/db1
hostname: "{{ inventory_hostname }}"
service_name: orclpdb
user: system
password: Oracle_123
oracle_env:
ORACLE_HOME: "{{ oracle_home }}"
LD_LIBRARY_PATH: "{{ oracle_home }}/lib"
profiles:
- name: profile1
attribute_name:
- password_reuse_max
- password_reuse_time
- sessions_per_user
attribute_value:
- 6
- 20
- 5
state: present
tasks:
- name: Manage profiles
oracle_profile:
name={{ item.name }}
attribute_name={{ item.attribute_name}}
attribute_value={{ item.attribute_value}}
state={{ item.state }}
hostname={{ hostname }}
service_name={{ service_name }}
user={{ user }}
password={{ password }}
environment: "{{oracle_env}}"
with_items: "{{ profiles }}"
'''
try:
import cx_Oracle
except ImportError:
cx_oracle_exists = False
else:
cx_oracle_exists = True
# Check if the profile exists
def check_profile_exists(cursor, module, msg, name):
sql = 'select count(*) from dba_profiles where lower (profile) = \'%s\'' % (name.lower())
result = execute_sql_get(module, msg, cursor, sql)
if result[0][0] > 0:
return True
else:
return False
def create_profile(cursor, module, msg, oracle_home, name, attribute_name, attribute_value):
    # Attributes are appended only when every name is actually supplied
    # (neither None nor the string 'None'); the module marks attribute_name
    # and attribute_value as required, so this is the normal path.
    add_attr = not any(x is None or x == 'None' for x in attribute_name)
    sql = 'create profile %s' % name
    if add_attr:
        attributes = ' '.join('%s %s' % (n, v) for n, v in zip(attribute_name, attribute_value))
        sql += ' limit %s' % attributes.lower()
    if execute_sql(module, msg, cursor, sql):
        return True
    else:
        return False
def remove_profile(cursor, module, msg, oracle_home, name):
dropsql = 'drop profile %s' % name
if execute_sql(module, msg, cursor, dropsql):
return True
else:
return False
def ensure_profile_state(cursor, module, msg, name, state, attribute_name, attribute_value):
    total_sql = []
    # Deal with attribute differences
    if attribute_name and attribute_value:
        # Make sure attributes are lower case
        attribute_name = [x.lower() for x in attribute_name]
        attribute_value = [str(y).lower() for y in attribute_value]
        # Materialize the pairs: zip() is a one-shot iterator on Python 3, and
        # this list is joined over, converted to a dict, and iterated below.
        wanted_attributes = list(zip(attribute_name, attribute_value))
        # Check the current attributes
        attribute_names_ = ','.join(['\'' + n[0] + '\'' for n in wanted_attributes])
        if len(attribute_names_) != 0:
            current_attributes = get_current_attributes(cursor, module, msg, name, attribute_names_)
            # Convert to dict and compare current with wanted
            if dict(current_attributes) != dict(wanted_attributes):
                for i in wanted_attributes:
                    total_sql.append("alter profile %s limit %s %s " % (name, i[0], i[1]))
    if len(total_sql) > 0:
        if ensure_profile_state_sql(module, msg, cursor, total_sql):
            msg = 'profile %s has been put in the intended state' % name
            module.exit_json(msg=msg, changed=True)
        else:
            return False
    else:
        msg = 'Nothing to do'
        module.exit_json(msg=msg, changed=False)
def ensure_profile_state_sql(module, msg, cursor, total_sql):
for sql in total_sql:
execute_sql(module, msg, cursor, sql)
return True
def get_current_attributes(cursor, module, msg, name, attribute_names_):
sql = 'select lower(resource_name),lower(limit) '
sql += 'from dba_profiles '
sql += 'where lower(profile) = \'%s\' ' % (name.lower())
sql += 'and lower(resource_name) in (%s) ' % (attribute_names_.lower())
result = execute_sql_get(module, msg, cursor, sql)
return result
def execute_sql_get(module, msg, cursor, sql):
try:
cursor.execute(sql)
result = (cursor.fetchall())
except cx_Oracle.DatabaseError as exc:
error, = exc.args
msg = 'Something went wrong while executing sql_get - %s sql: %s' % (error.message, sql)
module.fail_json(msg=msg, changed=False)
return False
return result
def execute_sql(module, msg, cursor, sql):
try:
cursor.execute(sql)
except cx_Oracle.DatabaseError as exc:
error, = exc.args
msg = 'Something went wrong while executing sql - %s sql: %s' % (error.message, sql)
module.fail_json(msg=msg, changed=False)
return False
return True
def main():
msg = ['']
cursor = None
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, aliases=['profile']),
attribute_name=dict(required=True, type='list', aliases=['an']),
attribute_value=dict(required=True, type='list', aliases=['av']),
state=dict(default="present", choices=["present", "absent"]),
user=dict(required=False, aliases=['un', 'username']),
password=dict(required=False, no_log=True, aliases=['pw']),
mode=dict(default='normal', choices=["normal", "sysdba"]),
hostname=dict(required=False, default='localhost', aliases=['host']),
port=dict(required=False, default=1521),
service_name=dict(required=False, aliases=['sn']),
oracle_home=dict(required=False, aliases=['oh']),
),
)
name = module.params["name"]
attribute_name = module.params["attribute_name"]
attribute_value = module.params["attribute_value"]
state = module.params["state"]
user = module.params["user"]
password = module.params["password"]
mode = module.params["mode"]
hostname = module.params["hostname"]
port = module.params["port"]
service_name = module.params["service_name"]
oracle_home = module.params["oracle_home"]
if not cx_oracle_exists:
msg = "The cx_Oracle module is required. 'pip install cx_Oracle' should do the trick. If cx_Oracle is installed, make sure ORACLE_HOME & LD_LIBRARY_PATH is set"
module.fail_json(msg=msg)
wallet_connect = '/@%s' % service_name
try:
if (
not user and not password): # If neither user or password is supplied, the use of an oracle wallet is assumed
connect = wallet_connect
if mode == 'sysdba':
conn = cx_Oracle.connect(wallet_connect, mode=cx_Oracle.SYSDBA)
else:
conn = cx_Oracle.connect(wallet_connect)
elif user and password:
dsn = cx_Oracle.makedsn(host=hostname, port=port, service_name=service_name)
connect = dsn
if mode == 'sysdba':
conn = cx_Oracle.connect(user, password, dsn, mode=cx_Oracle.SYSDBA)
else:
conn = cx_Oracle.connect(user, password, dsn)
elif not user or not password:
module.fail_json(msg='Missing username or password for cx_Oracle')
except cx_Oracle.DatabaseError as exc:
error, = exc.args
msg = 'Could not connect to DB: %s, connect descriptor: %s, username: %s, pass: %s' % (
error.message, connect, user, password)
module.fail_json(msg=msg, changed=False)
cursor = conn.cursor()
if oracle_home is not None:
os.environ['ORACLE_HOME'] = oracle_home
elif 'ORACLE_HOME' in os.environ:
oracle_home = os.environ['ORACLE_HOME']
else:
msg = 'ORACLE_HOME variable not set. Please set it and re-run the command'
module.fail_json(msg=msg, changed=False)
if state == 'present':
if not check_profile_exists(cursor, module, msg, name):
if create_profile(cursor, module, msg, oracle_home, name, attribute_name, attribute_value):
msg = 'Successfully created profile %s ' % name
module.exit_json(msg=msg, changed=True)
else:
module.fail_json(msg=msg, changed=False)
else:
ensure_profile_state(cursor, module, msg, name, state, attribute_name, attribute_value)
elif state == 'absent':
if check_profile_exists(cursor, module, msg, name):
if remove_profile(cursor, module, msg, oracle_home, name):
msg = 'Profile %s successfully removed' % name
module.exit_json(msg=msg, changed=True)
else:
module.exit_json(msg=msg, changed=False)
else:
msg = 'Profile %s doesn\'t exist' % name
module.exit_json(msg=msg, changed=False)
module.exit_json(msg="Unhandled exit", changed=False)
from ansible.module_utils.basic import AnsibleModule, os
if __name__ == '__main__':
main()
# --- file: azure_iot_hub/azure/templates/led_matrix_esp32_iot_hub/__init__.py (repo: codycodes/gix-mkrfridays-iot, license: MIT) ---

import logging
import azure.functions as func
from azure.iot.hub import IoTHubRegistryManager
# Note that Azure Key Vault doesn't support underscores
# and some other special chars;
# we substitute with a hyphen for underscore
CONNECTION_STRING = "{c2d connection string}"
DEVICE_ID = "{device to invoke}"
MESSAGE_COUNT = 1
def iothub_messaging_sample_run(msg):
try:
# IoTHubRegistryManager
registry_manager = IoTHubRegistryManager(CONNECTION_STRING)
for i in range(0, MESSAGE_COUNT):
logging.info('Sending message: {0}'.format(i))
data = msg
props = {}
registry_manager.send_c2d_message(
DEVICE_ID,
data,
properties=props)
except Exception as ex:
logging.info(f"Unexpected error {ex}")
return
def main(req: func.HttpRequest) -> func.HttpResponse:
logging.info('Python HTTP trigger function processed a request.')
msg = req.params.get('msg')
if not msg:
try:
req_body = req.get_json()
except ValueError:
pass
else:
msg = req_body.get('msg')
logging.info('***NOW EXECUTING C2D***')
if msg:
# TODO: this whitespace is to push some unicode chars off
# the screen; can be removed later when Arduino code is
# fixed
iothub_messaging_sample_run(msg)
return func.HttpResponse(
f"Your text {msg} has been deployed to the"
" device successfully!")
else:
return func.HttpResponse(
"This HTTP triggered function executed successfully."
" Pass a msg in the query string or in the request body"
" for a personalized response.",
status_code=200
)
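# --- Editor's test sketch (appended; not part of the function app) ---
# Azure Functions HTTP handlers can be exercised by constructing a
# func.HttpRequest directly, as in the official unit-testing docs. The URL and
# msg values are placeholders, and the IoT Hub call will fail without a real
# connection string and device.
if __name__ == '__main__':
    req = func.HttpRequest(
        method='GET',
        url='/api/led_matrix_esp32_iot_hub',
        params={'msg': 'hello matrix'},
        body=b'',
    )
    resp = main(req)
    print(resp.status_code, resp.get_body())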
# --- file: images/orbit-controller/src/orbit_controller/webhooks/imagereplication_pod_webhook.py (repo: srinivasreddych/aws-orbit-workbench, license: Apache-2.0) ---

# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from copy import deepcopy
from typing import Any, Dict, List, Optional
import kopf
from orbit_controller import ORBIT_API_GROUP, ORBIT_API_VERSION, dynamic_client
from orbit_controller.utils import imagereplication_utils
CONFIG: Dict[str, Any]
@kopf.on.startup()
def configure(settings: kopf.OperatorSettings, logger: kopf.Logger, **_: Any) -> None:
settings.admission.server = kopf.WebhookServer(
cafile="/certs/ca.crt",
certfile="/certs/tls.crt",
pkeyfile="/certs/tls.key",
port=443,
)
settings.persistence.progress_storage = kopf.MultiProgressStorage(
[
kopf.AnnotationsProgressStorage(prefix="orbit.aws"),
kopf.StatusProgressStorage(field="status.orbit-aws"),
]
)
settings.persistence.finalizer = "imagereplication-pod-webhook.orbit.aws/kopf-finalizer"
settings.posting.level = logging.getLevelName(os.environ.get("EVENT_LOG_LEVEL", "INFO"))
global CONFIG
CONFIG = imagereplication_utils.get_config()
logger.info("CONFIG: %s", CONFIG)
def _check_replication_status(value: str, **_: Any) -> bool:
return value not in ["Failed", "MaxAttemptsExceeded"]
@kopf.index( # type: ignore
ORBIT_API_GROUP,
ORBIT_API_VERSION,
"imagereplications",
field="status.replication.replicationStatus",
value=_check_replication_status,
)
def imagereplications_idx(namespace: str, name: str, spec: kopf.Spec, status: kopf.Status, **_: Any) -> Dict[str, Any]:
replication_status = status.get("replication", {}).get("replicationStatus", None)
return {
spec["destination"]: {
"namespace": namespace,
"name": name,
"source": spec["source"],
"replicationStatus": replication_status,
}
}
@kopf.on.mutate("pods", id="update-pod-images") # type: ignore
def update_pod_images(
spec: kopf.Spec,
patch: kopf.Patch,
dryrun: bool,
logger: kopf.Logger,
imagereplications_idx: kopf.Index[str, str],
**_: Any,
) -> kopf.Patch:
if dryrun:
logger.debug("DryRun - Skip Pod Mutation")
return patch
annotations = {}
init_containers: List[Dict[str, Any]] = []
containers: List[Dict[str, Any]] = []
replications = {}
def process_containers(
src_containers: Optional[List[Dict[str, Any]]], dest_containers: List[Dict[str, Any]]
) -> None:
for container in src_containers if src_containers else []:
image = container.get("image", "")
desired_image = imagereplication_utils.get_desired_image(image=image, config=CONFIG)
if image != desired_image:
container_copy = deepcopy(container)
container_copy["image"] = desired_image
dest_containers.append(container_copy)
replications[image] = desired_image
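                # Note (added): "~1" below is the JSON Pointer escape for "/"
                # (RFC 6901), so the recorded annotation key reads
                # "original-container-image/<container name>".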
annotations[f"original-container-image~1{container['name']}"] = image
process_containers(spec.get("initContainers", []), init_containers)
process_containers(spec.get("containers", []), containers)
if replications:
client = dynamic_client()
for source, destination in replications.items():
if not imagereplications_idx.get(destination, []):
imagereplication_utils.create_imagereplication(
namespace="orbit-system",
source=source,
destination=destination,
client=client,
logger=logger,
)
else:
logger.debug("Skipping ImageReplication Creation")
if annotations:
patch["metadata"] = {"annotations": annotations}
patch["spec"] = {}
if init_containers:
patch["spec"]["initContainers"] = init_containers
if containers:
patch["spec"]["containers"] = containers
logger.debug("Patch: %s", str(patch))
return patch
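# Added run sketch (assumes the TLS material is mounted under /certs, as the
# startup handler above expects; `kopf run` is kopf's documented entry point):
#
#   kopf run --standalone imagereplication_pod_webhook.py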
| 35.553846 | 119 | 0.650151 |
import logging
import os
from copy import deepcopy
from typing import Any, Dict, List, Optional
import kopf
from orbit_controller import ORBIT_API_GROUP, ORBIT_API_VERSION, dynamic_client
from orbit_controller.utils import imagereplication_utils
CONFIG: Dict[str, Any]
@kopf.on.startup()
def configure(settings: kopf.OperatorSettings, logger: kopf.Logger, **_: Any) -> None:
settings.admission.server = kopf.WebhookServer(
cafile="/certs/ca.crt",
certfile="/certs/tls.crt",
pkeyfile="/certs/tls.key",
port=443,
)
settings.persistence.progress_storage = kopf.MultiProgressStorage(
[
kopf.AnnotationsProgressStorage(prefix="orbit.aws"),
kopf.StatusProgressStorage(field="status.orbit-aws"),
]
)
settings.persistence.finalizer = "imagereplication-pod-webhook.orbit.aws/kopf-finalizer"
settings.posting.level = logging.getLevelName(os.environ.get("EVENT_LOG_LEVEL", "INFO"))
global CONFIG
CONFIG = imagereplication_utils.get_config()
logger.info("CONFIG: %s", CONFIG)
def _check_replication_status(value: str, **_: Any) -> bool:
return value not in ["Failed", "MaxAttemptsExceeded"]
@kopf.index(
ORBIT_API_GROUP,
ORBIT_API_VERSION,
"imagereplications",
field="status.replication.replicationStatus",
value=_check_replication_status,
)
def imagereplications_idx(namespace: str, name: str, spec: kopf.Spec, status: kopf.Status, **_: Any) -> Dict[str, Any]:
replication_status = status.get("replication", {}).get("replicationStatus", None)
return {
spec["destination"]: {
"namespace": namespace,
"name": name,
"source": spec["source"],
"replicationStatus": replication_status,
}
}
@kopf.on.mutate("pods", id="update-pod-images")
def update_pod_images(
spec: kopf.Spec,
patch: kopf.Patch,
dryrun: bool,
logger: kopf.Logger,
imagereplications_idx: kopf.Index[str, str],
**_: Any,
) -> kopf.Patch:
if dryrun:
logger.debug("DryRun - Skip Pod Mutation")
return patch
annotations = {}
init_containers: List[Dict[str, Any]] = []
containers: List[Dict[str, Any]] = []
replications = {}
def process_containers(
src_containers: Optional[List[Dict[str, Any]]], dest_containers: List[Dict[str, Any]]
) -> None:
for container in src_containers if src_containers else []:
image = container.get("image", "")
desired_image = imagereplication_utils.get_desired_image(image=image, config=CONFIG)
if image != desired_image:
container_copy = deepcopy(container)
container_copy["image"] = desired_image
dest_containers.append(container_copy)
replications[image] = desired_image
annotations[f"original-container-image~1{container['name']}"] = image
process_containers(spec.get("initContainers", []), init_containers)
process_containers(spec.get("containers", []), containers)
if replications:
client = dynamic_client()
for source, destination in replications.items():
if not imagereplications_idx.get(destination, []):
imagereplication_utils.create_imagereplication(
namespace="orbit-system",
source=source,
destination=destination,
client=client,
logger=logger,
)
else:
logger.debug("Skipping ImageReplication Creation")
if annotations:
patch["metadata"] = {"annotations": annotations}
patch["spec"] = {}
if init_containers:
patch["spec"]["initContainers"] = init_containers
if containers:
patch["spec"]["containers"] = containers
logger.debug("Patch: %s", str(patch))
return patch
| true | true |
f724699755ba91bea524160dc912b306d1e17208 | 40 | py | Python | boa3_test/example/tuple_test/BoolTuple.py | jplippi/neo3-boa | 052be4adebb665113715bb80067d954f7ad85ad5 | [
"Apache-2.0"
] | 25 | 2020-07-22T19:37:43.000Z | 2022-03-08T03:23:55.000Z | boa3_test/example/tuple_test/BoolTuple.py | jplippi/neo3-boa | 052be4adebb665113715bb80067d954f7ad85ad5 | [
"Apache-2.0"
] | 419 | 2020-04-23T17:48:14.000Z | 2022-03-31T13:17:45.000Z | boa3_test/example/tuple_test/BoolTuple.py | jplippi/neo3-boa | 052be4adebb665113715bb80067d954f7ad85ad5 | [
"Apache-2.0"
] | 15 | 2020-05-21T21:54:24.000Z | 2021-11-18T06:17:24.000Z | def Main():
a = (True, True, False)
| 13.333333 | 27 | 0.525 | def Main():
a = (True, True, False)
| true | true |
f72469d5195720193ad83e59ae342ad8847b12fa | 11,453 | py | Python | planner_svg_gen.py | jc0a20/myplannergen | b1a19733fb393bae6159f12cfb1d239bb81d0c25 | [
"MIT"
] | null | null | null | planner_svg_gen.py | jc0a20/myplannergen | b1a19733fb393bae6159f12cfb1d239bb81d0c25 | [
"MIT"
] | 2 | 2020-05-01T13:54:12.000Z | 2020-05-01T13:55:24.000Z | planner_svg_gen.py | jc0a20/myplannergen | b1a19733fb393bae6159f12cfb1d239bb81d0c25 | [
"MIT"
] | null | null | null | import configparser
import os
import re
import subprocess
import sys
import PyPDF2
config = configparser.ConfigParser()
config.read('config.ini', encoding='utf-8')
INKSCAPE_PATH = config['DEFAULT']['InkscapePath']
def replace_text(target_doc, target_str, replace_str, id_str):
pattern = '''id="''' + id_str + '''".+?>''' + target_str + '''<.+?</text>'''
result = re.search(pattern, target_doc, re.S)
id_trim_str = target_doc[result.span()[0]: result.span()[1]]
pattern2 = ">" + target_str + "<"
result2 = re.search(pattern2, id_trim_str, re.S)
index_s, index_e = result.span()[0] + result2.span()[0] + 1, result.span()[0] + result2.span()[1] - 1
target_doc_new = target_doc[:index_s] + replace_str + target_doc[index_e:]
return target_doc_new
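# Illustrative example (added; hypothetical SVG snippet, not the real template):
#   replace_text('<text id="T1" x="0"><tspan>OLD</tspan></text>',
#                "OLD", "NEW", "T1")
#   -> '<text id="T1" x="0"><tspan>NEW</tspan></text>'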
def replace_color(target_doc, replace_rgb, id_str):
pattern = '''<rect.*?/>'''
result = re.findall(pattern, target_doc, re.S)
result_trim = ""
for i in result:
if id_str in i:
result_trim = i
    result2 = re.search(re.escape(result_trim), target_doc, re.S)  # escape: result_trim is literal SVG, not a pattern
tmp_trim_txt = target_doc[result2.span()[0]:result2.span()[1]]
pattern2 = '''fill:#'''
result3 = re.search(pattern2, tmp_trim_txt, re.S)
index_s, index_e = result2.span()[0] + result3.span()[1], result2.span()[0] + result3.span()[1] + 6
target_doc_new = target_doc[:index_s] + replace_rgb + target_doc[index_e:]
return target_doc_new
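# Illustrative example (added; hypothetical rect, not the real template):
#   replace_color('<rect id="day_rect_01L" style="fill:#FFFFFF"/>',
#                 "CCCCCC", "day_rect_01L")
#   -> '<rect id="day_rect_01L" style="fill:#CCCCCC"/>'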
def replace_blank(target_doc, replace_alpha, id_str):
pattern = '''<rect.*?/>'''
result = re.findall(pattern, target_doc, re.S)
result_trim = ""
for i in result:
if id_str in i:
result_trim = i
    result2 = re.search(re.escape(result_trim), target_doc, re.S)  # escape: result_trim is literal SVG, not a pattern
tmp_trim_txt = target_doc[result2.span()[0]:result2.span()[1]]
pattern2 = '''fill-opacity:'''
result3 = re.search(pattern2, tmp_trim_txt, re.S)
index_s, index_e = result2.span()[0] + result3.span()[1], result2.span()[0] + result3.span()[1] + 3
# print(target_doc[index_s:index_e])
target_doc_new = target_doc[:index_s] + str(replace_alpha) + target_doc[index_e:]
# print(target_doc_new[index_s:index_s+10])
return target_doc_new
day_ref_set = set(range(1, 31 + 1))
'''
Year, month, day, day of week, day name, override color, week number
2019,01,01,MON,New Year's Day,#ffb6c1,1
2019,01,02,TUE,2nd,#add8e6,1
2019,01,03,WED,3rd,,2
'''
WEEK_NUM_COLOR = ["#000000",
"#8b4513",
"#cd5c5c",
"#ff8c00",
"#ffd700",
"#006400",
"#4169e1",
"#9400d3",
"#808080"]
# read_list_a_month=[[2020,1,1,"MON","New Year's Day","#ffb6c1",1],
# [2020,1,2,"TUE","2nd","#add8e6",1],
# [2020,1,3,"WED","3rd","",2]]
with open('contents.csv', encoding='utf-8') as f:
read_tmp = f.read()
read_list = [i.split(',') for i in read_tmp.split('\n') if len(i)>0]
read_list = read_list[1:]
with open("template.svg", encoding='utf-8') as f:
target_doc = f.read()
with open("template_cutline.svg", encoding='utf-8') as f:
target_doc_cutline = f.read()
export_dir_str = "./export_svg/"
# MONTH_HEADER,01
# YEAR_HEADER,2020
# DAY_01L-31L,33
# DOW_01L-31L,Sun
# DAY_WNUM_01L-31L,01
# DAY_OF_NAME_01-31,DAY_OF_NAME
# day_rect_01L
# DAY_01R-31L,33
# DOW_01R-31L,Sun
# day_rect_01R
#["p1o", "p2o", "p3o", "p4o", "p5o", "p6o", "p7o", "p1u", "p2u", "p3u", "p4u", "p5u", "p6u", "p7u"]
write_svg_filename_list = []
for iii in zip([1, 2, 3, 4, 5, 6, 7, -1, 0, 12, 11, 10, 9, 8], [0, 12, 11, 10, 9, 8, 7, -1, 1, 2, 3, 4, 5, 6],
["002", "004", "006", "008", "010", "012", "014", "001", "003", "005", "007", "009", "011", "013"]):
    # Bleed / cutline page #
if iii[0] == -1 and iii[1] == -1:
target_doc_new = target_doc_cutline
write_svg_filename = export_dir_str + iii[2] + ".svg"
write_svg_filename_list.append(write_svg_filename)
with open(write_svg_filename, mode='w', encoding='utf-8') as f:
f.write(target_doc_new)
continue
else:
target_doc_new = target_doc
    # Left page #
left_month = iii[0]
if left_month == 0:
id_str, replace_alpha = "RECT_BLANK_L", 1
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
else:
id_str, replace_alpha = "RECT_BLANK_L", 0
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
left_month = str(left_month)
read_list_a_month = [rline for rline in read_list if rline[1] == left_month]
READ_YEAR = read_list_a_month[0][0]
READ_MONTH = read_list_a_month[0][1]
    # Year and month
year_str = str(READ_YEAR).rjust(4, '0')
id_str, target_str, replace_str = "YEAR_HEADER", "2020", year_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str) # target_doc
month_str = str(READ_MONTH).rjust(2, '0')
id_str, target_str, replace_str = "MONTH_HEADER", "01", month_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
    # Initialize the alternating day-row colors
for i in range(1, 31, 1):
day_str = str(i).rjust(2, '0')
        if i % 2 == 0:  # even
id_str, replace_rgb = "day_rect_" + day_str + "L", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
        else:  # odd
id_str, replace_rgb = "day_rect_" + day_str + "L", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month:
day_str = str(di).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "L", "33", day_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = dowi
id_str, target_str, replace_str = "DOW_" + day_str + "L", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_name_str = dnamei
id_str, target_str, replace_str = "DAY_OF_NAME_" + day_str, "DAY_OF_NAME", day_of_name_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
week_num_str = wnumi
id_str, target_str, replace_str = "DAY_WNUM_" + day_str + "L", "01", week_num_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
if len(dcolori) == 6:
id_str, replace_rgb = "day_rect_" + day_str + "L", dcolori
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
    # Handle days that do not exist in this month
day_list = [int(di) for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month]
    day_list_diff = sorted(list(day_ref_set - set(day_list)))  # days missing from this month
for ddiff in day_list_diff:
day_str = str(ddiff).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "L", "33", ""
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = ''
id_str, target_str, replace_str = "DOW_" + day_str + "L", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_name_str = ''
id_str, target_str, replace_str = "DAY_OF_NAME_" + day_str, "DAY_OF_NAME", day_of_name_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
week_num_str = ""
id_str, target_str, replace_str = "DAY_WNUM_" + day_str + "L", "01", week_num_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
id_str, replace_rgb = "day_rect_" + day_str + "L", "CCCCCC"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
    # Right page #
right_month = iii[1]
if right_month == 0:
id_str, replace_alpha = "RECT_BLANK_R", 1
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
else:
id_str, replace_alpha = "RECT_BLANK_R", 0
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
right_month = str(right_month)
read_list_a_month = [rline for rline in read_list if rline[1] == right_month]
READ_YEAR = read_list_a_month[0][0]
READ_MONTH = read_list_a_month[0][1]
    # Initialize the alternating day-row colors
for i in range(1, 31, 1):
day_str = str(i).rjust(2, '0')
        if i % 2 == 0:  # even
id_str, replace_rgb = "day_rect_" + day_str + "R", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str) # target_doc_new
        else:  # odd
id_str, replace_rgb = "day_rect_" + day_str + "R", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month:
day_str = str(di).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "R", "33", day_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = dowi
id_str, target_str, replace_str = "DOW_" + day_str + "R", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
if len(dcolori) == 6:
id_str, replace_rgb = "day_rect_" + day_str + "R", dcolori
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
    # Handle days that do not exist in this month
day_list = [int(di) for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month]
    day_list_diff = sorted(list(day_ref_set - set(day_list)))  # days missing from this month
for ddiff in day_list_diff:
day_str = str(ddiff).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "R", "33", ""
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = ''
id_str, target_str, replace_str = "DOW_" + day_str + "R", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
id_str, replace_rgb = "day_rect_" + day_str + "R", "CCCCCC"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
write_svg_filename = export_dir_str + iii[2] + ".svg"
write_svg_filename_list.append(write_svg_filename)
with open(write_svg_filename, mode='w', encoding='utf-8') as f:
f.write(target_doc_new)
write_pdf_filename_list = []
for fnamei in sorted(write_svg_filename_list):
basename_without_ext = os.path.splitext(os.path.basename(fnamei))[0]
export_pdf_filename = ".\\export_pdf\\{0}.pdf".format(basename_without_ext)
write_pdf_filename_list.append(export_pdf_filename)
print(fnamei,'->',export_pdf_filename)
cmd = '''"{0}" -f {1} -A {2}'''.format(INKSCAPE_PATH,fnamei,export_pdf_filename)
e = subprocess.call(cmd, shell=True)
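# Note (added): PdfFileMerger below is the legacy PyPDF2 (<3.0) interface;
# in current releases the equivalent class is pypdf.PdfMerger.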
merger = PyPDF2.PdfFileMerger()
for a_pdf_filename in write_pdf_filename_list:
merger.append(a_pdf_filename)
merger.write('./output.pdf')
merger.close()
sys.exit(0) | 38.56229 | 115 | 0.628045 | import configparser
import os
import re
import subprocess
import sys
import PyPDF2
config = configparser.ConfigParser()
config.read('config.ini', encoding='utf-8')
INKSCAPE_PATH = config['DEFAULT']['InkscapePath']
def replace_text(target_doc, target_str, replace_str, id_str):
pattern = '''id="''' + id_str + '''".+?>''' + target_str + '''<.+?</text>'''
result = re.search(pattern, target_doc, re.S)
id_trim_str = target_doc[result.span()[0]: result.span()[1]]
pattern2 = ">" + target_str + "<"
result2 = re.search(pattern2, id_trim_str, re.S)
index_s, index_e = result.span()[0] + result2.span()[0] + 1, result.span()[0] + result2.span()[1] - 1
target_doc_new = target_doc[:index_s] + replace_str + target_doc[index_e:]
return target_doc_new
def replace_color(target_doc, replace_rgb, id_str):
pattern = '''<rect.*?/>'''
result = re.findall(pattern, target_doc, re.S)
result_trim = ""
for i in result:
if id_str in i:
result_trim = i
    result2 = re.search(re.escape(result_trim), target_doc, re.S)
tmp_trim_txt = target_doc[result2.span()[0]:result2.span()[1]]
pattern2 = '''fill:#'''
result3 = re.search(pattern2, tmp_trim_txt, re.S)
index_s, index_e = result2.span()[0] + result3.span()[1], result2.span()[0] + result3.span()[1] + 6
target_doc_new = target_doc[:index_s] + replace_rgb + target_doc[index_e:]
return target_doc_new
def replace_blank(target_doc, replace_alpha, id_str):
pattern = '''<rect.*?/>'''
result = re.findall(pattern, target_doc, re.S)
result_trim = ""
for i in result:
if id_str in i:
result_trim = i
    result2 = re.search(re.escape(result_trim), target_doc, re.S)
tmp_trim_txt = target_doc[result2.span()[0]:result2.span()[1]]
pattern2 = '''fill-opacity:'''
result3 = re.search(pattern2, tmp_trim_txt, re.S)
index_s, index_e = result2.span()[0] + result3.span()[1], result2.span()[0] + result3.span()[1] + 3
target_doc_new = target_doc[:index_s] + str(replace_alpha) + target_doc[index_e:]
return target_doc_new
day_ref_set = set(range(1, 31 + 1))
WEEK_NUM_COLOR = ["#000000",
"#8b4513",
"#cd5c5c",
"#ff8c00",
"#ffd700",
"#006400",
"#4169e1",
"#9400d3",
"#808080"]
# read_list_a_month=[[2020,1,1,"MON","New Year's Day","#ffb6c1",1],
#                    [2020,1,2,"TUE","2nd","#add8e6",1],
#                    [2020,1,3,"WED","3rd","",2]]
with open('contents.csv', encoding='utf-8') as f:
read_tmp = f.read()
read_list = [i.split(',') for i in read_tmp.split('\n') if len(i)>0]
read_list = read_list[1:]
with open("template.svg", encoding='utf-8') as f:
target_doc = f.read()
with open("template_cutline.svg", encoding='utf-8') as f:
target_doc_cutline = f.read()
export_dir_str = "./export_svg/"
# MONTH_HEADER,01
# YEAR_HEADER,2020
# DAY_01L-31L,33
# DOW_01L-31L,Sun
# DAY_WNUM_01L-31L,01
# DAY_OF_NAME_01-31,DAY_OF_NAME
# day_rect_01L
# DAY_01R-31L,33
# DOW_01R-31L,Sun
# day_rect_01R
#["p1o", "p2o", "p3o", "p4o", "p5o", "p6o", "p7o", "p1u", "p2u", "p3u", "p4u", "p5u", "p6u", "p7u"]
write_svg_filename_list = []
for iii in zip([1, 2, 3, 4, 5, 6, 7, -1, 0, 12, 11, 10, 9, 8], [0, 12, 11, 10, 9, 8, 7, -1, 1, 2, 3, 4, 5, 6],
["002", "004", "006", "008", "010", "012", "014", "001", "003", "005", "007", "009", "011", "013"]):
    # Bleed / cutline page #
if iii[0] == -1 and iii[1] == -1:
target_doc_new = target_doc_cutline
write_svg_filename = export_dir_str + iii[2] + ".svg"
write_svg_filename_list.append(write_svg_filename)
with open(write_svg_filename, mode='w', encoding='utf-8') as f:
f.write(target_doc_new)
continue
else:
target_doc_new = target_doc
    # Left page #
left_month = iii[0]
if left_month == 0:
id_str, replace_alpha = "RECT_BLANK_L", 1
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
else:
id_str, replace_alpha = "RECT_BLANK_L", 0
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
left_month = str(left_month)
read_list_a_month = [rline for rline in read_list if rline[1] == left_month]
READ_YEAR = read_list_a_month[0][0]
READ_MONTH = read_list_a_month[0][1]
    # Year and month
year_str = str(READ_YEAR).rjust(4, '0')
id_str, target_str, replace_str = "YEAR_HEADER", "2020", year_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str) # target_doc
month_str = str(READ_MONTH).rjust(2, '0')
id_str, target_str, replace_str = "MONTH_HEADER", "01", month_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
    # Initialize the alternating day-row colors
for i in range(1, 31, 1):
day_str = str(i).rjust(2, '0')
        if i % 2 == 0:  # even
id_str, replace_rgb = "day_rect_" + day_str + "L", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
        else:  # odd
id_str, replace_rgb = "day_rect_" + day_str + "L", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month:
day_str = str(di).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "L", "33", day_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = dowi
id_str, target_str, replace_str = "DOW_" + day_str + "L", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_name_str = dnamei
id_str, target_str, replace_str = "DAY_OF_NAME_" + day_str, "DAY_OF_NAME", day_of_name_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
week_num_str = wnumi
id_str, target_str, replace_str = "DAY_WNUM_" + day_str + "L", "01", week_num_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
if len(dcolori) == 6:
id_str, replace_rgb = "day_rect_" + day_str + "L", dcolori
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
    # Handle days that do not exist in this month
day_list = [int(di) for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month]
    day_list_diff = sorted(list(day_ref_set - set(day_list)))  # days missing from this month
for ddiff in day_list_diff:
day_str = str(ddiff).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "L", "33", ""
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = ''
id_str, target_str, replace_str = "DOW_" + day_str + "L", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_name_str = ''
id_str, target_str, replace_str = "DAY_OF_NAME_" + day_str, "DAY_OF_NAME", day_of_name_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
week_num_str = ""
id_str, target_str, replace_str = "DAY_WNUM_" + day_str + "L", "01", week_num_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
id_str, replace_rgb = "day_rect_" + day_str + "L", "CCCCCC"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
    # Right page #
right_month = iii[1]
if right_month == 0:
id_str, replace_alpha = "RECT_BLANK_R", 1
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
else:
id_str, replace_alpha = "RECT_BLANK_R", 0
target_doc_new = replace_blank(target_doc_new, replace_alpha, id_str)
right_month = str(right_month)
read_list_a_month = [rline for rline in read_list if rline[1] == right_month]
READ_YEAR = read_list_a_month[0][0]
READ_MONTH = read_list_a_month[0][1]
    # Initialize the alternating day-row colors
for i in range(1, 31, 1):
day_str = str(i).rjust(2, '0')
        if i % 2 == 0:  # even
id_str, replace_rgb = "day_rect_" + day_str + "R", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str) # target_doc_new
        else:  # odd
id_str, replace_rgb = "day_rect_" + day_str + "R", "FFFFFF"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month:
day_str = str(di).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "R", "33", day_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = dowi
id_str, target_str, replace_str = "DOW_" + day_str + "R", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
if len(dcolori) == 6:
id_str, replace_rgb = "day_rect_" + day_str + "R", dcolori
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
    # Handle days that do not exist in this month
day_list = [int(di) for yi, mi, di, dowi, dnamei, dcolori, wnumi, tmp1, tmp2, tmp3 in read_list_a_month]
    day_list_diff = sorted(list(day_ref_set - set(day_list)))  # days missing from this month
for ddiff in day_list_diff:
day_str = str(ddiff).rjust(2, '0')
id_str, target_str, replace_str = "DAY_" + day_str + "R", "33", ""
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
day_of_week_str = ''
id_str, target_str, replace_str = "DOW_" + day_str + "R", "Sun", day_of_week_str
target_doc_new = replace_text(target_doc_new, target_str, replace_str, id_str)
id_str, replace_rgb = "day_rect_" + day_str + "R", "CCCCCC"
target_doc_new = replace_color(target_doc_new, replace_rgb, id_str)
write_svg_filename = export_dir_str + iii[2] + ".svg"
write_svg_filename_list.append(write_svg_filename)
with open(write_svg_filename, mode='w', encoding='utf-8') as f:
f.write(target_doc_new)
write_pdf_filename_list = []
for fnamei in sorted(write_svg_filename_list):
basename_without_ext = os.path.splitext(os.path.basename(fnamei))[0]
export_pdf_filename = ".\\export_pdf\\{0}.pdf".format(basename_without_ext)
write_pdf_filename_list.append(export_pdf_filename)
print(fnamei,'->',export_pdf_filename)
cmd = '''"{0}" -f {1} -A {2}'''.format(INKSCAPE_PATH,fnamei,export_pdf_filename)
e = subprocess.call(cmd, shell=True)
merger = PyPDF2.PdfFileMerger()
for a_pdf_filename in write_pdf_filename_list:
merger.append(a_pdf_filename)
merger.write('./output.pdf')
merger.close()
sys.exit(0) | true | true |
f7246ae56a8cd487abd73fdf982a117465640f8f | 6,658 | py | Python | pciids/pciids.py | ilkermanap/python-pciids | 9a2fcb00d3e0100b9de331047133a42e98242deb | [
"MIT"
] | 5 | 2018-02-12T13:45:45.000Z | 2020-06-04T09:24:52.000Z | pciids/pciids.py | ilkermanap/python-pciids | 9a2fcb00d3e0100b9de331047133a42e98242deb | [
"MIT"
] | 2 | 2016-03-17T09:22:01.000Z | 2020-01-12T23:04:35.000Z | pciids/pciids.py | ilkermanap/python-pciids | 9a2fcb00d3e0100b9de331047133a42e98242deb | [
"MIT"
] | 4 | 2018-09-04T12:57:58.000Z | 2021-07-02T01:01:26.000Z | import os
import bz2
import requests
import glob
global HOME
HOME = "https://pci-ids.ucw.cz"
class Vendor:
"""
Class for vendors. This is the top level class
for the devices belong to a specific vendor.
self.devices is the device dictionary
subdevices are in each device.
"""
def __init__(self, vendorStr):
"""
Class initializes with the raw line from pci.ids
Parsing takes place inside __init__
"""
self.ID = vendorStr.split()[0]
self.name = vendorStr.replace("%s " % self.ID,"")
self.devices = {}
def addDevice(self, deviceStr):
"""
Adds a device to self.devices
takes the raw line from pci.ids
"""
s = deviceStr.strip()
devID = s.split()[0]
if devID in self.devices:
pass
else:
self.devices[devID] = Device(deviceStr)
def report(self):
print( self.ID, self.name)
for id, dev in self.devices.items():
dev.report()
class Device:
def __init__(self, deviceStr):
"""
Class for each device.
Each vendor has its own devices dictionary.
"""
s = deviceStr.strip()
self.ID = s.split()[0]
self.name = s.replace("%s " % self.ID,"")
self.subdevices = {}
def report(self):
print("\t%s\t%s" % (self.ID, self.name))
for subID, subdev in self.subdevices.items():
subdev.report()
def addSubDevice(self, subDeviceStr):
"""
Adds a subvendor, subdevice to device.
Uses raw line from pci.ids
"""
s = subDeviceStr.strip()
spl = s.split()
subVendorID = spl[0]
subDeviceID = spl[1]
subDeviceName = s.split(" ")[-1]
devID = "%s:%s" % (subVendorID,subDeviceID)
self.subdevices[devID] = SubDevice(subVendorID,subDeviceID,subDeviceName)
class SubDevice:
"""
Class for subdevices.
"""
def __init__(self, vendor, device, name):
"""
Class initializes with vendorid, deviceid and name
"""
self.vendorID = vendor
self.deviceID = device
self.name = name
def report(self):
print( "\t\t%s\t%s\t%s" % (self.vendorID, self.deviceID,self.name))
class PCIIds:
"""
Top class for all pci.ids entries.
All queries will be asked to this class.
PCIIds.vendors["0e11"].devices["0046"].subdevices["0e11:4091"].name = "Smart Array 6i"
"""
def __init__(self, url=HOME):
"""
Prepares the directories.
Checks local data file.
Tries to load from local, if not found, downloads from web
"""
self.url = url
self.version = ""
self.date = ""
self.compressed = "pci.ids.bz2"
if (os.path.isdir("data") is False):
os.mkdir("data")
self.vendors = {}
self.contents = None
self.loadLocal()
self.parse()
def reportVendors(self):
"""Reports the vendors
"""
for vid, v in self.vendors.items():
print( v.ID, v.name)
def report(self, vendor = None):
"""
Reports everything for all vendors or a specific vendor
PCIIds.report() reports everything
PCIIDs.report("0e11") reports only "Compaq Computer Corporation"
"""
if vendor != None:
self.vendors[vendor].report()
else:
for vID, v in self.vendors.items():
v.report()
def findDate(self, content):
for l in content:
if l.find("Date:") > -1:
return l.split()[-2].replace("-", "")
return None
def parse(self):
if len(self.contents) < 1:
print( "data/%s-pci.ids not found" % self.date)
else:
vendorID = ""
deviceID = ""
for l in self.contents:
if l[0] == "#":
continue
elif len(l.strip()) == 0:
continue
else:
if l.find("\t\t") == 0:
self.vendors[vendorID].devices[deviceID].addSubDevice(l)
elif l.find("\t") == 0:
deviceID = l.strip().split()[0]
self.vendors[vendorID].addDevice(l)
else:
vendorID = l.split()[0]
self.vendors[vendorID] = Vendor(l)
def getLatest(self):
ver, date, url = self.latestVersion()
outfile = "data/%s-%s" % (date, self.compressed[:-4]) # remove bz2
out = open(outfile, "wb")
resp = requests.get(url)
out.write(bz2.decompress(resp.content))
out.close()
self.version = ver
self.date = date
self.readLocal()
def readLocal(self):
"""
Reads the local file
"""
self.contents = open("data/%s-pci.ids" % self.date).readlines()
self.date = self.findDate(self.contents)
def loadLocal(self):
"""
Loads database from local. If there is no file,
it creates a new one from web
"""
idsfile = glob.glob("data/*.ids")
if len(idsfile) == 0:
self.getLatest()
else:
self.date = idsfile[0].split("/")[1].split("-")[0]
self.readLocal()
def latestVersion(self):
"""
Checks the latest version from web
"""
resp = requests.get(self.url)
webPage = resp.content.decode().splitlines()
for line in webPage:
if line.find(self.compressed) > -1:
print(line)
for tag in line.split("<"):
if tag.find(self.compressed) > -1:
path = tag.split('"')[1]
ver = path.split("/")[1]
url = "%s%s" % (self.url, path)
urlUncompressed = url.replace(".bz2","")
resp2 = requests.get(urlUncompressed)
con = resp2.content.decode().splitlines()
for i in range(10):
l = con[i]
if l.find("Date:") > -1:
date = l.split()[-2].replace("-","")
break
return (ver, date, "%s%s" % (HOME, path))
break
return ""
if __name__ == "__main__":
id = PCIIds()
#id.reportVendors()
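    # Example (added): report all devices of a single vendor, e.g. Compaq:
    # id.report("0e11")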
| 30.541284 | 92 | 0.49249 | import os
import bz2
import requests
import glob
global HOME
HOME = "https://pci-ids.ucw.cz"
class Vendor:
def __init__(self, vendorStr):
self.ID = vendorStr.split()[0]
self.name = vendorStr.replace("%s " % self.ID,"")
self.devices = {}
def addDevice(self, deviceStr):
s = deviceStr.strip()
devID = s.split()[0]
if devID in self.devices:
pass
else:
self.devices[devID] = Device(deviceStr)
def report(self):
print( self.ID, self.name)
for id, dev in self.devices.items():
dev.report()
class Device:
def __init__(self, deviceStr):
s = deviceStr.strip()
self.ID = s.split()[0]
self.name = s.replace("%s " % self.ID,"")
self.subdevices = {}
def report(self):
print("\t%s\t%s" % (self.ID, self.name))
for subID, subdev in self.subdevices.items():
subdev.report()
def addSubDevice(self, subDeviceStr):
s = subDeviceStr.strip()
spl = s.split()
subVendorID = spl[0]
subDeviceID = spl[1]
subDeviceName = s.split(" ")[-1]
devID = "%s:%s" % (subVendorID,subDeviceID)
self.subdevices[devID] = SubDevice(subVendorID,subDeviceID,subDeviceName)
class SubDevice:
def __init__(self, vendor, device, name):
self.vendorID = vendor
self.deviceID = device
self.name = name
def report(self):
print( "\t\t%s\t%s\t%s" % (self.vendorID, self.deviceID,self.name))
class PCIIds:
def __init__(self, url=HOME):
self.url = url
self.version = ""
self.date = ""
self.compressed = "pci.ids.bz2"
if (os.path.isdir("data") is False):
os.mkdir("data")
self.vendors = {}
self.contents = None
self.loadLocal()
self.parse()
def reportVendors(self):
for vid, v in self.vendors.items():
print( v.ID, v.name)
def report(self, vendor = None):
if vendor != None:
self.vendors[vendor].report()
else:
for vID, v in self.vendors.items():
v.report()
def findDate(self, content):
for l in content:
if l.find("Date:") > -1:
return l.split()[-2].replace("-", "")
return None
def parse(self):
if len(self.contents) < 1:
print( "data/%s-pci.ids not found" % self.date)
else:
vendorID = ""
deviceID = ""
for l in self.contents:
if l[0] == "#":
continue
elif len(l.strip()) == 0:
continue
else:
if l.find("\t\t") == 0:
self.vendors[vendorID].devices[deviceID].addSubDevice(l)
elif l.find("\t") == 0:
deviceID = l.strip().split()[0]
self.vendors[vendorID].addDevice(l)
else:
vendorID = l.split()[0]
self.vendors[vendorID] = Vendor(l)
def getLatest(self):
ver, date, url = self.latestVersion()
outfile = "data/%s-%s" % (date, self.compressed[:-4])
out = open(outfile, "wb")
resp = requests.get(url)
out.write(bz2.decompress(resp.content))
out.close()
self.version = ver
self.date = date
self.readLocal()
def readLocal(self):
self.contents = open("data/%s-pci.ids" % self.date).readlines()
self.date = self.findDate(self.contents)
def loadLocal(self):
idsfile = glob.glob("data/*.ids")
if len(idsfile) == 0:
self.getLatest()
else:
self.date = idsfile[0].split("/")[1].split("-")[0]
self.readLocal()
def latestVersion(self):
resp = requests.get(self.url)
webPage = resp.content.decode().splitlines()
for line in webPage:
if line.find(self.compressed) > -1:
print(line)
for tag in line.split("<"):
if tag.find(self.compressed) > -1:
path = tag.split('"')[1]
ver = path.split("/")[1]
url = "%s%s" % (self.url, path)
urlUncompressed = url.replace(".bz2","")
resp2 = requests.get(urlUncompressed)
con = resp2.content.decode().splitlines()
for i in range(10):
l = con[i]
if l.find("Date:") > -1:
date = l.split()[-2].replace("-","")
break
return (ver, date, "%s%s" % (HOME, path))
break
return ""
if __name__ == "__main__":
id = PCIIds()
#id.reportVendors()
| true | true |
f7246b1f729e630e5873f854feff0a703ae952c5 | 3,564 | py | Python | test/functional/feature_notifications.py | cisnes/PINECOIN | a0252cace17ecc1208a07368c0b893d3878459d8 | [
"MIT"
] | null | null | null | test/functional/feature_notifications.py | cisnes/PINECOIN | a0252cace17ecc1208a07368c0b893d3878459d8 | [
"MIT"
] | null | null | null | test/functional/feature_notifications.py | cisnes/PINECOIN | a0252cace17ecc1208a07368c0b893d3878459d8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -alertnotify, -blocknotify and -walletnotify options."""
import os
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
from test_framework.test_framework import PineCoinTestFramework
from test_framework.util import assert_equal, wait_until, connect_nodes_bi
class NotificationsTest(PineCoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def setup_network(self):
self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify")
os.mkdir(self.alertnotify_dir)
os.mkdir(self.blocknotify_dir)
os.mkdir(self.walletnotify_dir)
# -alertnotify and -blocknotify on node0, walletnotify on node1
self.extra_args = [[
"-alertnotify=echo > {}".format(os.path.join(self.alertnotify_dir, '%s')),
"-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))],
["-blockversion=211",
"-rescan",
"-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, '%s'))]]
super().setup_network()
def run_test(self):
self.log.info("test -blocknotify")
block_count = 10
blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)
# wait at most 10 seconds for expected number of files before reading the content
wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)
# directory content should equal the generated blocks hashes
assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))
if self.is_wallet_compiled():
self.log.info("test -walletnotify")
# wait at most 10 seconds for expected number of files before reading the content
wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
# directory content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
self.stop_node(1)
for tx_file in os.listdir(self.walletnotify_dir):
os.remove(os.path.join(self.walletnotify_dir, tx_file))
self.log.info("test -walletnotify after rescan")
# restart node to rescan to force wallet notifications
self.start_node(1)
connect_nodes_bi(self.nodes, 0, 1)
wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
# directory content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
# TODO: add test for `-alertnotify` large fork notifications
if __name__ == '__main__':
NotificationsTest().main()
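# Added invocation sketch (standard test_framework CLI; the paths are examples):
#   test/functional/feature_notifications.py --tmpdir=/tmp/notify-test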
| 48.821918 | 152 | 0.670034 |
import os
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
from test_framework.test_framework import PineCoinTestFramework
from test_framework.util import assert_equal, wait_until, connect_nodes_bi
class NotificationsTest(PineCoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def setup_network(self):
self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify")
os.mkdir(self.alertnotify_dir)
os.mkdir(self.blocknotify_dir)
os.mkdir(self.walletnotify_dir)
self.extra_args = [[
"-alertnotify=echo > {}".format(os.path.join(self.alertnotify_dir, '%s')),
"-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))],
["-blockversion=211",
"-rescan",
"-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, '%s'))]]
super().setup_network()
def run_test(self):
self.log.info("test -blocknotify")
block_count = 10
blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)
wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)
assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))
if self.is_wallet_compiled():
self.log.info("test -walletnotify")
wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
self.stop_node(1)
for tx_file in os.listdir(self.walletnotify_dir):
os.remove(os.path.join(self.walletnotify_dir, tx_file))
self.log.info("test -walletnotify after rescan")
self.start_node(1)
connect_nodes_bi(self.nodes, 0, 1)
wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
if __name__ == '__main__':
NotificationsTest().main()
| true | true |
f7246b36becdc63ea725194e371727a55e6be4c1 | 3,932 | py | Python | test/functional/test_framework/blocktools.py | tmiholdings/tmi | f1b6027f025dafc40616cde076df2f4b8cdae8a2 | [
"MIT"
] | null | null | null | test/functional/test_framework/blocktools.py | tmiholdings/tmi | f1b6027f025dafc40616cde076df2f4b8cdae8a2 | [
"MIT"
] | null | null | null | test/functional/test_framework/blocktools.py | tmiholdings/tmi | f1b6027f025dafc40616cde076df2f4b8cdae8a2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The TMIcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
from .mininode import *
from .script import CScript, OP_TRUE, OP_CHECKSIG, OP_RETURN
# Create a block (with regtest difficulty)
def create_block(hashprev, coinbase, nTime=None):
block = CBlock()
if nTime is None:
import time
block.nTime = int(time.time()+600)
else:
block.nTime = nTime
block.hashPrevBlock = hashprev
block.nBits = 0x207fffff # Will break after a difficulty adjustment...
block.vtx.append(coinbase)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
def get_witness_script(witness_root, witness_nonce):
witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(witness_nonce)))
output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment)
return CScript([OP_RETURN, output_data])
# According to BIP141, blocks with witness rules active must commit to the
# hash of all in-block transactions including witness.
def add_witness_commitment(block, nonce=0):
# First calculate the merkle root of the block's
# transactions, with witnesses.
witness_nonce = nonce
witness_root = block.calc_witness_merkle_root()
# witness_nonce should go to coinbase witness.
block.vtx[0].wit.vtxinwit = [CTxInWitness()]
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)]
# witness commitment is the last OP_RETURN output in coinbase
block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce)))
block.vtx[0].rehash()
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
def serialize_script_num(value):
r = bytearray(0)
if value == 0:
return r
neg = value < 0
absvalue = -value if neg else value
while (absvalue):
r.append(int(absvalue & 0xff))
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return r
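# Example (added): serialize_script_num(128) == b'\x80\x00' -- the trailing
# zero byte keeps the most-significant bit free for the sign flag, matching
# Bitcoin's CScriptNum encoding.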
# Create a coinbase transaction, assuming no miner fees.
# If pubkey is passed in, the coinbase output will be a P2PK output;
# otherwise an anyone-can-spend output.
def create_coinbase(height, pubkey = None):
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
ser_string(serialize_script_num(height)), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50 * COIN
halvings = int(height/150) # regtest
coinbaseoutput.nValue >>= halvings
if (pubkey != None):
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [ coinbaseoutput ]
coinbase.calc_sha256()
return coinbase
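# Usage sketch (added): assemble a regtest block on top of a known tip.
# 'tip_hash'/'tip_height'/'tip_time' are placeholders for real chain state;
# CBlock.solve() from mininode grinds the nonce to satisfy regtest PoW.
#
#   coinbase = create_coinbase(tip_height + 1)
#   block = create_block(int(tip_hash, 16), coinbase, nTime=tip_time + 1)
#   block.solve()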
# Create a transaction.
# If the scriptPubKey is not specified, make it anyone-can-spend.
def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()):
tx = CTransaction()
assert(n < len(prevtx.vout))
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
tx.vout.append(CTxOut(value, scriptPubKey))
tx.calc_sha256()
return tx
def get_legacy_sigopcount_block(block, fAccurate=True):
count = 0
for tx in block.vtx:
count += get_legacy_sigopcount_tx(tx, fAccurate)
return count
def get_legacy_sigopcount_tx(tx, fAccurate=True):
count = 0
for i in tx.vout:
count += i.scriptPubKey.GetSigOpCount(fAccurate)
for j in tx.vin:
# scriptSig might be of type bytes, so convert to CScript for the moment
count += CScript(j.scriptSig).GetSigOpCount(fAccurate)
return count
| 35.423423 | 104 | 0.709054 |
from .mininode import *
from .script import CScript, OP_TRUE, OP_CHECKSIG, OP_RETURN
def create_block(hashprev, coinbase, nTime=None):
block = CBlock()
if nTime is None:
import time
block.nTime = int(time.time()+600)
else:
block.nTime = nTime
block.hashPrevBlock = hashprev
block.nBits = 0x207fffff
block.vtx.append(coinbase)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
def get_witness_script(witness_root, witness_nonce):
witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(witness_nonce)))
output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment)
return CScript([OP_RETURN, output_data])
def add_witness_commitment(block, nonce=0):
    # First calculate the merkle root of the block's
    # transactions, with witnesses.
witness_nonce = nonce
witness_root = block.calc_witness_merkle_root()
# witness_nonce should go to coinbase witness.
block.vtx[0].wit.vtxinwit = [CTxInWitness()]
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)]
# witness commitment is the last OP_RETURN output in coinbase
block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce)))
block.vtx[0].rehash()
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
def serialize_script_num(value):
r = bytearray(0)
if value == 0:
return r
neg = value < 0
absvalue = -value if neg else value
while (absvalue):
r.append(int(absvalue & 0xff))
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return r
# Create a coinbase transaction, assuming no miner fees.
# If pubkey is passed in, the coinbase output will be a P2PK output;
# otherwise an anyone-can-spend output.
def create_coinbase(height, pubkey = None):
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
ser_string(serialize_script_num(height)), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50 * COIN
halvings = int(height/150) # regtest
coinbaseoutput.nValue >>= halvings
if (pubkey != None):
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [ coinbaseoutput ]
coinbase.calc_sha256()
return coinbase
# Create a transaction.
# If the scriptPubKey is not specified, make it anyone-can-spend.
def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()):
tx = CTransaction()
assert(n < len(prevtx.vout))
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
tx.vout.append(CTxOut(value, scriptPubKey))
tx.calc_sha256()
return tx
def get_legacy_sigopcount_block(block, fAccurate=True):
count = 0
for tx in block.vtx:
count += get_legacy_sigopcount_tx(tx, fAccurate)
return count
def get_legacy_sigopcount_tx(tx, fAccurate=True):
count = 0
for i in tx.vout:
count += i.scriptPubKey.GetSigOpCount(fAccurate)
for j in tx.vin:
# scriptSig might be of type bytes, so convert to CScript for the moment
count += CScript(j.scriptSig).GetSigOpCount(fAccurate)
return count
| true | true |
f7246bf98a05892332d4bb0c595fed3c4643f2dc | 2,401 | py | Python | src/pudl/__init__.py | erictleung/pudl | 32bfbf3a959114f766b630f5b873a93b7a930c71 | [
"MIT"
] | null | null | null | src/pudl/__init__.py | erictleung/pudl | 32bfbf3a959114f766b630f5b873a93b7a930c71 | [
"MIT"
] | null | null | null | src/pudl/__init__.py | erictleung/pudl | 32bfbf3a959114f766b630f5b873a93b7a930c71 | [
"MIT"
] | null | null | null | """The Public Utility Data Liberation (PUDL) Project."""
# Create a parent logger for all PUDL loggers to inherit from
import logging
import pkg_resources
import pudl.analysis.mcoe
import pudl.cli
import pudl.constants
import pudl.convert.datapkg_to_sqlite
import pudl.convert.epacems_to_parquet
import pudl.convert.ferc1_to_sqlite
import pudl.convert.flatten_datapkgs
import pudl.etl
import pudl.extract.eia860
import pudl.extract.eia923
import pudl.extract.epacems
import pudl.extract.epaipm
import pudl.extract.ferc1
import pudl.glue.ferc1_eia
import pudl.helpers
import pudl.load.csv
import pudl.load.metadata
# Output modules by data source:
import pudl.output.eia860
import pudl.output.eia923
import pudl.output.ferc1
import pudl.output.glue
import pudl.output.pudltabl
# Transformation functions, organized by data source:
import pudl.transform.eia
import pudl.transform.eia860
import pudl.transform.eia923
import pudl.transform.epacems
import pudl.transform.epaipm
import pudl.transform.ferc1
# Deployed data & workspace management
import pudl.validate
import pudl.workspace.datastore
import pudl.workspace.setup  # noqa: F401 -- imported for side effects, so flake8 flags it as unused
__author__ = "Catalyst Cooperative"
__contact__ = "pudl@catalyst.coop"
__maintainer__ = "Catalyst Cooperative"
__license__ = "MIT License"
__maintainer_email__ = "zane.selvans@catalyst.coop"
__version__ = pkg_resources.get_distribution("catalystcoop.pudl").version
__docformat__ = "restructuredtext en"
__description__ = "Tools for liberating public US electric utility data."
__long_description__ = """
This Public Utility Data Liberation (PUDL) project is a collection of tools
that allow programmatic access to and manipulation of many public data sets
related to electric utilities in the United States. These data sets are
often collected by state and federal agencies, but are publicized in ways
that are not well standardized, or intended for interoperability. PUDL
seeks to allow more transparent and useful access to this important public
data, with the goal of enabling climate advocates, academic researchers, and
data journalists to better understand the electricity system and its impacts
on climate.
"""
__pythonrequiredversion__ = "3.7"
__projecturl__ = "https://catalyst.coop/pudl/"
__downloadurl__ = "https://github.com/catalyst-cooperative/pudl/"
logging.getLogger(__name__).addHandler(logging.NullHandler())
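# Consumer sketch (added): this package only installs a NullHandler, so attach
# your own handler to see PUDL log output, e.g.:
#
#   import logging, pudl
#   logger = logging.getLogger("pudl")
#   logger.addHandler(logging.StreamHandler())
#   logger.setLevel(logging.INFO)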
| 35.835821 | 76 | 0.82299 |
import logging
import pkg_resources
import pudl.analysis.mcoe
import pudl.cli
import pudl.constants
import pudl.convert.datapkg_to_sqlite
import pudl.convert.epacems_to_parquet
import pudl.convert.ferc1_to_sqlite
import pudl.convert.flatten_datapkgs
import pudl.etl
import pudl.extract.eia860
import pudl.extract.eia923
import pudl.extract.epacems
import pudl.extract.epaipm
import pudl.extract.ferc1
import pudl.glue.ferc1_eia
import pudl.helpers
import pudl.load.csv
import pudl.load.metadata
import pudl.output.eia860
import pudl.output.eia923
import pudl.output.ferc1
import pudl.output.glue
import pudl.output.pudltabl
import pudl.transform.eia
import pudl.transform.eia860
import pudl.transform.eia923
import pudl.transform.epacems
import pudl.transform.epaipm
import pudl.transform.ferc1
import pudl.validate
import pudl.workspace.datastore
import pudl.workspace.setup
__author__ = "Catalyst Cooperative"
__contact__ = "pudl@catalyst.coop"
__maintainer__ = "Catalyst Cooperative"
__license__ = "MIT License"
__maintainer_email__ = "zane.selvans@catalyst.coop"
__version__ = pkg_resources.get_distribution("catalystcoop.pudl").version
__docformat__ = "restructuredtext en"
__description__ = "Tools for liberating public US electric utility data."
__long_description__ = """
This Public Utility Data Liberation (PUDL) project is a collection of tools
that allow programmatic access to and manipulation of many public data sets
related to electric utilities in the United States. These data sets are
often collected by state and federal agencies, but are publicized in ways
that are not well standardized, or intended for interoperability. PUDL
seeks to allow more transparent and useful access to this important public
data, with the goal of enabling climate advocates, academic researchers, and
data journalists to better understand the electricity system and its impacts
on climate.
"""
__pythonrequiredversion__ = "3.7"
__projecturl__ = "https://catalyst.coop/pudl/"
__downloadurl__ = "https://github.com/catalyst-cooperative/pudl/"
logging.getLogger(__name__).addHandler(logging.NullHandler())
| true | true |
f7246c03ec5401f98478d3072cffa65821a40e6d | 5,510 | py | Python | test/functional/p2p_disconnect_ban.py | Simple-Software-Solutions/RBX-Core | 8cf0dfda708233e080e8729cec0b5014218386e3 | [
"MIT"
] | null | null | null | test/functional/p2p_disconnect_ban.py | Simple-Software-Solutions/RBX-Core | 8cf0dfda708233e080e8729cec0b5014218386e3 | [
"MIT"
] | null | null | null | test/functional/p2p_disconnect_ban.py | Simple-Software-Solutions/RBX-Core | 8cf0dfda708233e080e8729cec0b5014218386e3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node disconnect and ban behavior"""
import time
from test_framework.test_framework import RbxTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
assert_raises_rpc_error,
wait_until,
)
class DisconnectBanTest(RbxTestFramework):
def set_test_params(self):
self.num_nodes = 2
def run_test(self):
self.log.info("Connect nodes both way")
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 0)
self.log.info("Test setban and listbanned RPCs")
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
self.nodes[1].setban("127.0.0.1", "add")
wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("clearbanned: successfully clear ban list")
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].setban("127.0.0.0/24", "add")
self.log.info("setban: fail to ban an already banned subnet")
assert_equal(len(self.nodes[1].listbanned()), 1)
assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
self.log.info("setban: fail to ban an invalid subnet")
assert_raises_rpc_error(-23, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
self.log.info("setban remove: fail to unban a non-banned subnet")
assert_raises_rpc_error(-1, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: successfully unban subnet")
self.nodes[1].setban("127.0.0.0/24", "remove")
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.log.info("setban: test persistence across node restart")
self.nodes[1].setban("127.0.0.0/32", "add")
self.nodes[1].setban("127.0.0.0/24", "add")
# Set the mocktime so we can control when bans expire
old_time = int(time.time())
self.nodes[1].setmocktime(old_time)
self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds
self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds
listBeforeShutdown = self.nodes[1].listbanned()
assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
# Move time forward by 3 seconds so the third ban has expired
self.nodes[1].setmocktime(old_time + 3)
assert_equal(len(self.nodes[1].listbanned()), 4)
self.stop_node(1)
self.start_node(1)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
assert_equal("/19" in listAfterShutdown[2]['address'], True)
# Clear ban lists
self.nodes[1].clearbanned()
self.log.info("Connect nodes both way")
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 0)
self.log.info("Test disconnectnode RPCs")
#self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid")
#address1 = self.nodes[0].getpeerinfo()[0]['addr']
#node1 = self.nodes[0].getpeerinfo()[0]['addr']
#assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1)
self.log.info("disconnectnode: fail to disconnect when calling with junk address")
assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, "221B Baker Street")
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address1)
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
connect_nodes(self.nodes[0], 1) # reconnect the node
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
#self.log.info("disconnectnode: successfully disconnect node by node id")
#id1 = self.nodes[0].getpeerinfo()[0]['id']
#self.nodes[0].disconnectnode(nodeid=id1)
#wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
#assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
if __name__ == '__main__':
DisconnectBanTest().main()
| 48.333333 | 156 | 0.659891 |
import time
from test_framework.test_framework import RbxTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
assert_raises_rpc_error,
wait_until,
)
class DisconnectBanTest(RbxTestFramework):
def set_test_params(self):
self.num_nodes = 2
def run_test(self):
self.log.info("Connect nodes both way")
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 0)
self.log.info("Test setban and listbanned RPCs")
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2)
self.nodes[1].setban("127.0.0.1", "add")
wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0)
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("clearbanned: successfully clear ban list")
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].setban("127.0.0.0/24", "add")
self.log.info("setban: fail to ban an already banned subnet")
assert_equal(len(self.nodes[1].listbanned()), 1)
assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
self.log.info("setban: fail to ban an invalid subnet")
assert_raises_rpc_error(-23, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: fail to unban a non-banned subnet")
assert_raises_rpc_error(-1, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: successfully unban subnet")
self.nodes[1].setban("127.0.0.0/24", "remove")
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.log.info("setban: test persistence across node restart")
self.nodes[1].setban("127.0.0.0/32", "add")
self.nodes[1].setban("127.0.0.0/24", "add")
old_time = int(time.time())
self.nodes[1].setmocktime(old_time)
self.nodes[1].setban("192.168.0.1", "add", 1)
self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000)
listBeforeShutdown = self.nodes[1].listbanned()
assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
self.nodes[1].setmocktime(old_time + 3)
assert_equal(len(self.nodes[1].listbanned()), 4)
self.stop_node(1)
self.start_node(1)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
assert_equal("/19" in listAfterShutdown[2]['address'], True)
self.nodes[1].clearbanned()
self.log.info("Connect nodes both way")
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 0)
self.log.info("Test disconnectnode RPCs")
self.log.info("disconnectnode: fail to disconnect when calling with junk address")
assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, "221B Baker Street")
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address1)
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
connect_nodes(self.nodes[0], 1)
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
if __name__ == '__main__':
DisconnectBanTest().main()
| true | true |
f7246cc917719e28deb7b06b6817fd9d7b3f055b | 1,758 | py | Python | core/src/zeit/edit/browser/resources.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | ["BSD-3-Clause"] | 5 | 2019-05-16T09:51:29.000Z | 2021-05-31T09:30:03.000Z | core/src/zeit/edit/browser/resources.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | ["BSD-3-Clause"] | 107 | 2019-05-24T12:19:02.000Z | 2022-03-23T15:05:56.000Z | core/src/zeit/edit/browser/resources.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | ["BSD-3-Clause"] | 3 | 2020-08-14T11:01:17.000Z | 2022-01-08T17:32:19.000Z |
from zeit.cms.browser.resources import SplitDirResource, Library
import zeit.cms.browser.resources
import zeit.find.browser.resources
lib_css = Library('zeit.edit', 'resources')
lib_js = Library('zeit.edit.js', 'js')
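# Note (added for context, hedged): ``SplitDirResource`` appears to inject each
# resource into this module's globals (e.g. ``editor_css`` for 'editor.css'),
# which is why the bare names referenced below carry ``# noqa`` markers.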
SplitDirResource('editor.css')
SplitDirResource('fold.js', depends=[zeit.cms.browser.resources.base])
SplitDirResource('json.js', depends=[zeit.cms.browser.resources.base])
SplitDirResource('edit.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.tab_js,
zeit.find.browser.resources.find_js,
json_js, # noqa
editor_css, # noqa
])
SplitDirResource('context.js', depends=[
zeit.cms.browser.resources.base,
edit_js]) # noqa
SplitDirResource('drop.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.dnd_js,
context_js]) # noqa
SplitDirResource('sortable.js', depends=[
zeit.cms.browser.resources.base,
context_js, # noqa
drop_js]) # noqa
SplitDirResource('inlineform.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.view_js,
zeit.cms.browser.resources.form_js,
edit_js, # noqa
editor_css, # noqa
])
SplitDirResource('lightbox.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.lightbox_js,
zeit.cms.browser.resources.tab_js,
context_js, # noqa
edit_js, # noqa
editor_css, # noqa
])
SplitDirResource('library.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.view_js,
zeit.cms.browser.resources.tab_js,
drop_js, # noqa
editor_css, # noqa
])
SplitDirResource('undo.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.view_js,
editor_css, # noqa
])
| 27.046154 | 70 | 0.713879 |
from zeit.cms.browser.resources import SplitDirResource, Library
import zeit.cms.browser.resources
import zeit.find.browser.resources
lib_css = Library('zeit.edit', 'resources')
lib_js = Library('zeit.edit.js', 'js')
SplitDirResource('editor.css')
SplitDirResource('fold.js', depends=[zeit.cms.browser.resources.base])
SplitDirResource('json.js', depends=[zeit.cms.browser.resources.base])
SplitDirResource('edit.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.tab_js,
zeit.find.browser.resources.find_js,
json_js,
editor_css,
])
SplitDirResource('context.js', depends=[
zeit.cms.browser.resources.base,
edit_js])
SplitDirResource('drop.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.dnd_js,
context_js])
SplitDirResource('sortable.js', depends=[
zeit.cms.browser.resources.base,
context_js,
drop_js])
SplitDirResource('inlineform.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.view_js,
zeit.cms.browser.resources.form_js,
edit_js,
editor_css,
])
SplitDirResource('lightbox.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.lightbox_js,
zeit.cms.browser.resources.tab_js,
context_js,
edit_js,
editor_css,
])
SplitDirResource('library.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.view_js,
zeit.cms.browser.resources.tab_js,
drop_js,
editor_css,
])
SplitDirResource('undo.js', depends=[
zeit.cms.browser.resources.base,
zeit.cms.browser.resources.view_js,
editor_css,
])
| true | true |
f7246cd07aed951d4878742924bcbfb2fe5565c2 | 329 | py | Python | factioncli/commands/credentials.py | joncave/CLI | e12113594574bd4ca112895c6df59d1ae1c2094f | ["BSD-3-Clause"] | null | null | null | factioncli/commands/credentials.py | joncave/CLI | e12113594574bd4ca112895c6df59d1ae1c2094f | ["BSD-3-Clause"] | null | null | null | factioncli/commands/credentials.py | joncave/CLI | e12113594574bd4ca112895c6df59d1ae1c2094f | ["BSD-3-Clause"] | null | null | null |
from cliff.lister import Lister
from factioncli.processing.config import get_passwords
class Credentials(Lister):
"Returns a list of the default credentials for this instance of Faction"
def take_action(self, parsed_args):
passwords = get_passwords()
return ("Type", "Username", "Password"), passwords
| 29.909091 | 76 | 0.738602 | from cliff.lister import Lister
from factioncli.processing.config import get_passwords
class Credentials(Lister):
def take_action(self, parsed_args):
passwords = get_passwords()
return ("Type", "Username", "Password"), passwords
| true | true |
f7246fb1d3475e6a8f8ea2b2fd45ec7d3e10c62f | 16,568 | py | Python | venv/Lib/site-packages/scipy/sparse/linalg/tests/test_interface.py | EkremBayar/bayar | aad1a32044da671d0b4f11908416044753360b39 | ["MIT"] | 353 | 2020-12-10T10:47:17.000Z | 2022-03-31T23:08:29.000Z | venv/Lib/site-packages/scipy/sparse/linalg/tests/test_interface.py | EkremBayar/bayar | aad1a32044da671d0b4f11908416044753360b39 | ["MIT"] | 80 | 2020-12-10T09:54:22.000Z | 2022-03-30T22:08:45.000Z | venv/Lib/site-packages/scipy/sparse/linalg/tests/test_interface.py | EkremBayar/bayar | aad1a32044da671d0b4f11908416044753360b39 | ["MIT"] | 63 | 2020-12-10T17:10:34.000Z | 2022-03-28T16:27:07.000Z |
"""Test functions for the sparse.linalg.interface module
"""
from functools import partial
from itertools import product
import operator
import pytest
from pytest import raises as assert_raises, warns
from numpy.testing import assert_, assert_equal
import numpy as np
import scipy.sparse as sparse
from scipy.sparse.linalg import interface
from scipy.sparse.sputils import matrix
class TestLinearOperator(object):
def setup_method(self):
self.A = np.array([[1,2,3],
[4,5,6]])
self.B = np.array([[1,2],
[3,4],
[5,6]])
self.C = np.array([[1,2],
[3,4]])
def test_matvec(self):
def get_matvecs(A):
return [{
'shape': A.shape,
'matvec': lambda x: np.dot(A, x).reshape(A.shape[0]),
'rmatvec': lambda x: np.dot(A.T.conj(),
x).reshape(A.shape[1])
},
{
'shape': A.shape,
'matvec': lambda x: np.dot(A, x),
'rmatvec': lambda x: np.dot(A.T.conj(), x),
'rmatmat': lambda x: np.dot(A.T.conj(), x),
'matmat': lambda x: np.dot(A, x)
}]
for matvecs in get_matvecs(self.A):
A = interface.LinearOperator(**matvecs)
assert_(A.args == ())
assert_equal(A.matvec(np.array([1,2,3])), [14,32])
assert_equal(A.matvec(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A * np.array([1,2,3]), [14,32])
assert_equal(A * np.array([[1],[2],[3]]), [[14],[32]])
assert_equal(A.dot(np.array([1,2,3])), [14,32])
assert_equal(A.dot(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A.matvec(matrix([[1],[2],[3]])), [[14],[32]])
assert_equal(A * matrix([[1],[2],[3]]), [[14],[32]])
assert_equal(A.dot(matrix([[1],[2],[3]])), [[14],[32]])
assert_equal((2*A)*[1,1,1], [12,30])
assert_equal((2 * A).rmatvec([1, 1]), [10, 14, 18])
assert_equal((2*A).H.matvec([1,1]), [10, 14, 18])
assert_equal((2*A)*[[1],[1],[1]], [[12],[30]])
assert_equal((2 * A).matmat([[1], [1], [1]]), [[12], [30]])
assert_equal((A*2)*[1,1,1], [12,30])
assert_equal((A*2)*[[1],[1],[1]], [[12],[30]])
assert_equal((2j*A)*[1,1,1], [12j,30j])
assert_equal((A+A)*[1,1,1], [12, 30])
assert_equal((A + A).rmatvec([1, 1]), [10, 14, 18])
assert_equal((A+A).H.matvec([1,1]), [10, 14, 18])
assert_equal((A+A)*[[1],[1],[1]], [[12], [30]])
assert_equal((A+A).matmat([[1],[1],[1]]), [[12], [30]])
assert_equal((-A)*[1,1,1], [-6,-15])
assert_equal((-A)*[[1],[1],[1]], [[-6],[-15]])
assert_equal((A-A)*[1,1,1], [0,0])
assert_equal((A - A) * [[1], [1], [1]], [[0], [0]])
X = np.array([[1, 2], [3, 4]])
# A_asarray = np.array([[1, 2, 3], [4, 5, 6]])
assert_equal((2 * A).rmatmat(X), np.dot((2 * self.A).T, X))
assert_equal((A * 2).rmatmat(X), np.dot((self.A * 2).T, X))
assert_equal((2j * A).rmatmat(X),
np.dot((2j * self.A).T.conj(), X))
assert_equal((A * 2j).rmatmat(X),
np.dot((self.A * 2j).T.conj(), X))
assert_equal((A + A).rmatmat(X),
np.dot((self.A + self.A).T, X))
assert_equal((A + 2j * A).rmatmat(X),
np.dot((self.A + 2j * self.A).T.conj(), X))
assert_equal((-A).rmatmat(X), np.dot((-self.A).T, X))
assert_equal((A - A).rmatmat(X),
np.dot((self.A - self.A).T, X))
assert_equal((2j * A).rmatmat(2j * X),
np.dot((2j * self.A).T.conj(), 2j * X))
z = A+A
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] is A)
z = 2*A
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] == 2)
assert_(isinstance(A.matvec([1, 2, 3]), np.ndarray))
assert_(isinstance(A.matvec(np.array([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A * np.array([1,2,3]), np.ndarray))
assert_(isinstance(A * np.array([[1],[2],[3]]), np.ndarray))
assert_(isinstance(A.dot(np.array([1,2,3])), np.ndarray))
assert_(isinstance(A.dot(np.array([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A.matvec(matrix([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A * matrix([[1],[2],[3]]), np.ndarray))
assert_(isinstance(A.dot(matrix([[1],[2],[3]])), np.ndarray))
assert_(isinstance(2*A, interface._ScaledLinearOperator))
assert_(isinstance(2j*A, interface._ScaledLinearOperator))
assert_(isinstance(A+A, interface._SumLinearOperator))
assert_(isinstance(-A, interface._ScaledLinearOperator))
assert_(isinstance(A-A, interface._SumLinearOperator))
assert_((2j*A).dtype == np.complex_)
assert_raises(ValueError, A.matvec, np.array([1,2]))
assert_raises(ValueError, A.matvec, np.array([1,2,3,4]))
assert_raises(ValueError, A.matvec, np.array([[1],[2]]))
assert_raises(ValueError, A.matvec, np.array([[1],[2],[3],[4]]))
assert_raises(ValueError, lambda: A*A)
assert_raises(ValueError, lambda: A**2)
for matvecsA, matvecsB in product(get_matvecs(self.A),
get_matvecs(self.B)):
A = interface.LinearOperator(**matvecsA)
B = interface.LinearOperator(**matvecsB)
# AtimesB = np.array([[22, 28], [49, 64]])
AtimesB = self.A.dot(self.B)
X = np.array([[1, 2], [3, 4]])
assert_equal((A * B).rmatmat(X), np.dot((AtimesB).T, X))
assert_equal((2j * A * B).rmatmat(X),
np.dot((2j * AtimesB).T.conj(), X))
assert_equal((A*B)*[1,1], [50,113])
assert_equal((A*B)*[[1],[1]], [[50],[113]])
assert_equal((A*B).matmat([[1],[1]]), [[50],[113]])
assert_equal((A * B).rmatvec([1, 1]), [71, 92])
assert_equal((A * B).H.matvec([1, 1]), [71, 92])
assert_(isinstance(A*B, interface._ProductLinearOperator))
assert_raises(ValueError, lambda: A+B)
assert_raises(ValueError, lambda: A**2)
z = A*B
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] is B)
for matvecsC in get_matvecs(self.C):
C = interface.LinearOperator(**matvecsC)
X = np.array([[1, 2], [3, 4]])
assert_equal(C.rmatmat(X), np.dot((self.C).T, X))
assert_equal((C**2).rmatmat(X),
np.dot((np.dot(self.C, self.C)).T, X))
assert_equal((C**2)*[1,1], [17,37])
assert_equal((C**2).rmatvec([1, 1]), [22, 32])
assert_equal((C**2).H.matvec([1, 1]), [22, 32])
assert_equal((C**2).matmat([[1],[1]]), [[17],[37]])
assert_(isinstance(C**2, interface._PowerLinearOperator))
def test_matmul(self):
D = {'shape': self.A.shape,
'matvec': lambda x: np.dot(self.A, x).reshape(self.A.shape[0]),
'rmatvec': lambda x: np.dot(self.A.T.conj(),
x).reshape(self.A.shape[1]),
'rmatmat': lambda x: np.dot(self.A.T.conj(), x),
'matmat': lambda x: np.dot(self.A, x)}
A = interface.LinearOperator(**D)
B = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
b = B[0]
assert_equal(operator.matmul(A, b), A * b)
assert_equal(operator.matmul(A, B), A * B)
assert_raises(ValueError, operator.matmul, A, 2)
assert_raises(ValueError, operator.matmul, 2, A)
class TestAsLinearOperator(object):
def setup_method(self):
self.cases = []
def make_cases(original, dtype):
cases = []
cases.append((matrix(original, dtype=dtype), original))
cases.append((np.array(original, dtype=dtype), original))
cases.append((sparse.csr_matrix(original, dtype=dtype), original))
# Test default implementations of _adjoint and _rmatvec, which
# refer to each other.
def mv(x, dtype):
y = original.dot(x)
if len(x.shape) == 2:
y = y.reshape(-1, 1)
return y
def rmv(x, dtype):
return original.T.conj().dot(x)
class BaseMatlike(interface.LinearOperator):
args = ()
def __init__(self, dtype):
self.dtype = np.dtype(dtype)
self.shape = original.shape
def _matvec(self, x):
return mv(x, self.dtype)
class HasRmatvec(BaseMatlike):
args = ()
def _rmatvec(self,x):
return rmv(x, self.dtype)
class HasAdjoint(BaseMatlike):
args = ()
def _adjoint(self):
shape = self.shape[1], self.shape[0]
matvec = partial(rmv, dtype=self.dtype)
rmatvec = partial(mv, dtype=self.dtype)
return interface.LinearOperator(matvec=matvec,
rmatvec=rmatvec,
dtype=self.dtype,
shape=shape)
class HasRmatmat(HasRmatvec):
def _matmat(self, x):
return original.dot(x)
def _rmatmat(self, x):
return original.T.conj().dot(x)
cases.append((HasRmatvec(dtype), original))
cases.append((HasAdjoint(dtype), original))
cases.append((HasRmatmat(dtype), original))
return cases
original = np.array([[1,2,3], [4,5,6]])
self.cases += make_cases(original, np.int32)
self.cases += make_cases(original, np.float32)
self.cases += make_cases(original, np.float64)
self.cases += [(interface.aslinearoperator(M).T, A.T)
for M, A in make_cases(original.T, np.float64)]
self.cases += [(interface.aslinearoperator(M).H, A.T.conj())
for M, A in make_cases(original.T, np.float64)]
original = np.array([[1, 2j, 3j], [4j, 5j, 6]])
self.cases += make_cases(original, np.complex_)
self.cases += [(interface.aslinearoperator(M).T, A.T)
for M, A in make_cases(original.T, np.complex_)]
self.cases += [(interface.aslinearoperator(M).H, A.T.conj())
for M, A in make_cases(original.T, np.complex_)]
def test_basic(self):
for M, A_array in self.cases:
A = interface.aslinearoperator(M)
M,N = A.shape
xs = [np.array([1, 2, 3]),
np.array([[1], [2], [3]])]
ys = [np.array([1, 2]), np.array([[1], [2]])]
if A.dtype == np.complex_:
xs += [np.array([1, 2j, 3j]),
np.array([[1], [2j], [3j]])]
ys += [np.array([1, 2j]), np.array([[1], [2j]])]
x2 = np.array([[1, 4], [2, 5], [3, 6]])
for x in xs:
assert_equal(A.matvec(x), A_array.dot(x))
assert_equal(A * x, A_array.dot(x))
assert_equal(A.matmat(x2), A_array.dot(x2))
assert_equal(A * x2, A_array.dot(x2))
for y in ys:
assert_equal(A.rmatvec(y), A_array.T.conj().dot(y))
assert_equal(A.T.matvec(y), A_array.T.dot(y))
assert_equal(A.H.matvec(y), A_array.T.conj().dot(y))
for y in ys:
if y.ndim < 2:
continue
assert_equal(A.rmatmat(y), A_array.T.conj().dot(y))
assert_equal(A.T.matmat(y), A_array.T.dot(y))
assert_equal(A.H.matmat(y), A_array.T.conj().dot(y))
if hasattr(M,'dtype'):
assert_equal(A.dtype, M.dtype)
assert_(hasattr(A, 'args'))
def test_dot(self):
for M, A_array in self.cases:
A = interface.aslinearoperator(M)
M,N = A.shape
x0 = np.array([1, 2, 3])
x1 = np.array([[1], [2], [3]])
x2 = np.array([[1, 4], [2, 5], [3, 6]])
assert_equal(A.dot(x0), A_array.dot(x0))
assert_equal(A.dot(x1), A_array.dot(x1))
assert_equal(A.dot(x2), A_array.dot(x2))
def test_repr():
A = interface.LinearOperator(shape=(1, 1), matvec=lambda x: 1)
repr_A = repr(A)
assert_('unspecified dtype' not in repr_A, repr_A)
def test_identity():
ident = interface.IdentityOperator((3, 3))
assert_equal(ident * [1, 2, 3], [1, 2, 3])
assert_equal(ident.dot(np.arange(9).reshape(3, 3)).ravel(), np.arange(9))
assert_raises(ValueError, ident.matvec, [1, 2, 3, 4])
def test_attributes():
A = interface.aslinearoperator(np.arange(16).reshape(4, 4))
def always_four_ones(x):
x = np.asarray(x)
assert_(x.shape == (3,) or x.shape == (3, 1))
return np.ones(4)
B = interface.LinearOperator(shape=(4, 3), matvec=always_four_ones)
for op in [A, B, A * B, A.H, A + A, B + B, A**4]:
assert_(hasattr(op, "dtype"))
assert_(hasattr(op, "shape"))
assert_(hasattr(op, "_matvec"))
def matvec(x):
""" Needed for test_pickle as local functions are not pickleable """
return np.zeros(3)
def test_pickle():
import pickle
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
A = interface.LinearOperator((3, 3), matvec)
s = pickle.dumps(A, protocol=protocol)
B = pickle.loads(s)
for k in A.__dict__:
assert_equal(getattr(A, k), getattr(B, k))
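def _pickle_constraint_demo():
    # Illustration of the note on `matvec` above (not part of the original
    # test suite; plain CPython pickle semantics assumed): module-level
    # functions pickle by reference, while lambdas and local functions have
    # no importable name and fail.
    import pickle
    pickle.dumps(matvec)  # succeeds: `matvec` is resolvable by module + name
    try:
        pickle.dumps(lambda x: x)  # raises: no importable name for a lambda
    except Exception:
        pass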
def test_inheritance():
class Empty(interface.LinearOperator):
pass
with warns(RuntimeWarning, match="should implement at least"):
assert_raises(TypeError, Empty)
class Identity(interface.LinearOperator):
def __init__(self, n):
super(Identity, self).__init__(dtype=None, shape=(n, n))
def _matvec(self, x):
return x
id3 = Identity(3)
assert_equal(id3.matvec([1, 2, 3]), [1, 2, 3])
assert_raises(NotImplementedError, id3.rmatvec, [4, 5, 6])
class MatmatOnly(interface.LinearOperator):
def __init__(self, A):
super(MatmatOnly, self).__init__(A.dtype, A.shape)
self.A = A
def _matmat(self, x):
return self.A.dot(x)
mm = MatmatOnly(np.random.randn(5, 3))
assert_equal(mm.matvec(np.random.randn(3)).shape, (5,))
def test_dtypes_of_operator_sum():
# gh-6078
mat_complex = np.random.rand(2,2) + 1j * np.random.rand(2,2)
mat_real = np.random.rand(2,2)
complex_operator = interface.aslinearoperator(mat_complex)
real_operator = interface.aslinearoperator(mat_real)
sum_complex = complex_operator + complex_operator
sum_real = real_operator + real_operator
assert_equal(sum_real.dtype, np.float64)
assert_equal(sum_complex.dtype, np.complex128)
def test_no_double_init():
call_count = [0]
def matvec(v):
call_count[0] += 1
return v
# It should call matvec exactly once (in order to determine the
# operator dtype)
interface.LinearOperator((2, 2), matvec=matvec)
assert_equal(call_count[0], 1)
def test_adjoint_conjugate():
X = np.array([[1j]])
A = interface.aslinearoperator(X)
B = 1j * A
Y = 1j * X
v = np.array([1])
assert_equal(B.dot(v), Y.dot(v))
assert_equal(B.H.dot(v), Y.T.conj().dot(v))
def test_ndim():
X = np.array([[1]])
A = interface.aslinearoperator(X)
assert_equal(A.ndim, 2)
def test_transpose_noconjugate():
X = np.array([[1j]])
A = interface.aslinearoperator(X)
B = 1j * A
Y = 1j * X
v = np.array([1])
assert_equal(B.dot(v), Y.dot(v))
assert_equal(B.T.dot(v), Y.T.dot(v))
| 36.736142 | 78 | 0.506639 |
from functools import partial
from itertools import product
import operator
import pytest
from pytest import raises as assert_raises, warns
from numpy.testing import assert_, assert_equal
import numpy as np
import scipy.sparse as sparse
from scipy.sparse.linalg import interface
from scipy.sparse.sputils import matrix
class TestLinearOperator(object):
def setup_method(self):
self.A = np.array([[1,2,3],
[4,5,6]])
self.B = np.array([[1,2],
[3,4],
[5,6]])
self.C = np.array([[1,2],
[3,4]])
def test_matvec(self):
def get_matvecs(A):
return [{
'shape': A.shape,
'matvec': lambda x: np.dot(A, x).reshape(A.shape[0]),
'rmatvec': lambda x: np.dot(A.T.conj(),
x).reshape(A.shape[1])
},
{
'shape': A.shape,
'matvec': lambda x: np.dot(A, x),
'rmatvec': lambda x: np.dot(A.T.conj(), x),
'rmatmat': lambda x: np.dot(A.T.conj(), x),
'matmat': lambda x: np.dot(A, x)
}]
for matvecs in get_matvecs(self.A):
A = interface.LinearOperator(**matvecs)
assert_(A.args == ())
assert_equal(A.matvec(np.array([1,2,3])), [14,32])
assert_equal(A.matvec(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A * np.array([1,2,3]), [14,32])
assert_equal(A * np.array([[1],[2],[3]]), [[14],[32]])
assert_equal(A.dot(np.array([1,2,3])), [14,32])
assert_equal(A.dot(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A.matvec(matrix([[1],[2],[3]])), [[14],[32]])
assert_equal(A * matrix([[1],[2],[3]]), [[14],[32]])
assert_equal(A.dot(matrix([[1],[2],[3]])), [[14],[32]])
assert_equal((2*A)*[1,1,1], [12,30])
assert_equal((2 * A).rmatvec([1, 1]), [10, 14, 18])
assert_equal((2*A).H.matvec([1,1]), [10, 14, 18])
assert_equal((2*A)*[[1],[1],[1]], [[12],[30]])
assert_equal((2 * A).matmat([[1], [1], [1]]), [[12], [30]])
assert_equal((A*2)*[1,1,1], [12,30])
assert_equal((A*2)*[[1],[1],[1]], [[12],[30]])
assert_equal((2j*A)*[1,1,1], [12j,30j])
assert_equal((A+A)*[1,1,1], [12, 30])
assert_equal((A + A).rmatvec([1, 1]), [10, 14, 18])
assert_equal((A+A).H.matvec([1,1]), [10, 14, 18])
assert_equal((A+A)*[[1],[1],[1]], [[12], [30]])
assert_equal((A+A).matmat([[1],[1],[1]]), [[12], [30]])
assert_equal((-A)*[1,1,1], [-6,-15])
assert_equal((-A)*[[1],[1],[1]], [[-6],[-15]])
assert_equal((A-A)*[1,1,1], [0,0])
assert_equal((A - A) * [[1], [1], [1]], [[0], [0]])
X = np.array([[1, 2], [3, 4]])
assert_equal((2 * A).rmatmat(X), np.dot((2 * self.A).T, X))
assert_equal((A * 2).rmatmat(X), np.dot((self.A * 2).T, X))
assert_equal((2j * A).rmatmat(X),
np.dot((2j * self.A).T.conj(), X))
assert_equal((A * 2j).rmatmat(X),
np.dot((self.A * 2j).T.conj(), X))
assert_equal((A + A).rmatmat(X),
np.dot((self.A + self.A).T, X))
assert_equal((A + 2j * A).rmatmat(X),
np.dot((self.A + 2j * self.A).T.conj(), X))
assert_equal((-A).rmatmat(X), np.dot((-self.A).T, X))
assert_equal((A - A).rmatmat(X),
np.dot((self.A - self.A).T, X))
assert_equal((2j * A).rmatmat(2j * X),
np.dot((2j * self.A).T.conj(), 2j * X))
z = A+A
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] is A)
z = 2*A
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] == 2)
assert_(isinstance(A.matvec([1, 2, 3]), np.ndarray))
assert_(isinstance(A.matvec(np.array([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A * np.array([1,2,3]), np.ndarray))
assert_(isinstance(A * np.array([[1],[2],[3]]), np.ndarray))
assert_(isinstance(A.dot(np.array([1,2,3])), np.ndarray))
assert_(isinstance(A.dot(np.array([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A.matvec(matrix([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A * matrix([[1],[2],[3]]), np.ndarray))
assert_(isinstance(A.dot(matrix([[1],[2],[3]])), np.ndarray))
assert_(isinstance(2*A, interface._ScaledLinearOperator))
assert_(isinstance(2j*A, interface._ScaledLinearOperator))
assert_(isinstance(A+A, interface._SumLinearOperator))
assert_(isinstance(-A, interface._ScaledLinearOperator))
assert_(isinstance(A-A, interface._SumLinearOperator))
assert_((2j*A).dtype == np.complex_)
assert_raises(ValueError, A.matvec, np.array([1,2]))
assert_raises(ValueError, A.matvec, np.array([1,2,3,4]))
assert_raises(ValueError, A.matvec, np.array([[1],[2]]))
assert_raises(ValueError, A.matvec, np.array([[1],[2],[3],[4]]))
assert_raises(ValueError, lambda: A*A)
assert_raises(ValueError, lambda: A**2)
for matvecsA, matvecsB in product(get_matvecs(self.A),
get_matvecs(self.B)):
A = interface.LinearOperator(**matvecsA)
B = interface.LinearOperator(**matvecsB)
AtimesB = self.A.dot(self.B)
X = np.array([[1, 2], [3, 4]])
assert_equal((A * B).rmatmat(X), np.dot((AtimesB).T, X))
assert_equal((2j * A * B).rmatmat(X),
np.dot((2j * AtimesB).T.conj(), X))
assert_equal((A*B)*[1,1], [50,113])
assert_equal((A*B)*[[1],[1]], [[50],[113]])
assert_equal((A*B).matmat([[1],[1]]), [[50],[113]])
assert_equal((A * B).rmatvec([1, 1]), [71, 92])
assert_equal((A * B).H.matvec([1, 1]), [71, 92])
assert_(isinstance(A*B, interface._ProductLinearOperator))
assert_raises(ValueError, lambda: A+B)
assert_raises(ValueError, lambda: A**2)
z = A*B
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] is B)
for matvecsC in get_matvecs(self.C):
C = interface.LinearOperator(**matvecsC)
X = np.array([[1, 2], [3, 4]])
assert_equal(C.rmatmat(X), np.dot((self.C).T, X))
assert_equal((C**2).rmatmat(X),
np.dot((np.dot(self.C, self.C)).T, X))
assert_equal((C**2)*[1,1], [17,37])
assert_equal((C**2).rmatvec([1, 1]), [22, 32])
assert_equal((C**2).H.matvec([1, 1]), [22, 32])
assert_equal((C**2).matmat([[1],[1]]), [[17],[37]])
assert_(isinstance(C**2, interface._PowerLinearOperator))
def test_matmul(self):
D = {'shape': self.A.shape,
'matvec': lambda x: np.dot(self.A, x).reshape(self.A.shape[0]),
'rmatvec': lambda x: np.dot(self.A.T.conj(),
x).reshape(self.A.shape[1]),
'rmatmat': lambda x: np.dot(self.A.T.conj(), x),
'matmat': lambda x: np.dot(self.A, x)}
A = interface.LinearOperator(**D)
B = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
b = B[0]
assert_equal(operator.matmul(A, b), A * b)
assert_equal(operator.matmul(A, B), A * B)
assert_raises(ValueError, operator.matmul, A, 2)
assert_raises(ValueError, operator.matmul, 2, A)
class TestAsLinearOperator(object):
def setup_method(self):
self.cases = []
def make_cases(original, dtype):
cases = []
cases.append((matrix(original, dtype=dtype), original))
cases.append((np.array(original, dtype=dtype), original))
cases.append((sparse.csr_matrix(original, dtype=dtype), original))
def mv(x, dtype):
y = original.dot(x)
if len(x.shape) == 2:
y = y.reshape(-1, 1)
return y
def rmv(x, dtype):
return original.T.conj().dot(x)
class BaseMatlike(interface.LinearOperator):
args = ()
def __init__(self, dtype):
self.dtype = np.dtype(dtype)
self.shape = original.shape
def _matvec(self, x):
return mv(x, self.dtype)
class HasRmatvec(BaseMatlike):
args = ()
def _rmatvec(self,x):
return rmv(x, self.dtype)
class HasAdjoint(BaseMatlike):
args = ()
def _adjoint(self):
shape = self.shape[1], self.shape[0]
matvec = partial(rmv, dtype=self.dtype)
rmatvec = partial(mv, dtype=self.dtype)
return interface.LinearOperator(matvec=matvec,
rmatvec=rmatvec,
dtype=self.dtype,
shape=shape)
class HasRmatmat(HasRmatvec):
def _matmat(self, x):
return original.dot(x)
def _rmatmat(self, x):
return original.T.conj().dot(x)
cases.append((HasRmatvec(dtype), original))
cases.append((HasAdjoint(dtype), original))
cases.append((HasRmatmat(dtype), original))
return cases
original = np.array([[1,2,3], [4,5,6]])
self.cases += make_cases(original, np.int32)
self.cases += make_cases(original, np.float32)
self.cases += make_cases(original, np.float64)
self.cases += [(interface.aslinearoperator(M).T, A.T)
for M, A in make_cases(original.T, np.float64)]
self.cases += [(interface.aslinearoperator(M).H, A.T.conj())
for M, A in make_cases(original.T, np.float64)]
original = np.array([[1, 2j, 3j], [4j, 5j, 6]])
self.cases += make_cases(original, np.complex_)
self.cases += [(interface.aslinearoperator(M).T, A.T)
for M, A in make_cases(original.T, np.complex_)]
self.cases += [(interface.aslinearoperator(M).H, A.T.conj())
for M, A in make_cases(original.T, np.complex_)]
def test_basic(self):
for M, A_array in self.cases:
A = interface.aslinearoperator(M)
M,N = A.shape
xs = [np.array([1, 2, 3]),
np.array([[1], [2], [3]])]
ys = [np.array([1, 2]), np.array([[1], [2]])]
if A.dtype == np.complex_:
xs += [np.array([1, 2j, 3j]),
np.array([[1], [2j], [3j]])]
ys += [np.array([1, 2j]), np.array([[1], [2j]])]
x2 = np.array([[1, 4], [2, 5], [3, 6]])
for x in xs:
assert_equal(A.matvec(x), A_array.dot(x))
assert_equal(A * x, A_array.dot(x))
assert_equal(A.matmat(x2), A_array.dot(x2))
assert_equal(A * x2, A_array.dot(x2))
for y in ys:
assert_equal(A.rmatvec(y), A_array.T.conj().dot(y))
assert_equal(A.T.matvec(y), A_array.T.dot(y))
assert_equal(A.H.matvec(y), A_array.T.conj().dot(y))
for y in ys:
if y.ndim < 2:
continue
assert_equal(A.rmatmat(y), A_array.T.conj().dot(y))
assert_equal(A.T.matmat(y), A_array.T.dot(y))
assert_equal(A.H.matmat(y), A_array.T.conj().dot(y))
if hasattr(M,'dtype'):
assert_equal(A.dtype, M.dtype)
assert_(hasattr(A, 'args'))
def test_dot(self):
for M, A_array in self.cases:
A = interface.aslinearoperator(M)
M,N = A.shape
x0 = np.array([1, 2, 3])
x1 = np.array([[1], [2], [3]])
x2 = np.array([[1, 4], [2, 5], [3, 6]])
assert_equal(A.dot(x0), A_array.dot(x0))
assert_equal(A.dot(x1), A_array.dot(x1))
assert_equal(A.dot(x2), A_array.dot(x2))
def test_repr():
A = interface.LinearOperator(shape=(1, 1), matvec=lambda x: 1)
repr_A = repr(A)
assert_('unspecified dtype' not in repr_A, repr_A)
def test_identity():
ident = interface.IdentityOperator((3, 3))
assert_equal(ident * [1, 2, 3], [1, 2, 3])
assert_equal(ident.dot(np.arange(9).reshape(3, 3)).ravel(), np.arange(9))
assert_raises(ValueError, ident.matvec, [1, 2, 3, 4])
def test_attributes():
A = interface.aslinearoperator(np.arange(16).reshape(4, 4))
def always_four_ones(x):
x = np.asarray(x)
assert_(x.shape == (3,) or x.shape == (3, 1))
return np.ones(4)
B = interface.LinearOperator(shape=(4, 3), matvec=always_four_ones)
for op in [A, B, A * B, A.H, A + A, B + B, A**4]:
assert_(hasattr(op, "dtype"))
assert_(hasattr(op, "shape"))
assert_(hasattr(op, "_matvec"))
def matvec(x):
return np.zeros(3)
def test_pickle():
import pickle
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
A = interface.LinearOperator((3, 3), matvec)
s = pickle.dumps(A, protocol=protocol)
B = pickle.loads(s)
for k in A.__dict__:
assert_equal(getattr(A, k), getattr(B, k))
def test_inheritance():
class Empty(interface.LinearOperator):
pass
with warns(RuntimeWarning, match="should implement at least"):
assert_raises(TypeError, Empty)
class Identity(interface.LinearOperator):
def __init__(self, n):
super(Identity, self).__init__(dtype=None, shape=(n, n))
def _matvec(self, x):
return x
id3 = Identity(3)
assert_equal(id3.matvec([1, 2, 3]), [1, 2, 3])
assert_raises(NotImplementedError, id3.rmatvec, [4, 5, 6])
class MatmatOnly(interface.LinearOperator):
def __init__(self, A):
super(MatmatOnly, self).__init__(A.dtype, A.shape)
self.A = A
def _matmat(self, x):
return self.A.dot(x)
mm = MatmatOnly(np.random.randn(5, 3))
assert_equal(mm.matvec(np.random.randn(3)).shape, (5,))
def test_dtypes_of_operator_sum():
mat_complex = np.random.rand(2,2) + 1j * np.random.rand(2,2)
mat_real = np.random.rand(2,2)
complex_operator = interface.aslinearoperator(mat_complex)
real_operator = interface.aslinearoperator(mat_real)
sum_complex = complex_operator + complex_operator
sum_real = real_operator + real_operator
assert_equal(sum_real.dtype, np.float64)
assert_equal(sum_complex.dtype, np.complex128)
def test_no_double_init():
call_count = [0]
def matvec(v):
call_count[0] += 1
return v
interface.LinearOperator((2, 2), matvec=matvec)
assert_equal(call_count[0], 1)
def test_adjoint_conjugate():
X = np.array([[1j]])
A = interface.aslinearoperator(X)
B = 1j * A
Y = 1j * X
v = np.array([1])
assert_equal(B.dot(v), Y.dot(v))
assert_equal(B.H.dot(v), Y.T.conj().dot(v))
def test_ndim():
X = np.array([[1]])
A = interface.aslinearoperator(X)
assert_equal(A.ndim, 2)
def test_transpose_noconjugate():
X = np.array([[1j]])
A = interface.aslinearoperator(X)
B = 1j * A
Y = 1j * X
v = np.array([1])
assert_equal(B.dot(v), Y.dot(v))
assert_equal(B.T.dot(v), Y.T.dot(v))
| true | true |
f7246fd41a52c4c852543dc3cdc9672346ec9dbe | 6,333 | py | Python | doc/source/conf.py | gaohao95/cffi | 8d1a4ec54db0f3f0e18e4a68c2bdc7f32d0fdd8b | ["MIT"] | 1 | 2017-01-05T00:59:03.000Z | 2017-01-05T00:59:03.000Z | doc/source/conf.py | gaohao95/cffi | 8d1a4ec54db0f3f0e18e4a68c2bdc7f32d0fdd8b | ["MIT"] | null | null | null | doc/source/conf.py | gaohao95/cffi | 8d1a4ec54db0f3f0e18e4a68c2bdc7f32d0fdd8b | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#
# CFFI documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 16:37:47 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CFFI'
copyright = u'2012-2015, Armin Rigo, Maciej Fijalkowski'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.9'
# The full version, including alpha/beta/rc tags.
release = '1.9.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
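# Example override (not from the original conf.py; 'sphinxdoc' ships with
# Sphinx and 'nosidebar' is a standard basic-theme option, but available
# options differ per theme):
#html_theme = 'sphinxdoc'
#html_theme_options = {'nosidebar': False}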
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'CFFIdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CFFI.tex', u'CFFI Documentation',
u'Armin Rigo, Maciej Fijalkowski', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| 32.476923 | 80 | 0.722722 |
import sys, os
extensions = ['sphinx.ext.autodoc']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'CFFI'
copyright = u'2012-2015, Armin Rigo, Maciej Fijalkowski'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.9'
# The full version, including alpha/beta/rc tags.
release = '1.9.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'CFFIdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CFFI.tex', u'CFFI Documentation',
u'Armin Rigo, Maciej Fijalkowski', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| true | true |
f72470edb12a542373c15f2c30a033702b46467a | 1,325 | py | Python | tests/test_erd.py | Datateer/erd-python | 528b876bf5adf6114cd1e0aac6a2be14006fd2eb | ["MIT"] | 1 | 2021-09-13T06:05:48.000Z | 2021-09-13T06:05:48.000Z | tests/test_erd.py | Datateer/erd-python | 528b876bf5adf6114cd1e0aac6a2be14006fd2eb | ["MIT"] | 2 | 2021-01-22T11:20:34.000Z | 2022-01-18T07:20:09.000Z | tests/test_erd.py | Datateer/erd-python | 528b876bf5adf6114cd1e0aac6a2be14006fd2eb | ["MIT"] | null | null | null |
#!/usr/bin/env python
"""Tests for `erd` package."""
import unittest
from click.testing import CliRunner
from erd import erd
from erd import cli
class TestErd(unittest.TestCase):
"""Tests for `erd` package."""
def setUp(self):
"""Set up test fixtures, if any."""
def tearDown(self):
"""Tear down test fixtures, if any."""
def test_000_something(self):
"""Test something."""
def test_command_line_interface(self):
"""Test the CLI."""
runner = CliRunner()
result = runner.invoke(cli.main)
assert result.exit_code == 0
assert 'erd.cli.main' in result.output
help_result = runner.invoke(cli.main, ['--help'])
assert help_result.exit_code == 0
assert '--help Show this message and exit.' in help_result.output
@unittest.skip('not implemented yet')
def test_input_filename_required(self):
raise NotImplementedError()
@unittest.skip('not implemented yet')
def test_outputfilename_required(self):
raise NotImplementedError()
@unittest.skip('not implemented yet')
def test_errors_if_input_file_not_found(self):
raise NotImplementedError()
@unittest.skip('not implemented yet')
def test_produces_output_file(self):
raise NotImplementedError()
| 25.980392 | 74 | 0.658113 |
import unittest
from click.testing import CliRunner
from erd import erd
from erd import cli
class TestErd(unittest.TestCase):
def setUp(self):
def tearDown(self):
def test_000_something(self):
def test_command_line_interface(self):
runner = CliRunner()
result = runner.invoke(cli.main)
assert result.exit_code == 0
assert 'erd.cli.main' in result.output
help_result = runner.invoke(cli.main, ['--help'])
assert help_result.exit_code == 0
assert '--help Show this message and exit.' in help_result.output
@unittest.skip('not implemented yet')
def test_input_filename_required(self):
raise NotImplementedError()
@unittest.skip('not implemented yet')
def test_outputfilename_required(self):
raise NotImplementedError()
@unittest.skip('not implemented yet')
def test_errors_if_input_file_not_found(self):
raise NotImplementedError()
@unittest.skip('not implemented yet')
def test_produces_output_file(self):
raise NotImplementedError()
| true | true |
f7247100061d6774afb40c4bea380d62cdfa96b7 | 11,258 | py | Python | modnet/matbench/benchmark.py | sparks-baird/modnet | 2b4a88aa8a3323756b6daee52450569cddd0068b | ["MIT"] | 32 | 2020-05-22T11:47:37.000Z | 2022-03-28T12:55:40.000Z | modnet/matbench/benchmark.py | sparks-baird/modnet | 2b4a88aa8a3323756b6daee52450569cddd0068b | ["MIT"] | 43 | 2020-06-12T21:09:15.000Z | 2022-03-28T21:05:43.000Z | modnet/matbench/benchmark.py | sparks-baird/modnet | 2b4a88aa8a3323756b6daee52450569cddd0068b | ["MIT"] | 22 | 2020-06-19T12:03:02.000Z | 2022-01-17T16:13:38.000Z |
import os
from collections import defaultdict
from traceback import print_exc
from typing import List, Dict, Any, Optional, Tuple, Type
import numpy as np
from modnet.preprocessing import MODData
from modnet.models import MODNetModel
from modnet.utils import LOG
from modnet.hyper_opt import FitGenetic
MATBENCH_SEED = 18012019
def matbench_kfold_splits(data: MODData, n_splits=5, classification=False):
"""Return the pre-defined k-fold splits to use when reporting matbench results.
Arguments:
data: The featurized MODData.
"""
if classification:
from sklearn.model_selection import StratifiedKFold as KFold
else:
from sklearn.model_selection import KFold
kf = KFold(n_splits=n_splits, shuffle=True, random_state=MATBENCH_SEED)
kf_splits = kf.split(data.df_featurized, y=data.df_targets)
return kf_splits
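def _example_kfold_usage(data: MODData):
    # Illustrative sketch only (not part of the original module): how the
    # deterministic matbench folds are typically consumed downstream.
    for fold_ind, (train_idx, test_idx) in enumerate(matbench_kfold_splits(data)):
        train, test = data.split((train_idx, test_idx))
        LOG.info(f"fold {fold_ind + 1}: {len(train_idx)} train / {len(test_idx)} test samples")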
def matbench_benchmark(
data: MODData,
target: List[str],
target_weights: Dict[str, float],
fit_settings: Optional[Dict[str, Any]] = None,
ga_settings: Optional[Dict[str, float]] = None,
classification: bool = False,
model_type: Type[MODNetModel] = MODNetModel,
save_folds: bool = False,
save_models: bool = False,
hp_optimization: bool = True,
hp_strategy: str = "fit_preset",
inner_feat_selection: bool = True,
use_precomputed_cross_nmi: bool = True,
presets: Optional[List[dict]] = None,
fast: bool = False,
n_jobs: Optional[int] = None,
nested: bool = False,
**model_init_kwargs,
) -> dict:
"""Train and cross-validate a model against Matbench data splits, optionally
performing hyperparameter optimisation.
Arguments:
data: The entire dataset as a `MODData`.
target: The list of target names to train on.
target_weights: The target weights to use for the `MODNetModel`.
fit_settings: Any settings to pass to `model.fit(...)` directly
            (typically when not performing hyperparameter optimisation).
        ga_settings: Population/generation settings used when `hp_strategy`
            is "ga" (defaults are set below if omitted).
        classification: Whether all tasks are classification rather than regression.
model_type: The type of the model to create and benchmark.
save_folds: Whether to save dataframes with pre-processed fold
data (e.g. feature selection).
save_models: Whether to pickle all trained models according to
their fold index and performance.
hp_optimization: Whether to perform hyperparameter optimisation.
hp_strategy: Which optimization strategy to choose. Use either \"fit_preset\" or \"ga\".
inner_feat_selection: Whether to perform split-level feature
selection or try to use pre-computed values.
use_precomputed_cross_nmi: Whether to use the precmputed cross NMI
from the Materials Project dataset, or recompute per fold.
presets: Override the built-in hyperparameter grid with these presets.
fast: Whether to perform debug training, i.e. reduced presets and epochs, for the fit_preset strategy.
n_jobs: Try to parallelize the inner fit_preset over this number of
processes. Maxes out at number_of_presets*nested_folds
nested: Whether to perform nested CV for hyperparameter optimisation.
**model_init_kwargs: Additional arguments to pass to the model on creation.
Returns:
A dictionary containing all the results from the training, broken
down by model and by fold.
"""
if hp_optimization:
if hp_strategy not in ["fit_preset", "ga"]:
raise RuntimeError(
                f'{hp_strategy} not supported. Choose from "fit_preset" or "ga".'
)
if fit_settings is None:
fit_settings = {}
if not fit_settings.get("n_feat"):
nf = len(data.df_featurized.columns)
fit_settings["n_feat"] = nf
if not fit_settings.get("num_neurons"):
# Pass dummy network
fit_settings["num_neurons"] = [[4], [4], [4], [4]]
if ga_settings is None:
ga_settings = {
"size_pop": 20,
"num_generations": 10,
"early_stopping": 4,
"refit": False,
}
fold_data = []
results = defaultdict(list)
for ind, (train, test) in enumerate(
matbench_kfold_splits(data, classification=classification)
):
train_data, test_data = data.split((train, test))
if inner_feat_selection:
path = "folds/train_moddata_f{}".format(ind + 1)
if os.path.isfile(path):
train_data = MODData.load(path)
else:
train_data.feature_selection(
n=-1,
use_precomputed_cross_nmi=use_precomputed_cross_nmi,
n_jobs=n_jobs,
)
os.makedirs("folds", exist_ok=True)
train_data.save(path)
fold_data.append((train_data, test_data))
args = (target, target_weights, fit_settings, ga_settings)
model_kwargs = {
"model_type": model_type,
"hp_optimization": hp_optimization,
"fast": fast,
"classification": classification,
"save_folds": save_folds,
"presets": presets,
"hp_strategy": hp_strategy,
"save_models": save_models,
"nested": nested,
"n_jobs": n_jobs,
}
model_kwargs.update(model_init_kwargs)
fold_results = []
for fold in enumerate(fold_data):
fold_results.append(train_fold(fold, *args, **model_kwargs))
for fold in fold_results:
for key in fold:
results[key].append(fold[key])
return results
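def _example_benchmark_call(data: MODData):
    # Illustrative sketch only: a minimal single-target invocation; the target
    # name "e_form" and the settings are placeholders, not from the original code.
    return matbench_benchmark(
        data,
        target=["e_form"],
        target_weights={"e_form": 1.0},
        hp_optimization=True,
        hp_strategy="ga",
        n_jobs=4,
    )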
def train_fold(
fold: Tuple[int, Tuple[MODData, MODData]],
target: List[str],
target_weights: Dict[str, float],
fit_settings: Dict[str, Any],
ga_settings: Dict[str, float],
model_type: Type[MODNetModel] = MODNetModel,
presets=None,
hp_optimization=True,
hp_strategy="fit_preset",
classification=False,
save_folds=False,
fast=False,
save_models=False,
nested=False,
n_jobs=None,
**model_kwargs,
) -> dict:
"""Train one fold of a CV.
Unless stated, all arguments have the same meaning as in `matbench_benchmark(...)`.
Arguments:
fold: A tuple containing the fold index, and another tuple of the
training MODData and test MODData.
Returns:
A dictionary summarising the fold results.
"""
fold_ind, (train_data, test_data) = fold
results = {}
multi_target = bool(len(target) - 1)
# If not performing hp_optimization, load model init settings from fit_settings
model_settings = {}
if not hp_optimization:
model_settings = {
"num_neurons": fit_settings["num_neurons"],
"num_classes": fit_settings.get("num_classes"),
"act": fit_settings.get("act"),
"out_act": fit_settings.get("out_act", "linear"),
"n_feat": fit_settings["n_feat"],
}
model_settings.update(model_kwargs)
if classification:
model_settings["num_classes"] = {t: 2 for t in target_weights}
model = model_type(target, target_weights, **model_settings)
if hp_optimization:
if hp_strategy == "fit_preset":
(
models,
val_losses,
best_learning_curve,
learning_curves,
best_presets,
) = model.fit_preset(
train_data,
presets=presets,
fast=fast,
classification=classification,
nested=nested,
n_jobs=n_jobs,
)
results["nested_losses"] = val_losses
results["nested_learning_curves"] = learning_curves
results["best_learning_curves"] = best_learning_curve
results["best_presets"] = best_presets
elif hp_strategy == "ga":
ga = FitGenetic(train_data)
model = ga.run(
size_pop=ga_settings["size_pop"],
num_generations=ga_settings["num_generations"],
nested=nested,
n_jobs=n_jobs,
early_stopping=ga_settings["early_stopping"],
refit=ga_settings["refit"],
fast=fast,
)
if save_models:
for ind, nested_model in enumerate(models):
score = val_losses[ind]
nested_model.save(f"results/nested_model_{fold_ind}_{ind}_{score:3.3f}")
model.save(f"results/best_model_{fold_ind}_{score:3.3f}")
else:
if fit_settings["increase_bs"]:
model.fit(
train_data,
lr=fit_settings["lr"],
epochs=fit_settings["epochs"],
batch_size=fit_settings["batch_size"],
loss=fit_settings["loss"],
)
model.fit(
train_data,
lr=fit_settings["lr"] / 7,
epochs=fit_settings["epochs"] // 2,
batch_size=fit_settings["batch_size"] * 2,
loss=fit_settings["loss"],
)
else:
model.fit(train_data, **fit_settings)
try:
predict_kwargs = {}
if classification:
predict_kwargs["return_prob"] = True
if model.can_return_uncertainty:
predict_kwargs["return_unc"] = True
pred_results = model.predict(test_data, **predict_kwargs)
if isinstance(pred_results, tuple):
predictions, stds = pred_results
else:
predictions = pred_results
stds = None
targets = test_data.df_targets
if classification:
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import OneHotEncoder
y_true = OneHotEncoder().fit_transform(targets.values).toarray()
score = roc_auc_score(y_true, predictions.values)
pred_bool = model.predict(test_data, return_prob=False)
LOG.info(f"ROC-AUC: {score}")
errors = targets - pred_bool
elif multi_target:
errors = targets - predictions
score = np.mean(np.abs(errors.values), axis=0)
else:
errors = targets - predictions
score = np.mean(np.abs(errors.values))
except Exception:
print_exc()
print("Something went wrong benchmarking this model.")
predictions = None
errors = None
        score = None
        stds = None  # also reset names the try block may never have assigned
        targets = None
if save_folds:
opt_feat = train_data.optimal_features[: fit_settings["n_feat"]]
df_train = train_data.df_featurized
df_train = df_train[opt_feat]
        df_train.to_csv("folds/train_f{}.csv".format(fold_ind + 1))
df_test = test_data.df_featurized
df_test = df_test[opt_feat]
errors.columns = [x + "_error" for x in errors.columns]
df_test = df_test.join(errors)
        df_test.to_csv("folds/test_f{}.csv".format(fold_ind + 1))
results["predictions"] = predictions
if stds is not None:
results["stds"] = stds
results["targets"] = targets
results["errors"] = errors
results["scores"] = score
results["model"] = model
return results
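# --- Illustrative usage sketch (an assumption, not part of the original file) ---
# The dict returned by matbench_benchmark holds one list entry per fold, so a mean
# cross-validation score (single-target case) could be summarised like this:
def _demo_summarise_scores(results):
    fold_scores = [s for s in results["scores"] if s is not None]  # skip failed folds
    return float(np.mean(fold_scores))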
| 34.012085 | 110 | 0.618671 | import os
from collections import defaultdict
from traceback import print_exc
from typing import List, Dict, Any, Optional, Tuple, Type
import numpy as np
from modnet.preprocessing import MODData
from modnet.models import MODNetModel
from modnet.utils import LOG
from modnet.hyper_opt import FitGenetic
MATBENCH_SEED = 18012019
def matbench_kfold_splits(data: MODData, n_splits=5, classification=False):
if classification:
from sklearn.model_selection import StratifiedKFold as KFold
else:
from sklearn.model_selection import KFold
kf = KFold(n_splits=n_splits, shuffle=True, random_state=MATBENCH_SEED)
kf_splits = kf.split(data.df_featurized, y=data.df_targets)
return kf_splits
def matbench_benchmark(
data: MODData,
target: List[str],
target_weights: Dict[str, float],
fit_settings: Optional[Dict[str, Any]] = None,
ga_settings: Optional[Dict[str, float]] = None,
classification: bool = False,
model_type: Type[MODNetModel] = MODNetModel,
save_folds: bool = False,
save_models: bool = False,
hp_optimization: bool = True,
hp_strategy: str = "fit_preset",
inner_feat_selection: bool = True,
use_precomputed_cross_nmi: bool = True,
presets: Optional[List[dict]] = None,
fast: bool = False,
n_jobs: Optional[int] = None,
nested: bool = False,
**model_init_kwargs,
) -> dict:
if hp_optimization:
if hp_strategy not in ["fit_preset", "ga"]:
raise RuntimeError(
                f'{hp_strategy} not supported. Choose from "fit_preset" or "ga".'
)
if fit_settings is None:
fit_settings = {}
if not fit_settings.get("n_feat"):
nf = len(data.df_featurized.columns)
fit_settings["n_feat"] = nf
if not fit_settings.get("num_neurons"):
fit_settings["num_neurons"] = [[4], [4], [4], [4]]
if ga_settings is None:
ga_settings = {
"size_pop": 20,
"num_generations": 10,
"early_stopping": 4,
"refit": False,
}
fold_data = []
results = defaultdict(list)
for ind, (train, test) in enumerate(
matbench_kfold_splits(data, classification=classification)
):
train_data, test_data = data.split((train, test))
if inner_feat_selection:
path = "folds/train_moddata_f{}".format(ind + 1)
if os.path.isfile(path):
train_data = MODData.load(path)
else:
train_data.feature_selection(
n=-1,
use_precomputed_cross_nmi=use_precomputed_cross_nmi,
n_jobs=n_jobs,
)
os.makedirs("folds", exist_ok=True)
train_data.save(path)
fold_data.append((train_data, test_data))
args = (target, target_weights, fit_settings, ga_settings)
model_kwargs = {
"model_type": model_type,
"hp_optimization": hp_optimization,
"fast": fast,
"classification": classification,
"save_folds": save_folds,
"presets": presets,
"hp_strategy": hp_strategy,
"save_models": save_models,
"nested": nested,
"n_jobs": n_jobs,
}
model_kwargs.update(model_init_kwargs)
fold_results = []
for fold in enumerate(fold_data):
fold_results.append(train_fold(fold, *args, **model_kwargs))
for fold in fold_results:
for key in fold:
results[key].append(fold[key])
return results
def train_fold(
fold: Tuple[int, Tuple[MODData, MODData]],
target: List[str],
target_weights: Dict[str, float],
fit_settings: Dict[str, Any],
ga_settings: Dict[str, float],
model_type: Type[MODNetModel] = MODNetModel,
presets=None,
hp_optimization=True,
hp_strategy="fit_preset",
classification=False,
save_folds=False,
fast=False,
save_models=False,
nested=False,
n_jobs=None,
**model_kwargs,
) -> dict:
fold_ind, (train_data, test_data) = fold
results = {}
multi_target = bool(len(target) - 1)
model_settings = {}
if not hp_optimization:
model_settings = {
"num_neurons": fit_settings["num_neurons"],
"num_classes": fit_settings.get("num_classes"),
"act": fit_settings.get("act"),
"out_act": fit_settings.get("out_act", "linear"),
"n_feat": fit_settings["n_feat"],
}
model_settings.update(model_kwargs)
if classification:
model_settings["num_classes"] = {t: 2 for t in target_weights}
model = model_type(target, target_weights, **model_settings)
if hp_optimization:
if hp_strategy == "fit_preset":
(
models,
val_losses,
best_learning_curve,
learning_curves,
best_presets,
) = model.fit_preset(
train_data,
presets=presets,
fast=fast,
classification=classification,
nested=nested,
n_jobs=n_jobs,
)
results["nested_losses"] = val_losses
results["nested_learning_curves"] = learning_curves
results["best_learning_curves"] = best_learning_curve
results["best_presets"] = best_presets
elif hp_strategy == "ga":
ga = FitGenetic(train_data)
model = ga.run(
size_pop=ga_settings["size_pop"],
num_generations=ga_settings["num_generations"],
nested=nested,
n_jobs=n_jobs,
early_stopping=ga_settings["early_stopping"],
refit=ga_settings["refit"],
fast=fast,
)
if save_models:
for ind, nested_model in enumerate(models):
score = val_losses[ind]
nested_model.save(f"results/nested_model_{fold_ind}_{ind}_{score:3.3f}")
model.save(f"results/best_model_{fold_ind}_{score:3.3f}")
else:
if fit_settings["increase_bs"]:
model.fit(
train_data,
lr=fit_settings["lr"],
epochs=fit_settings["epochs"],
batch_size=fit_settings["batch_size"],
loss=fit_settings["loss"],
)
model.fit(
train_data,
lr=fit_settings["lr"] / 7,
epochs=fit_settings["epochs"] // 2,
batch_size=fit_settings["batch_size"] * 2,
loss=fit_settings["loss"],
)
else:
model.fit(train_data, **fit_settings)
try:
predict_kwargs = {}
if classification:
predict_kwargs["return_prob"] = True
if model.can_return_uncertainty:
predict_kwargs["return_unc"] = True
pred_results = model.predict(test_data, **predict_kwargs)
if isinstance(pred_results, tuple):
predictions, stds = pred_results
else:
predictions = pred_results
stds = None
targets = test_data.df_targets
if classification:
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import OneHotEncoder
y_true = OneHotEncoder().fit_transform(targets.values).toarray()
score = roc_auc_score(y_true, predictions.values)
pred_bool = model.predict(test_data, return_prob=False)
LOG.info(f"ROC-AUC: {score}")
errors = targets - pred_bool
elif multi_target:
errors = targets - predictions
score = np.mean(np.abs(errors.values), axis=0)
else:
errors = targets - predictions
score = np.mean(np.abs(errors.values))
except Exception:
print_exc()
print("Something went wrong benchmarking this model.")
predictions = None
errors = None
        score = None
        stds = None
        targets = None
if save_folds:
opt_feat = train_data.optimal_features[: fit_settings["n_feat"]]
df_train = train_data.df_featurized
df_train = df_train[opt_feat]
        df_train.to_csv("folds/train_f{}.csv".format(fold_ind + 1))
df_test = test_data.df_featurized
df_test = df_test[opt_feat]
errors.columns = [x + "_error" for x in errors.columns]
df_test = df_test.join(errors)
        df_test.to_csv("folds/test_f{}.csv".format(fold_ind + 1))
results["predictions"] = predictions
if stds is not None:
results["stds"] = stds
results["targets"] = targets
results["errors"] = errors
results["scores"] = score
results["model"] = model
return results
| true | true |
f7247128248055fc8b3fc7e0f99d36f794357c24 | 5,958 | py | Python | utils/evaluation.py | lippman1125/pytorch_FAN | ffc9c968478d55cb0c75c062bb8774923f961110 | [
"BSD-3-Clause"
] | 58 | 2019-03-14T20:13:10.000Z | 2022-03-17T07:59:34.000Z | utils/evaluation.py | lippman1125/pytorch_FAN | ffc9c968478d55cb0c75c062bb8774923f961110 | [
"BSD-3-Clause"
] | 7 | 2019-03-29T05:13:39.000Z | 2021-02-08T23:00:32.000Z | utils/evaluation.py | lippman1125/pytorch_FAN | ffc9c968478d55cb0c75c062bb8774923f961110 | [
"BSD-3-Clause"
] | 8 | 2019-05-29T09:05:32.000Z | 2022-03-12T17:00:02.000Z | from __future__ import absolute_import, print_function
import math
import numpy as np
import matplotlib.pyplot as plt
from random import randint
from .misc import *
from .transforms import transform, transform_preds
__all__ = ['accuracy', 'AverageMeter']
def get_preds(scores):
''' get predictions from score maps in torch Tensor
return type: torch.LongTensor
'''
assert scores.dim() == 4, 'Score maps should be 4-dim'
# batch, chn, height, width ===> batch, chn, height*width
# chn = 68
# height*width = score_map
maxval, idx = torch.max(scores.view(scores.size(0), scores.size(1), -1), 2)
maxval = maxval.view(scores.size(0), scores.size(1), 1)
idx = idx.view(scores.size(0), scores.size(1), 1) + 1
preds = idx.repeat(1, 1, 2).float()
# batchsize * numPoints * 2
# 0 is x coord
# 1 is y coord
# shape = batchsize, numPoints, 2
preds[:, :, 0] = (preds[:, :, 0] - 1) % scores.size(3) + 1
preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / scores.size(2)) + 1
pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
preds *= pred_mask
return preds
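# --- Illustrative usage sketch (an assumption, not part of the original module) ---
# Exercises get_preds on a random score map; assumes torch is pulled in by the
# wildcard import from .misc above, as the rest of this module does.
def _demo_get_preds():
    scores = torch.rand(2, 68, 64, 64)  # batch of 2 samples, 68 heatmaps of 64x64
    preds = get_preds(scores)  # -> (2, 68, 2) tensor of 1-based x/y coordinates
    return preds.shape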
def calc_dists(preds, target, normalize):
preds = preds.float()
target = target.float()
# dists = 68 x batch
dists = torch.zeros(preds.size(1), preds.size(0))
for n in range(preds.size(0)):
for c in range(preds.size(1)):
if target[n, c, 0] > 1 and target[n, c, 1] > 1:
dists[c, n] = torch.dist(preds[n, c, :], target[n, c, :]) / normalize[n]
else:
dists[c, n] = -1
return dists
def dist_acc(dists, thr=0.5):
''' Return percentage below threshold while ignoring values with a -1 '''
if dists.ne(-1).sum() > 0:
return dists.le(thr).eq(dists.ne(-1)).sum() * 1.0 / dists.ne(-1).sum()
else:
return -1
def calc_metrics(dists, path='', category=''):
errors = torch.mean(dists, 0).view(dists.size(1))
axes1 = np.linspace(0, 1, 1000)
axes2 = np.zeros(1000)
for i in range(1000):
axes2[i] = float((errors < axes1[i]).sum()) / float(errors.size(0))
auc = round(np.sum(axes2[:70]) / .7, 2)
if path:
label = '{}({}) : {}'.format(path.split('/')[2], category, str(auc))
plt.xlim(0, 7)
plt.ylim(0, 100)
plt.yticks(np.arange(0, 110, 10))
plt.xticks(np.arange(0, 8, 1))
plt.grid()
plt.title('NME (%)', fontsize=20)
plt.xlabel('NME (%)', fontsize=16)
plt.ylabel('Test images (%)', fontsize=16)
if category:
if category in ['Easy', 'Category A']:
plt.plot(axes1 * 100, axes2 * 100, 'b-', label=label, lw=3)
if category in ['Media', 'Category B']:
plt.plot(axes1 * 100, axes2 * 100, 'r-', label=label, lw=3)
if category in ['Hard', 'Category C']:
plt.plot(axes1 * 100, axes2 * 100, 'g-', label=label, lw=3)
else:
plt.plot(axes1 * 100, axes2 * 100, 'b-', label=label, lw=3)
plt.legend(loc=4, fontsize=12)
plt.savefig(os.path.join(path + '/CED.eps'))
return auc
def _get_bboxsize(iterable):
# iterable = 68 x 2
# torch.min return values, idxs
mins = torch.min(iterable, 0)[0].view(2)
maxs = torch.max(iterable, 0)[0].view(2)
center = torch.FloatTensor((maxs[0] - (maxs[0] - mins[0]) / 2,
maxs[1] - (maxs[1] - mins[1]) / 2))
# center[1] = center[1] - ((maxs[1] - mins[1]) * 0.12)
return np.sqrt(abs(maxs[0] - mins[0]) * abs(maxs[1] - mins[1]))
def accuracy(output, target, idxs, thr=0.08):
''' Calculate accuracy according to NME, but uses ground truth heatmap rather than x,y locations
First value to be returned is accuracy calculated based on overall 'idxs'
followed by individual accuracies
'''
# preds = batch, 68, 64, 64
preds = get_preds(output)
gts = get_preds(target)
# B * 2
norm = torch.ones(preds.size(0))
# use face bbox to normalize
for i, gt in enumerate(gts):
norm[i] = _get_bboxsize(gt)
dists = calc_dists(preds, gts, norm)
acc = torch.zeros(len(idxs) + 1)
avg_acc = 0
cnt = 0
mean_dists = torch.mean(dists, 0)
acc[0] = mean_dists.le(thr).sum() * 1.0 / preds.size(0)
# for i in range(len(idxs)):
# acc[i+1] = dist_acc(dists[idxs[i]-1], thr=thr)
# if acc[i+1] >= 0:
# avg_acc = avg_acc + acc[i+1]
# cnt += 1
# if cnt != 0:
# acc[0] = avg_acc / cnt
return acc, dists
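# --- Illustrative usage sketch (an assumption, not part of the original module) ---
# Computes the NME-based accuracy from predicted and ground-truth heatmap tensors.
def _demo_accuracy(output, target):
    idxs = list(range(1, 69))  # hypothetical: all 68 landmark indices
    acc, dists = accuracy(output, target, idxs, thr=0.08)
    return acc[0]  # fraction of samples whose mean normalised error is <= thr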
def final_preds(output, center, scale, res):
if output.size(1) == 136:
        coords = output.view((output.size(0), 68, 2))
else:
coords = get_preds(output) # float type
# output shape is batch, 68, 64, 64
# coords shape is batch, 68, 2
    # post-processing
for n in range(coords.size(0)):
for p in range(coords.size(1)):
hm = output[n][p]
px = int(math.floor(coords[n][p][0]))
py = int(math.floor(coords[n][p][1]))
if px > 1 and px < res[0] and py > 1 and py < res[1]:
diff = torch.Tensor(
[hm[py - 1][px] - hm[py - 1][px - 2], hm[py][px - 1] - hm[py - 2][px - 1]])
coords[n][p] += diff.sign() * .25
coords += 0.5
preds = coords.clone()
# Transform back
for i in range(coords.size(0)):
preds[i] = transform_preds(coords[i], center[i], scale[i], res)
if preds.dim() < 3:
        preds = preds.view(1, *preds.size())
return preds
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
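# --- Illustrative usage sketch (an assumption, not part of the original module) ---
# Typical training-loop bookkeeping with AverageMeter:
def _demo_average_meter():
    losses = AverageMeter()
    for batch_loss, batch_size in [(0.9, 32), (0.7, 32), (0.5, 16)]:
        losses.update(batch_loss, n=batch_size)
    return losses.avg  # running average weighted by batch size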
| 31.193717 | 100 | 0.553206 | from __future__ import absolute_import, print_function
import math
import numpy as np
import matplotlib.pyplot as plt
from random import randint
from .misc import *
from .transforms import transform, transform_preds
__all__ = ['accuracy', 'AverageMeter']
def get_preds(scores):
assert scores.dim() == 4, 'Score maps should be 4-dim'
maxval, idx = torch.max(scores.view(scores.size(0), scores.size(1), -1), 2)
maxval = maxval.view(scores.size(0), scores.size(1), 1)
idx = idx.view(scores.size(0), scores.size(1), 1) + 1
preds = idx.repeat(1, 1, 2).float()
preds[:, :, 0] = (preds[:, :, 0] - 1) % scores.size(3) + 1
preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / scores.size(2)) + 1
pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
preds *= pred_mask
return preds
def calc_dists(preds, target, normalize):
preds = preds.float()
target = target.float()
dists = torch.zeros(preds.size(1), preds.size(0))
for n in range(preds.size(0)):
for c in range(preds.size(1)):
if target[n, c, 0] > 1 and target[n, c, 1] > 1:
dists[c, n] = torch.dist(preds[n, c, :], target[n, c, :]) / normalize[n]
else:
dists[c, n] = -1
return dists
def dist_acc(dists, thr=0.5):
if dists.ne(-1).sum() > 0:
return dists.le(thr).eq(dists.ne(-1)).sum() * 1.0 / dists.ne(-1).sum()
else:
return -1
def calc_metrics(dists, path='', category=''):
errors = torch.mean(dists, 0).view(dists.size(1))
axes1 = np.linspace(0, 1, 1000)
axes2 = np.zeros(1000)
for i in range(1000):
axes2[i] = float((errors < axes1[i]).sum()) / float(errors.size(0))
auc = round(np.sum(axes2[:70]) / .7, 2)
if path:
label = '{}({}) : {}'.format(path.split('/')[2], category, str(auc))
plt.xlim(0, 7)
plt.ylim(0, 100)
plt.yticks(np.arange(0, 110, 10))
plt.xticks(np.arange(0, 8, 1))
plt.grid()
plt.title('NME (%)', fontsize=20)
plt.xlabel('NME (%)', fontsize=16)
plt.ylabel('Test images (%)', fontsize=16)
if category:
if category in ['Easy', 'Category A']:
plt.plot(axes1 * 100, axes2 * 100, 'b-', label=label, lw=3)
if category in ['Media', 'Category B']:
plt.plot(axes1 * 100, axes2 * 100, 'r-', label=label, lw=3)
if category in ['Hard', 'Category C']:
plt.plot(axes1 * 100, axes2 * 100, 'g-', label=label, lw=3)
else:
plt.plot(axes1 * 100, axes2 * 100, 'b-', label=label, lw=3)
plt.legend(loc=4, fontsize=12)
plt.savefig(os.path.join(path + '/CED.eps'))
return auc
def _get_bboxsize(iterable):
mins = torch.min(iterable, 0)[0].view(2)
maxs = torch.max(iterable, 0)[0].view(2)
center = torch.FloatTensor((maxs[0] - (maxs[0] - mins[0]) / 2,
maxs[1] - (maxs[1] - mins[1]) / 2))
return np.sqrt(abs(maxs[0] - mins[0]) * abs(maxs[1] - mins[1]))
def accuracy(output, target, idxs, thr=0.08):
preds = get_preds(output)
gts = get_preds(target)
norm = torch.ones(preds.size(0))
for i, gt in enumerate(gts):
norm[i] = _get_bboxsize(gt)
dists = calc_dists(preds, gts, norm)
acc = torch.zeros(len(idxs) + 1)
avg_acc = 0
cnt = 0
mean_dists = torch.mean(dists, 0)
acc[0] = mean_dists.le(thr).sum() * 1.0 / preds.size(0)
return acc, dists
def final_preds(output, center, scale, res):
if output.size(1) == 136:
        coords = output.view((output.size(0), 68, 2))
else:
coords = get_preds(output)
for n in range(coords.size(0)):
for p in range(coords.size(1)):
hm = output[n][p]
px = int(math.floor(coords[n][p][0]))
py = int(math.floor(coords[n][p][1]))
if px > 1 and px < res[0] and py > 1 and py < res[1]:
diff = torch.Tensor(
[hm[py - 1][px] - hm[py - 1][px - 2], hm[py][px - 1] - hm[py - 2][px - 1]])
coords[n][p] += diff.sign() * .25
coords += 0.5
preds = coords.clone()
for i in range(coords.size(0)):
preds[i] = transform_preds(coords[i], center[i], scale[i], res)
if preds.dim() < 3:
        preds = preds.view(1, *preds.size())
return preds
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
| true | true |
f72471beb538717956885400bdf193fde9f2aea9 | 5,531 | py | Python | api/routes/admin/admin_student_routes.py | NoisyBotDude/MIS-Backend | fa402b0a6d5d6862634b0ed55bc57178856c1eba | [
"MIT"
] | null | null | null | api/routes/admin/admin_student_routes.py | NoisyBotDude/MIS-Backend | fa402b0a6d5d6862634b0ed55bc57178856c1eba | [
"MIT"
] | null | null | null | api/routes/admin/admin_student_routes.py | NoisyBotDude/MIS-Backend | fa402b0a6d5d6862634b0ed55bc57178856c1eba | [
"MIT"
] | null | null | null | from fileinput import filename
from urllib.request import Request
from api.drivers import student
from api.drivers.student import student_drivers
from api.middlewares import authentication_middleware
from api.schemas.admin.admin_request_schema import admin_request_schemas
from api.schemas.student.request_schemas import student_request_schemas
from api.schemas.student.response_schemas import student_response_schemas
from api.utils.exceptions import exceptions
from fastapi import APIRouter, Depends, HTTPException, Request, status
from fastapi.responses import JSONResponse
from api.repository import admin_repo
from api.utils.save_student_data import save_data
from starlette.responses import FileResponse
import json
def construct_router():
admin = APIRouter(tags=["Admin"])
@admin.post("/notify/student")
async def notify_by_batch():
pass
@admin.post("/add/student/subscription")
async def add_student_subscription(
request: admin_request_schemas.ManipulateStudentSubscriptionSchema,
):
try:
response = await student_drivers.Student().update_array_of_str(
request.__dict__
)
return JSONResponse(status_code=200, content={"message": "info updated"})
except exceptions.DuplicateStudent:
return JSONResponse(
status_code=409, content={"message": "info cannot be updated"}
)
except exceptions.UnexpectedError:
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.post("/remove/student/subscription")
async def remove_student_subscription(
request: admin_request_schemas.ManipulateStudentSubscriptionSchema,
):
try:
response = await student_drivers.Student().delete_from_array_of_str(
request.__dict__
)
if response:
return JSONResponse(
status_code=200,
content={"message": "subscription deleted successfully"},
)
return JSONResponse(
status_code=500, content={"message": "subscription deletion failed"}
)
except exceptions.DuplicateStudent:
return JSONResponse(
status_code=409, content={"message": "info cannot be updated"}
)
except exceptions.UnexpectedError:
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.post("/verify/student")
async def verify_student(request: Request):
request = await request.json()
response = await admin_repo.assign_otp(request["student_ids"])
if response:
return JSONResponse(
status_code=200, content={"message": "otp assigned successfully"}
)
return JSONResponse(
status_code=500,
content={
"message": """otp cannot be assigned successfully for all student"""
},
)
@admin.get("/ban/student/{student_id}")
async def ban_student_account(student_id: str):
response = await student_drivers.Student().ban_student(student_id)
if response == "already_banned":
return JSONResponse(
status_code=404, content={"message": "student aleady banned"}
)
elif response:
return JSONResponse(
status_code=200, content={"message": "student banned successfully"}
)
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.delete("/delete/student/{student_id}")
async def delete_student_account(student_id: str):
response = await student_drivers.Student().delete_student(student_id)
if response:
return JSONResponse(
status_code=200, content={"message": "student deleted successfully"}
)
return JSONResponse(
status_code=404, content={"message": "student does not exist"}
)
@admin.get("/all_student")
async def get_student_profile():
try:
response = await (
student_drivers.Student().get_all_students()
)
return JSONResponse(
status_code=200,
content=response
)
except Exception as e:
print(e, "exception")
@admin.post("/student/data")
async def get_student_data():
students = await (
student_drivers.Student().get_all_students_data()
)
# print(students)
save_data(students)
if students:
return JSONResponse(
status_code=200,
content= {
"message" : "training details saved succesfully"
}
)
return JSONResponse(
status_code=500,
content= {
"message" : "training details cannot be saved"
}
)
@admin.get("/student/data")
async def get_student_data(request: Request):
filename = "student_data.xls"
#send excel file
return FileResponse(
filename,
filename="student_data.xls",
status_code=200,
media_type="application/vnd.ms-excel"
)
return admin
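# --- Illustrative usage sketch (an assumption, not part of the original file) ---
# The router built above would typically be mounted on the application like this;
# the "/admin" prefix is hypothetical.
def _demo_mount_admin_router():
    from fastapi import FastAPI
    app = FastAPI()
    app.include_router(construct_router(), prefix="/admin")
    return app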
| 31.248588 | 85 | 0.605496 | from fileinput import filename
from urllib.request import Request
from api.drivers import student
from api.drivers.student import student_drivers
from api.middlewares import authentication_middleware
from api.schemas.admin.admin_request_schema import admin_request_schemas
from api.schemas.student.request_schemas import student_request_schemas
from api.schemas.student.response_schemas import student_response_schemas
from api.utils.exceptions import exceptions
from fastapi import APIRouter, Depends, HTTPException, Request, status
from fastapi.responses import JSONResponse
from api.repository import admin_repo
from api.utils.save_student_data import save_data
from starlette.responses import FileResponse
import json
def construct_router():
admin = APIRouter(tags=["Admin"])
@admin.post("/notify/student")
async def notify_by_batch():
pass
@admin.post("/add/student/subscription")
async def add_student_subscription(
request: admin_request_schemas.ManipulateStudentSubscriptionSchema,
):
try:
response = await student_drivers.Student().update_array_of_str(
request.__dict__
)
return JSONResponse(status_code=200, content={"message": "info updated"})
except exceptions.DuplicateStudent:
return JSONResponse(
status_code=409, content={"message": "info cannot be updated"}
)
except exceptions.UnexpectedError:
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.post("/remove/student/subscription")
async def remove_student_subscription(
request: admin_request_schemas.ManipulateStudentSubscriptionSchema,
):
try:
response = await student_drivers.Student().delete_from_array_of_str(
request.__dict__
)
if response:
return JSONResponse(
status_code=200,
content={"message": "subscription deleted successfully"},
)
return JSONResponse(
status_code=500, content={"message": "subscription deletion failed"}
)
except exceptions.DuplicateStudent:
return JSONResponse(
status_code=409, content={"message": "info cannot be updated"}
)
except exceptions.UnexpectedError:
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.post("/verify/student")
async def verify_student(request: Request):
request = await request.json()
response = await admin_repo.assign_otp(request["student_ids"])
if response:
return JSONResponse(
status_code=200, content={"message": "otp assigned successfully"}
)
return JSONResponse(
status_code=500,
content={
"message": """otp cannot be assigned successfully for all student"""
},
)
@admin.get("/ban/student/{student_id}")
async def ban_student_account(student_id: str):
response = await student_drivers.Student().ban_student(student_id)
if response == "already_banned":
return JSONResponse(
status_code=404, content={"message": "student aleady banned"}
)
elif response:
return JSONResponse(
status_code=200, content={"message": "student banned successfully"}
)
return JSONResponse(
status_code=500, content={"message": "internal server error"}
)
@admin.delete("/delete/student/{student_id}")
async def delete_student_account(student_id: str):
response = await student_drivers.Student().delete_student(student_id)
if response:
return JSONResponse(
status_code=200, content={"message": "student deleted successfully"}
)
return JSONResponse(
status_code=404, content={"message": "student does not exist"}
)
@admin.get("/all_student")
async def get_student_profile():
try:
response = await (
student_drivers.Student().get_all_students()
)
return JSONResponse(
status_code=200,
content=response
)
except Exception as e:
print(e, "exception")
@admin.post("/student/data")
async def get_student_data():
students = await (
student_drivers.Student().get_all_students_data()
)
save_data(students)
if students:
return JSONResponse(
status_code=200,
content= {
"message" : "training details saved succesfully"
}
)
return JSONResponse(
status_code=500,
content= {
"message" : "training details cannot be saved"
}
)
@admin.get("/student/data")
async def get_student_data(request: Request):
filename = "student_data.xls"
return FileResponse(
filename,
filename="student_data.xls",
status_code=200,
media_type="application/vnd.ms-excel"
)
return admin
| true | true |
f724734a995b0486052f20cb0dc53813c6e312e3 | 6,915 | py | Python | analysis/views/views_karyomapping.py | SACGF/variantgrid | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | [
"RSA-MD"
] | 5 | 2021-01-14T03:34:42.000Z | 2022-03-07T15:34:18.000Z | analysis/views/views_karyomapping.py | SACGF/variantgrid | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | [
"RSA-MD"
] | 551 | 2020-10-19T00:02:38.000Z | 2022-03-30T02:18:22.000Z | analysis/views/views_karyomapping.py | SACGF/variantgrid | 515195e2f03a0da3a3e5f2919d8e0431babfd9c9 | [
"RSA-MD"
] | null | null | null | import csv
from collections import defaultdict, OrderedDict
from django.core.exceptions import PermissionDenied
from django.http.response import StreamingHttpResponse
from django.shortcuts import get_object_or_404, render, redirect
from django.urls.base import reverse
from django.views.decorators.cache import cache_page
from django.views.decorators.vary import vary_on_cookie
from analysis.forms import KaryomappingGeneForm, UserTrioForm
from analysis.models.models_karyomapping import KaryomappingAnalysis, KaryotypeBins, \
KaryomappingGene
from library.constants import DAY_SECS
from library.django_utils import add_save_message
from library.jqgrid_export import StashFile
from patients.models_enums import Zygosity
from snpdb.models import Trio
from snpdb.models.models_variant import Variant
def get_karyomapping_analysis_permission_check(request, pk):
ka = get_object_or_404(KaryomappingAnalysis, pk=pk)
if not request.user.has_perm(KaryomappingAnalysis.get_read_perm(), ka):
msg = f"{request.user} does not have permission to access {ka}"
raise PermissionDenied(msg)
return ka
def karyomapping_analyses(request):
context = {"trio_form": UserTrioForm()}
return render(request, 'analysis/karyomapping/karyomapping_analyses.html', context)
def create_and_view_karyomapping_analysis_for_trio(trio, user):
karyomapping = KaryomappingAnalysis.objects.create(user=user,
name=trio.name,
trio=trio)
url = reverse("view_karyomapping_analysis", kwargs={"pk": karyomapping.pk})
return redirect(url)
def create_karyomapping_analysis_for_trio_id(request, trio_id):
trio = Trio.get_for_user(request.user, trio_id)
return create_and_view_karyomapping_analysis_for_trio(trio, request.user)
def view_karyomapping_analysis(request, pk):
karyomapping_analysis = get_karyomapping_analysis_permission_check(request, pk)
gene_form = KaryomappingGeneForm(request.POST or None,
karyomapping_analysis=karyomapping_analysis,
initial={"upstream_kb": 2000,
"downstream_kb": 2000})
created_karyomapping_gene = None
if request.method == "POST":
valid = gene_form.is_valid()
if valid:
created_karyomapping_gene = gene_form.save()
add_save_message(request, valid, "KaryomappingGene")
context = {"karyomapping_analysis": karyomapping_analysis,
"gene_form": gene_form,
"has_write_permission": karyomapping_analysis.can_write(request.user),
"created_karyomapping_gene": created_karyomapping_gene}
return render(request, 'analysis/karyomapping/view_karyomapping_analysis.html', context)
def get_variant_lookup_and_scatter_data(karyomapping_bins):
""" Dumped to JS to be used by Plotly scatterplot
karyomapping_bins : Have separate entries for ALT/REF, we merge these for output """
variant_id_lookup = {}
data = defaultdict(lambda: defaultdict(list))
for karyotype_code, variant_data in karyomapping_bins.items():
x = []
text = []
for variant_id, chrom, position, ref, alt in variant_data:
variant_string = Variant.format_tuple(chrom, position, ref, alt)
variant_id_lookup[variant_string] = variant_id
x.append(position)
text.append(variant_string)
collapsed_code = KaryotypeBins.COLLAPSED_BINS[karyotype_code]
data[collapsed_code]["x"].extend(x)
data[collapsed_code]["text"].extend(text)
karyotype_bin_counts = OrderedDict()
for kc in KaryotypeBins.KARYOTYPE_LABEL_ORDER:
karyotype_bin_counts[kc] = len(data[kc]["x"])
return variant_id_lookup, data, karyotype_bin_counts
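# --- Illustrative usage sketch (an assumption, not part of the original module) ---
# The scatter data returned above is already shaped for Plotly traces, one trace per
# collapsed karyotype bin:
def _demo_scatter_traces(karyomapping_bins):
    _, scatter_data, counts = get_variant_lookup_and_scatter_data(karyomapping_bins)
    traces = [
        {"x": series["x"], "text": series["text"], "mode": "markers", "name": label}
        for label, series in scatter_data.items()
    ]
    return traces, counts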
@cache_page(DAY_SECS) # Only caching this for a day due to high amount of development
@vary_on_cookie
def view_karyomapping_gene(request, pk):
karyomapping_gene = get_object_or_404(KaryomappingGene, pk=pk)
# Permission check on parent karyomapping_analysis
get_karyomapping_analysis_permission_check(request, karyomapping_gene.karyomapping_analysis.pk)
iv, strand = karyomapping_gene.get_genomic_interval_and_strand()
karyomapping_bins = karyomapping_gene.get_karyomapping_bins()
variant_id_lookup, karyotype_bin_scatter_data, karyotype_bin_counts = get_variant_lookup_and_scatter_data(karyomapping_bins)
context = {"kag": karyomapping_gene,
"iv": iv,
"strand": strand,
"karyotype_bin_labels": KaryotypeBins.KARYOTYPE_LABEL_ORDER,
"karyotype_bin_labels": KaryotypeBins.KARYOTYPE_LABEL_ORDER, # Want order
"variant_id_lookup": variant_id_lookup,
"karyotype_bin_scatter_data": karyotype_bin_scatter_data,
"karyotype_bin_counts": karyotype_bin_counts}
return render(request, 'analysis/karyomapping/view_karyomapping_gene.html', context)
def download_karyomapping_gene_csv(request, pk):
karyomapping_gene = get_object_or_404(KaryomappingGene, pk=pk)
# Permission check on parent karyomapping_analysis
get_karyomapping_analysis_permission_check(request, karyomapping_gene.karyomapping_analysis.pk)
variant_and_genotypes = karyomapping_gene.get_variant_and_genotypes()
filename = f"karyomapping_gene_{karyomapping_gene.pk}_{karyomapping_gene}.csv"
# TODO: merge code w/library.jqgrid_export.grid_export_csv
karotype_bin_lookup = KaryotypeBins.get_karotype_bin_lookup()
header = ['chrom', 'position', 'ref', 'alt', 'proband_gt', 'father_gt', 'mother_gt', 'karyotype_bin']
pseudo_buffer = StashFile()
writer = csv.DictWriter(pseudo_buffer, header, dialect='excel')
def iter_row_writer():
writer.writeheader()
yield pseudo_buffer.value
for variant_data, genotype_tuple in variant_and_genotypes:
_, chrom, position, ref, alt = variant_data
proband_gt, father_gt, mother_gt = genotype_tuple
try:
karotype_bin = karotype_bin_lookup[proband_gt][father_gt][mother_gt]
            except KeyError:
karotype_bin = ''
row = {'chrom': chrom,
'position': position,
'ref': ref,
'alt': alt,
'proband_gt': Zygosity.get_genotype(proband_gt),
'father_gt': Zygosity.get_genotype(father_gt),
'mother_gt': Zygosity.get_genotype(mother_gt),
'karyotype_bin': karotype_bin}
writer.writerow(row)
yield pseudo_buffer.value
response = StreamingHttpResponse(iter_row_writer(), content_type="text/csv")
response['Content-Disposition'] = f'attachment; filename="{filename}"'
return response
| 43.21875 | 128 | 0.707881 | import csv
from collections import defaultdict, OrderedDict
from django.core.exceptions import PermissionDenied
from django.http.response import StreamingHttpResponse
from django.shortcuts import get_object_or_404, render, redirect
from django.urls.base import reverse
from django.views.decorators.cache import cache_page
from django.views.decorators.vary import vary_on_cookie
from analysis.forms import KaryomappingGeneForm, UserTrioForm
from analysis.models.models_karyomapping import KaryomappingAnalysis, KaryotypeBins, \
KaryomappingGene
from library.constants import DAY_SECS
from library.django_utils import add_save_message
from library.jqgrid_export import StashFile
from patients.models_enums import Zygosity
from snpdb.models import Trio
from snpdb.models.models_variant import Variant
def get_karyomapping_analysis_permission_check(request, pk):
ka = get_object_or_404(KaryomappingAnalysis, pk=pk)
if not request.user.has_perm(KaryomappingAnalysis.get_read_perm(), ka):
msg = f"{request.user} does not have permission to access {ka}"
raise PermissionDenied(msg)
return ka
def karyomapping_analyses(request):
context = {"trio_form": UserTrioForm()}
return render(request, 'analysis/karyomapping/karyomapping_analyses.html', context)
def create_and_view_karyomapping_analysis_for_trio(trio, user):
karyomapping = KaryomappingAnalysis.objects.create(user=user,
name=trio.name,
trio=trio)
url = reverse("view_karyomapping_analysis", kwargs={"pk": karyomapping.pk})
return redirect(url)
def create_karyomapping_analysis_for_trio_id(request, trio_id):
trio = Trio.get_for_user(request.user, trio_id)
return create_and_view_karyomapping_analysis_for_trio(trio, request.user)
def view_karyomapping_analysis(request, pk):
karyomapping_analysis = get_karyomapping_analysis_permission_check(request, pk)
gene_form = KaryomappingGeneForm(request.POST or None,
karyomapping_analysis=karyomapping_analysis,
initial={"upstream_kb": 2000,
"downstream_kb": 2000})
created_karyomapping_gene = None
if request.method == "POST":
valid = gene_form.is_valid()
if valid:
created_karyomapping_gene = gene_form.save()
add_save_message(request, valid, "KaryomappingGene")
context = {"karyomapping_analysis": karyomapping_analysis,
"gene_form": gene_form,
"has_write_permission": karyomapping_analysis.can_write(request.user),
"created_karyomapping_gene": created_karyomapping_gene}
return render(request, 'analysis/karyomapping/view_karyomapping_analysis.html', context)
def get_variant_lookup_and_scatter_data(karyomapping_bins):
variant_id_lookup = {}
data = defaultdict(lambda: defaultdict(list))
for karyotype_code, variant_data in karyomapping_bins.items():
x = []
text = []
for variant_id, chrom, position, ref, alt in variant_data:
variant_string = Variant.format_tuple(chrom, position, ref, alt)
variant_id_lookup[variant_string] = variant_id
x.append(position)
text.append(variant_string)
collapsed_code = KaryotypeBins.COLLAPSED_BINS[karyotype_code]
data[collapsed_code]["x"].extend(x)
data[collapsed_code]["text"].extend(text)
karyotype_bin_counts = OrderedDict()
for kc in KaryotypeBins.KARYOTYPE_LABEL_ORDER:
karyotype_bin_counts[kc] = len(data[kc]["x"])
return variant_id_lookup, data, karyotype_bin_counts
@cache_page(DAY_SECS)
@vary_on_cookie
def view_karyomapping_gene(request, pk):
karyomapping_gene = get_object_or_404(KaryomappingGene, pk=pk)
get_karyomapping_analysis_permission_check(request, karyomapping_gene.karyomapping_analysis.pk)
iv, strand = karyomapping_gene.get_genomic_interval_and_strand()
karyomapping_bins = karyomapping_gene.get_karyomapping_bins()
variant_id_lookup, karyotype_bin_scatter_data, karyotype_bin_counts = get_variant_lookup_and_scatter_data(karyomapping_bins)
context = {"kag": karyomapping_gene,
"iv": iv,
"strand": strand,
"karyotype_bin_labels": KaryotypeBins.KARYOTYPE_LABEL_ORDER,
"karyotype_bin_labels": KaryotypeBins.KARYOTYPE_LABEL_ORDER,
"variant_id_lookup": variant_id_lookup,
"karyotype_bin_scatter_data": karyotype_bin_scatter_data,
"karyotype_bin_counts": karyotype_bin_counts}
return render(request, 'analysis/karyomapping/view_karyomapping_gene.html', context)
def download_karyomapping_gene_csv(request, pk):
karyomapping_gene = get_object_or_404(KaryomappingGene, pk=pk)
get_karyomapping_analysis_permission_check(request, karyomapping_gene.karyomapping_analysis.pk)
variant_and_genotypes = karyomapping_gene.get_variant_and_genotypes()
filename = f"karyomapping_gene_{karyomapping_gene.pk}_{karyomapping_gene}.csv"
karotype_bin_lookup = KaryotypeBins.get_karotype_bin_lookup()
header = ['chrom', 'position', 'ref', 'alt', 'proband_gt', 'father_gt', 'mother_gt', 'karyotype_bin']
pseudo_buffer = StashFile()
writer = csv.DictWriter(pseudo_buffer, header, dialect='excel')
def iter_row_writer():
writer.writeheader()
yield pseudo_buffer.value
for variant_data, genotype_tuple in variant_and_genotypes:
_, chrom, position, ref, alt = variant_data
proband_gt, father_gt, mother_gt = genotype_tuple
try:
karotype_bin = karotype_bin_lookup[proband_gt][father_gt][mother_gt]
            except KeyError:
karotype_bin = ''
row = {'chrom': chrom,
'position': position,
'ref': ref,
'alt': alt,
'proband_gt': Zygosity.get_genotype(proband_gt),
'father_gt': Zygosity.get_genotype(father_gt),
'mother_gt': Zygosity.get_genotype(mother_gt),
'karyotype_bin': karotype_bin}
writer.writerow(row)
yield pseudo_buffer.value
response = StreamingHttpResponse(iter_row_writer(), content_type="text/csv")
response['Content-Disposition'] = f'attachment; filename="{filename}"'
return response
| true | true |
f724736b6941b37c7e6fc68854f6f1512721115e | 1,762 | py | Python | CondTools/IntegrationTest/python/validate_dt_devdb10_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | CondTools/IntegrationTest/python/validate_dt_devdb10_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | CondTools/IntegrationTest/python/validate_dt_devdb10_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | # The following comments couldn't be translated into the new config version:
# Configuration file for EventSetupTest_t
import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
process.PoolDBESSource = cms.ESSource("PoolDBESSource",
loadAll = cms.bool(True),
toGet = cms.VPSet(cms.PSet(
record = cms.string('DTT0Rcd'),
tag = cms.string('MTCC_t0')
),
cms.PSet(
record = cms.string('DTTtrigRcd'),
tag = cms.string('MTCC_tTrig')
),
cms.PSet(
record = cms.string('DTReadOutMappingRcd'),
tag = cms.string('MTCC_map')
)),
messagelevel = cms.untracked.uint32(2),
catalog = cms.untracked.string('relationalcatalog_oracle://devdb10/CMS_COND_GENERAL'), ##devdb10/CMS_COND_GENERAL"
timetype = cms.string('runnumber'),
connect = cms.string('oracle://devdb10/CMS_COND_DT'), ##devdb10/CMS_COND_DT"
authenticationMethod = cms.untracked.uint32(1)
)
process.source = cms.Source("EmptySource",
maxEvents = cms.untracked.int32(5),
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.get = cms.EDAnalyzer("EventSetupRecordDataGetter",
toGet = cms.VPSet(cms.PSet(
record = cms.string('DTT0Rcd'),
data = cms.vstring('DTT0')
),
cms.PSet(
record = cms.string('DTTtrigRcd'),
data = cms.vstring('DTTtrig')
),
cms.PSet(
record = cms.string('DTReadOutMappingRcd'),
data = cms.vstring('DTReadOutMapping')
)),
verbose = cms.untracked.bool(True)
)
process.printer = cms.OutputModule("AsciiOutputModule")
process.p = cms.Path(process.get)
process.ep = cms.EndPath(process.printer)
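# --- Illustrative note (an assumption, not part of the original configuration) ---
# A configuration like this is normally executed with the CMSSW driver, e.g.
#   cmsRun validate_dt_devdb10_cfg.py
# which loads `process`, fetches the three DT tags from the Oracle source defined
# above, and prints the retrieved records via the AsciiOutputModule end path.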
| 30.37931 | 118 | 0.641317 |
# Configuration file for EventSetupTest_t
import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
process.PoolDBESSource = cms.ESSource("PoolDBESSource",
loadAll = cms.bool(True),
toGet = cms.VPSet(cms.PSet(
record = cms.string('DTT0Rcd'),
tag = cms.string('MTCC_t0')
),
cms.PSet(
record = cms.string('DTTtrigRcd'),
tag = cms.string('MTCC_tTrig')
),
cms.PSet(
record = cms.string('DTReadOutMappingRcd'),
tag = cms.string('MTCC_map')
)),
messagelevel = cms.untracked.uint32(2),
catalog = cms.untracked.string('relationalcatalog_oracle://devdb10/CMS_COND_GENERAL'), ##devdb10/CMS_COND_GENERAL"
timetype = cms.string('runnumber'),
connect = cms.string('oracle://devdb10/CMS_COND_DT'), ##devdb10/CMS_COND_DT"
authenticationMethod = cms.untracked.uint32(1)
)
process.source = cms.Source("EmptySource",
maxEvents = cms.untracked.int32(5),
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.get = cms.EDAnalyzer("EventSetupRecordDataGetter",
toGet = cms.VPSet(cms.PSet(
record = cms.string('DTT0Rcd'),
data = cms.vstring('DTT0')
),
cms.PSet(
record = cms.string('DTTtrigRcd'),
data = cms.vstring('DTTtrig')
),
cms.PSet(
record = cms.string('DTReadOutMappingRcd'),
data = cms.vstring('DTReadOutMapping')
)),
verbose = cms.untracked.bool(True)
)
process.printer = cms.OutputModule("AsciiOutputModule")
process.p = cms.Path(process.get)
process.ep = cms.EndPath(process.printer)
| true | true |
f724739e7899ac5f3fc808e74a2261f63bf6be67 | 2,010 | py | Python | 3_text/apache_log_parser_split.py | lluxury/P_U_S_A | 1eb9d1fef74f9ce3618ae950f5223f598510be84 | [
"MIT"
] | null | null | null | 3_text/apache_log_parser_split.py | lluxury/P_U_S_A | 1eb9d1fef74f9ce3618ae950f5223f598510be84 | [
"MIT"
] | null | null | null | 3_text/apache_log_parser_split.py | lluxury/P_U_S_A | 1eb9d1fef74f9ce3618ae950f5223f598510be84 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
USAGE:
apache_log_parser_split.py some_log_file
This script takes one command line argument: the name of a log file
to parse. It then parses the log file and generates a report which
associates remote hosts with number of bytes transferred to them.
"""
import sys
def dictify_logline(line):
'''return a dictionary of the pertinent pieces of an apache combined log file
Currently, the only fields we are interested in are remote host and bytes sent,
but we are putting status in there just for good measure.
'''
split_line = line.split()
return {'remote_host': split_line[0],
'status': split_line[8],
'bytes_sent': split_line[9],
}
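# --- Illustrative usage sketch (an assumption, not part of the original script) ---
# A combined-format log line split()s into 10+ fields, so dictify_logline picks out
# fields 0, 8 and 9:
def _demo_dictify_logline():
    line = ('127.0.0.1 - - [10/Oct/2000:13:55:36 -0700] "GET /index.html HTTP/1.0" '
            '200 2326 "-" "Mozilla/5.0"')
    return dictify_logline(line)
    # -> {'remote_host': '127.0.0.1', 'status': '200', 'bytes_sent': '2326'}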
def generate_log_report(logfile):
'''return a dictionary of format remote_host=>[list of bytes sent]
This function takes a file object, iterates through all the lines in the file,
and generates a report of the number of bytes transferred to each remote host
for each hit on the webserver.
'''
report_dict = {}
for line in logfile:
line_dict = dictify_logline(line)
#
print (line_dict)
try:
bytes_sent = int(line_dict['bytes_sent'])
except ValueError:
##totally disregard anything we don't understand
continue
report_dict.setdefault(line_dict['remote_host'], []).append(bytes_sent)
return report_dict
if __name__ == "__main__":
if not len(sys.argv) > 1:
print (__doc__)
sys.exit(1)
infile_name = sys.argv[1]
try:
infile = open(infile_name, 'r')
except IOError:
print ("You must specify a valid file to parse")
print (__doc__)
sys.exit(1)
log_report = generate_log_report(infile)
#
print (log_report)
infile.close()
# __main__ does only minimal work: it checks the arguments, tries to open the file,
# and passes it to generate_log_report().
# generate_log_report() builds a dictionary, iterates over every log line, and passes
# each line to dictify_logline(); it checks that bytes_sent is an integer, updates the
# dictionary with the data returned by dictify_logline(), and finally returns the
# dictionary back to __main__.
| 30.454545 | 83 | 0.676617 |
import sys
def dictify_logline(line):
split_line = line.split()
return {'remote_host': split_line[0],
'status': split_line[8],
'bytes_sent': split_line[9],
}
def generate_log_report(logfile):
report_dict = {}
for line in logfile:
line_dict = dictify_logline(line)
print (line_dict)
try:
bytes_sent = int(line_dict['bytes_sent'])
        except ValueError:
            continue
        report_dict.setdefault(line_dict['remote_host'], []).append(bytes_sent)
    return report_dict
if __name__ == "__main__":
if not len(sys.argv) > 1:
print (__doc__)
sys.exit(1)
infile_name = sys.argv[1]
try:
infile = open(infile_name, 'r')
except IOError:
print ("You must specify a valid file to parse")
print (__doc__)
sys.exit(1)
log_report = generate_log_report(infile)
#
print (log_report)
infile.close()
# __main__ does only minimal work: it checks the arguments, tries to open the file,
# and passes it to generate_log_report().
# generate_log_report() builds a dictionary, iterates over every log line, and passes
# each line to dictify_logline(); it checks that bytes_sent is an integer, updates the
# dictionary with the data returned by dictify_logline(), and finally returns the
# dictionary back to __main__.
| true | true |
f724743cde7860172ea7e6028c7280410480dec1 | 7,225 | py | Python | thx_bot/integration_conversations.py | thxprotocol/telegram-bot | d8c77060740f76ed50aab93cb7c6ca83567d1710 | [
"Apache-2.0"
] | null | null | null | thx_bot/integration_conversations.py | thxprotocol/telegram-bot | d8c77060740f76ed50aab93cb7c6ca83567d1710 | [
"Apache-2.0"
] | null | null | null | thx_bot/integration_conversations.py | thxprotocol/telegram-bot | d8c77060740f76ed50aab93cb7c6ca83567d1710 | [
"Apache-2.0"
] | 3 | 2021-10-31T19:20:39.000Z | 2021-12-04T03:35:23.000Z | from telegram.ext import CommandHandler
from telegram.ext import ConversationHandler
from telegram.ext import Filters
from telegram.ext import MessageHandler
from thx_bot.commands import CHOOSING
from thx_bot.commands import CHOOSING_ADD_MEMBER
from thx_bot.commands import CHOOSING_REWARDS
from thx_bot.commands import CHOOSING_SIGNUP
from thx_bot.commands import CHOOSING_TOKENS
from thx_bot.commands import CHOOSING_WALLET_UPDATE
from thx_bot.commands import TYPING_REPLY
from thx_bot.commands import TYPING_REPLY_MEMBER
from thx_bot.commands import TYPING_REPLY_SIGNUP
from thx_bot.commands import TYPING_REPLY_WALLET_UPDATE
from thx_bot.commands import TYPING_REWARD_REPLY
from thx_bot.commands import TYPING_TOKENS_REPLY
from thx_bot.commands.add_member import start_adding_member
from thx_bot.commands.add_member import done_member_add
from thx_bot.commands.add_member import received_information_member_add
from thx_bot.commands.add_member import regular_choice_member_add
from thx_bot.commands.create_wallet import done_signup
from thx_bot.commands.create_wallet import received_information_signup
from thx_bot.commands.create_wallet import regular_choice_signup
from thx_bot.commands.create_wallet import start_creating_wallet
from thx_bot.commands.entrance import disable_entrance_checks
from thx_bot.commands.entrance import done_permission
from thx_bot.commands.entrance import permissions_entrypoint
from thx_bot.commands.entrance import received_permission_amount
from thx_bot.commands.entrance import regular_choice_permissions
from thx_bot.commands.entrance import show_entrance_permision_for_channel
from thx_bot.commands.entrance import toggle_users_with_rewards
from thx_bot.commands.pool_rewards import done_rewards
from thx_bot.commands.pool_rewards import pool_show_rewards_command
from thx_bot.commands.pool_rewards import received_information_reward
from thx_bot.commands.pool_rewards import regular_choice_reward
from thx_bot.commands.pool_rewards import rewards_entrypoint
from thx_bot.commands.register_channel import check_connection_channel
from thx_bot.commands.register_channel import done_channel
from thx_bot.commands.register_channel import received_information_channel
from thx_bot.commands.register_channel import regular_choice_channel
from thx_bot.commands.register_channel import start_setting_channel
from thx_bot.commands.update_wallet import done_wallet_update
from thx_bot.commands.update_wallet import received_information_wallet_update
from thx_bot.commands.update_wallet import regular_choice_wallet_update
from thx_bot.commands.update_wallet import start_updating_wallet
register_channel_conversation = ConversationHandler(
entry_points=[CommandHandler('register_channel', start_setting_channel)], # noqa
states={ # noqa
CHOOSING: [
MessageHandler(
Filters.regex('^(Client id|Client secret|Pool address)$'),
regular_choice_channel
),
],
TYPING_REPLY: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_channel,
)
],
},
fallbacks=[ # noqa
MessageHandler(Filters.regex('^Done$'), done_channel),
MessageHandler(Filters.regex('^Test Connection$'), check_connection_channel),
],
name="register_channel",
persistent=False,
)
create_wallet_conversation = ConversationHandler(
entry_points=[CommandHandler('create_wallet', start_creating_wallet)], # noqa
states={ # noqa
CHOOSING_SIGNUP: [
MessageHandler(
Filters.regex('^(Email|Password)$'), regular_choice_signup
),
],
TYPING_REPLY_SIGNUP: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_signup,
)
],
},
fallbacks=[ # noqa
MessageHandler(Filters.regex('^Done$'), done_signup),
], # noqa
name="create_wallet",
persistent=False,
)
update_wallet_conversation = ConversationHandler(
entry_points=[CommandHandler('update_wallet', start_updating_wallet)], # noqa
states={ # noqa
CHOOSING_WALLET_UPDATE: [
MessageHandler(
Filters.regex('^Wallet Update$'), regular_choice_wallet_update
),
],
TYPING_REPLY_WALLET_UPDATE: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_wallet_update,
)
],
},
fallbacks=[ # noqa
MessageHandler(Filters.regex('^Done$'), done_wallet_update),
], # noqa
name="update_wallet",
persistent=False,
)
rewards_conversation = ConversationHandler(
entry_points=[CommandHandler('rewards', rewards_entrypoint)], # noqa
states={ # noqa
CHOOSING_REWARDS: [
MessageHandler(
Filters.regex('^Set Reward$'), regular_choice_reward
),
],
TYPING_REWARD_REPLY: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_reward,
)
],
},
fallbacks=[ # noqa
MessageHandler(Filters.regex('^Done$'), done_rewards),
MessageHandler(Filters.regex('^Show rewards$'), pool_show_rewards_command),
], # noqa
name="rewards",
persistent=False,
)
entrance_tokens_conversation = ConversationHandler(
entry_points=[CommandHandler('entrance', permissions_entrypoint)],
states={ # noqa
CHOOSING_TOKENS: [
MessageHandler(
Filters.regex('^Set entrance amount$'), regular_choice_permissions
),
],
TYPING_TOKENS_REPLY: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_permission_amount,
)
],
},
fallbacks=[ # noqa
MessageHandler(Filters.regex('^Done$'), done_permission),
MessageHandler(
Filters.regex('^Show entrance configuration$'), show_entrance_permision_for_channel),
MessageHandler(
Filters.regex('^Disable entrance checks$'), disable_entrance_checks),
MessageHandler(
Filters.regex('^Toggle only users with rewards$'), toggle_users_with_rewards),
], # noqa
name="entrance",
persistent=False,
)
add_member_conversation = ConversationHandler(
entry_points=[CommandHandler('add_me_to_pool', start_adding_member)], # noqa
states={ # noqa
CHOOSING_ADD_MEMBER: [
MessageHandler(
Filters.regex('^Add your wallet$'), regular_choice_member_add
),
],
TYPING_REPLY_MEMBER: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_member_add,
)
],
},
fallbacks=[ # noqa
MessageHandler(Filters.regex('^Done$'), done_member_add),
], # noqa
name="add_member",
persistent=False,
)
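# --- Illustrative usage sketch (an assumption, not part of the original module) ---
# With a python-telegram-bot v13-style Dispatcher (implied by the Filters import
# above), the conversations defined here could be registered like this:
def _demo_register_conversations(dispatcher):
    for conversation in (
        register_channel_conversation,
        create_wallet_conversation,
        update_wallet_conversation,
        rewards_conversation,
        entrance_tokens_conversation,
        add_member_conversation,
    ):
        dispatcher.add_handler(conversation)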
| 37.827225 | 97 | 0.701315 | from telegram.ext import CommandHandler
from telegram.ext import ConversationHandler
from telegram.ext import Filters
from telegram.ext import MessageHandler
from thx_bot.commands import CHOOSING
from thx_bot.commands import CHOOSING_ADD_MEMBER
from thx_bot.commands import CHOOSING_REWARDS
from thx_bot.commands import CHOOSING_SIGNUP
from thx_bot.commands import CHOOSING_TOKENS
from thx_bot.commands import CHOOSING_WALLET_UPDATE
from thx_bot.commands import TYPING_REPLY
from thx_bot.commands import TYPING_REPLY_MEMBER
from thx_bot.commands import TYPING_REPLY_SIGNUP
from thx_bot.commands import TYPING_REPLY_WALLET_UPDATE
from thx_bot.commands import TYPING_REWARD_REPLY
from thx_bot.commands import TYPING_TOKENS_REPLY
from thx_bot.commands.add_member import start_adding_member
from thx_bot.commands.add_member import done_member_add
from thx_bot.commands.add_member import received_information_member_add
from thx_bot.commands.add_member import regular_choice_member_add
from thx_bot.commands.create_wallet import done_signup
from thx_bot.commands.create_wallet import received_information_signup
from thx_bot.commands.create_wallet import regular_choice_signup
from thx_bot.commands.create_wallet import start_creating_wallet
from thx_bot.commands.entrance import disable_entrance_checks
from thx_bot.commands.entrance import done_permission
from thx_bot.commands.entrance import permissions_entrypoint
from thx_bot.commands.entrance import received_permission_amount
from thx_bot.commands.entrance import regular_choice_permissions
from thx_bot.commands.entrance import show_entrance_permision_for_channel
from thx_bot.commands.entrance import toggle_users_with_rewards
from thx_bot.commands.pool_rewards import done_rewards
from thx_bot.commands.pool_rewards import pool_show_rewards_command
from thx_bot.commands.pool_rewards import received_information_reward
from thx_bot.commands.pool_rewards import regular_choice_reward
from thx_bot.commands.pool_rewards import rewards_entrypoint
from thx_bot.commands.register_channel import check_connection_channel
from thx_bot.commands.register_channel import done_channel
from thx_bot.commands.register_channel import received_information_channel
from thx_bot.commands.register_channel import regular_choice_channel
from thx_bot.commands.register_channel import start_setting_channel
from thx_bot.commands.update_wallet import done_wallet_update
from thx_bot.commands.update_wallet import received_information_wallet_update
from thx_bot.commands.update_wallet import regular_choice_wallet_update
from thx_bot.commands.update_wallet import start_updating_wallet
register_channel_conversation = ConversationHandler(
entry_points=[CommandHandler('register_channel', start_setting_channel)],
states={
CHOOSING: [
MessageHandler(
Filters.regex('^(Client id|Client secret|Pool address)$'),
regular_choice_channel
),
],
TYPING_REPLY: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_channel,
)
],
},
fallbacks=[
MessageHandler(Filters.regex('^Done$'), done_channel),
MessageHandler(Filters.regex('^Test Connection$'), check_connection_channel),
],
name="register_channel",
persistent=False,
)
create_wallet_conversation = ConversationHandler(
entry_points=[CommandHandler('create_wallet', start_creating_wallet)],
states={
CHOOSING_SIGNUP: [
MessageHandler(
Filters.regex('^(Email|Password)$'), regular_choice_signup
),
],
TYPING_REPLY_SIGNUP: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_signup,
)
],
},
fallbacks=[
MessageHandler(Filters.regex('^Done$'), done_signup),
],
name="create_wallet",
persistent=False,
)
update_wallet_conversation = ConversationHandler(
entry_points=[CommandHandler('update_wallet', start_updating_wallet)],
states={
CHOOSING_WALLET_UPDATE: [
MessageHandler(
Filters.regex('^Wallet Update$'), regular_choice_wallet_update
),
],
TYPING_REPLY_WALLET_UPDATE: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_wallet_update,
)
],
},
fallbacks=[
MessageHandler(Filters.regex('^Done$'), done_wallet_update),
],
name="update_wallet",
persistent=False,
)
rewards_conversation = ConversationHandler(
entry_points=[CommandHandler('rewards', rewards_entrypoint)],
states={
CHOOSING_REWARDS: [
MessageHandler(
Filters.regex('^Set Reward$'), regular_choice_reward
),
],
TYPING_REWARD_REPLY: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_reward,
)
],
},
fallbacks=[
MessageHandler(Filters.regex('^Done$'), done_rewards),
MessageHandler(Filters.regex('^Show rewards$'), pool_show_rewards_command),
],
name="rewards",
persistent=False,
)
entrance_tokens_conversation = ConversationHandler(
entry_points=[CommandHandler('entrance', permissions_entrypoint)],
states={
CHOOSING_TOKENS: [
MessageHandler(
Filters.regex('^Set entrance amount$'), regular_choice_permissions
),
],
TYPING_TOKENS_REPLY: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_permission_amount,
)
],
},
fallbacks=[
MessageHandler(Filters.regex('^Done$'), done_permission),
MessageHandler(
Filters.regex('^Show entrance configuration$'), show_entrance_permision_for_channel),
MessageHandler(
Filters.regex('^Disable entrance checks$'), disable_entrance_checks),
MessageHandler(
Filters.regex('^Toggle only users with rewards$'), toggle_users_with_rewards),
],
name="entrance",
persistent=False,
)
add_member_conversation = ConversationHandler(
entry_points=[CommandHandler('add_me_to_pool', start_adding_member)],
states={
CHOOSING_ADD_MEMBER: [
MessageHandler(
Filters.regex('^Add your wallet$'), regular_choice_member_add
),
],
TYPING_REPLY_MEMBER: [
MessageHandler(
Filters.text & ~(Filters.command | Filters.regex('^Done$')),
received_information_member_add,
)
],
},
fallbacks=[
MessageHandler(Filters.regex('^Done$'), done_member_add),
],
name="add_member",
persistent=False,
)
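
if __name__ == "__main__":
    # Illustrative wiring sketch, not part of the original module: it assumes
    # a python-telegram-bot v12/v13-style Updater and a placeholder token; the
    # project's real entry point may differ.
    from telegram.ext import Updater

    updater = Updater(token="BOT-TOKEN-PLACEHOLDER")
    for handler in (register_channel_conversation, create_wallet_conversation,
                    update_wallet_conversation, rewards_conversation,
                    entrance_tokens_conversation, add_member_conversation):
        updater.dispatcher.add_handler(handler)
    updater.start_polling()
    updater.idle()  # block until interrupted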

# ---------------------------------------------------------------------------
# pybind/nos/v6_0_2c/rbridge_id/router/router_bgp/router_bgp_attributes/
#     neighbor/peer_grps/neighbor_peer_grp/update_source/__init__.py
# from shivharis/pybind (Apache-2.0)
# ---------------------------------------------------------------------------
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import ethernet_interface
class update_source(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-rbridge - based on the path /rbridge-id/router/router-bgp/router-bgp-attributes/neighbor/peer-grps/neighbor-peer-grp/update-source. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__sip_ipv4_address','__ethernet_interface','__loopback','__ve_interface',)
_yang_name = 'update-source'
_rest_name = 'update-source'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ve_interface = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)
self.__sip_ipv4_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)
self.__loopback = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)
self.__ethernet_interface = YANGDynClass(base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'rbridge-id', u'router', u'router-bgp', u'router-bgp-attributes', u'neighbor', u'peer-grps', u'neighbor-peer-grp', u'update-source']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'rbridge-id', u'router', u'bgp', u'neighbor', u'neighbor-peer-grp', u'update-source']
def _get_sip_ipv4_address(self):
"""
Getter method for sip_ipv4_address, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/sip_ipv4_address (sip-ipv4-address)
"""
return self.__sip_ipv4_address
def _set_sip_ipv4_address(self, v, load=False):
"""
Setter method for sip_ipv4_address, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/sip_ipv4_address (sip-ipv4-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_sip_ipv4_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sip_ipv4_address() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sip_ipv4_address must be of a type compatible with sip-ipv4-address""",
'defined-type': "brocade-bgp:sip-ipv4-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)""",
})
self.__sip_ipv4_address = t
if hasattr(self, '_set'):
self._set()
def _unset_sip_ipv4_address(self):
self.__sip_ipv4_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)
def _get_ethernet_interface(self):
"""
Getter method for ethernet_interface, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/ethernet_interface (container)
"""
return self.__ethernet_interface
def _set_ethernet_interface(self, v, load=False):
"""
Setter method for ethernet_interface, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/ethernet_interface (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ethernet_interface is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ethernet_interface() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ethernet_interface must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__ethernet_interface = t
if hasattr(self, '_set'):
self._set()
def _unset_ethernet_interface(self):
self.__ethernet_interface = YANGDynClass(base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_loopback(self):
"""
Getter method for loopback, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/loopback (loopback-interface)
"""
return self.__loopback
def _set_loopback(self, v, load=False):
"""
Setter method for loopback, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/loopback (loopback-interface)
If this variable is read-only (config: false) in the
source YANG file, then _set_loopback is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_loopback() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """loopback must be of a type compatible with loopback-interface""",
'defined-type': "brocade-bgp:loopback-interface",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)""",
})
self.__loopback = t
if hasattr(self, '_set'):
self._set()
def _unset_loopback(self):
self.__loopback = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)
def _get_ve_interface(self):
"""
Getter method for ve_interface, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/ve_interface (interface:ve-type)
"""
return self.__ve_interface
def _set_ve_interface(self, v, load=False):
"""
Setter method for ve_interface, mapped from YANG variable /rbridge_id/router/router_bgp/router_bgp_attributes/neighbor/peer_grps/neighbor_peer_grp/update_source/ve_interface (interface:ve-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_ve_interface is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ve_interface() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ve_interface must be of a type compatible with interface:ve-type""",
'defined-type': "interface:ve-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)""",
})
self.__ve_interface = t
if hasattr(self, '_set'):
self._set()
def _unset_ve_interface(self):
self.__ve_interface = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)
sip_ipv4_address = __builtin__.property(_get_sip_ipv4_address, _set_sip_ipv4_address)
ethernet_interface = __builtin__.property(_get_ethernet_interface, _set_ethernet_interface)
loopback = __builtin__.property(_get_loopback, _set_loopback)
ve_interface = __builtin__.property(_get_ve_interface, _set_ve_interface)
__choices__ = {u'ch-update-source': {u'ca-eth': [u'ethernet_interface'], u'ca-ve': [u've_interface'], u'ca-ipv4': [u'sip_ipv4_address'], u'ca-loopback': [u'loopback']}}
_pyangbind_elements = {'sip_ipv4_address': sip_ipv4_address, 'ethernet_interface': ethernet_interface, 'loopback': loopback, 've_interface': ve_interface, }
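
# ---------------------------------------------------------------------------
# Usage sketch (illustrative; the values below are placeholders, not taken
# from the YANG model).  All four leaves sit under the YANG choice
# 'ch-update-source', so pyangbind's choice handling clears the other cases
# when one branch is populated:
#
#     us = update_source()
#     us.sip_ipv4_address = u'10.1.1.1'   # selects the 'ca-ipv4' case
#     us.loopback = u'1'                  # switches to 'ca-loopback'
#     us.ve_interface = 100               # 'ca-ve'; must fall within 1..8191
# ---------------------------------------------------------------------------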
| 77.868996 | 648 | 0.730316 |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import ethernet_interface
class update_source(PybindBase):
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__sip_ipv4_address','__ethernet_interface','__loopback','__ve_interface',)
_yang_name = 'update-source'
_rest_name = 'update-source'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ve_interface = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)
self.__sip_ipv4_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)
self.__loopback = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)
self.__ethernet_interface = YANGDynClass(base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'rbridge-id', u'router', u'router-bgp', u'router-bgp-attributes', u'neighbor', u'peer-grps', u'neighbor-peer-grp', u'update-source']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'rbridge-id', u'router', u'bgp', u'neighbor', u'neighbor-peer-grp', u'update-source']
def _get_sip_ipv4_address(self):
return self.__sip_ipv4_address
def _set_sip_ipv4_address(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sip_ipv4_address must be of a type compatible with sip-ipv4-address""",
'defined-type': "brocade-bgp:sip-ipv4-address",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)""",
})
self.__sip_ipv4_address = t
if hasattr(self, '_set'):
self._set()
def _unset_sip_ipv4_address(self):
self.__sip_ipv4_address = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), is_leaf=True, yang_name="sip-ipv4-address", rest_name="sip-ipv4-address", parent=self, choice=(u'ch-update-source', u'ca-ipv4'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='sip-ipv4-address', is_config=True)
def _get_ethernet_interface(self):
return self.__ethernet_interface
def _set_ethernet_interface(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ethernet_interface must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__ethernet_interface = t
if hasattr(self, '_set'):
self._set()
def _unset_ethernet_interface(self):
self.__ethernet_interface = YANGDynClass(base=ethernet_interface.ethernet_interface, is_container='container', presence=False, yang_name="ethernet-interface", rest_name="", parent=self, choice=(u'ch-update-source', u'ca-eth'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'cli-drop-node-name': None, u'cli-sequence-commands': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_loopback(self):
return self.__loopback
def _set_loopback(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """loopback must be of a type compatible with loopback-interface""",
'defined-type': "brocade-bgp:loopback-interface",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)""",
})
self.__loopback = t
if hasattr(self, '_set'):
self._set()
def _unset_loopback(self):
self.__loopback = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..63']}), is_leaf=True, yang_name="loopback", rest_name="loopback", parent=self, choice=(u'ch-update-source', u'ca-loopback'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Loopback Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='loopback-interface', is_config=True)
def _get_ve_interface(self):
return self.__ve_interface
def _set_ve_interface(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ve_interface must be of a type compatible with interface:ve-type""",
'defined-type': "interface:ve-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)""",
})
self.__ve_interface = t
if hasattr(self, '_set'):
self._set()
def _unset_ve_interface(self):
self.__ve_interface = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..8191']}), is_leaf=True, yang_name="ve-interface", rest_name="ve-interface", parent=self, choice=(u'ch-update-source', u'ca-ve'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual Interface'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='interface:ve-type', is_config=True)
sip_ipv4_address = __builtin__.property(_get_sip_ipv4_address, _set_sip_ipv4_address)
ethernet_interface = __builtin__.property(_get_ethernet_interface, _set_ethernet_interface)
loopback = __builtin__.property(_get_loopback, _set_loopback)
ve_interface = __builtin__.property(_get_ve_interface, _set_ve_interface)
__choices__ = {u'ch-update-source': {u'ca-eth': [u'ethernet_interface'], u'ca-ve': [u've_interface'], u'ca-ipv4': [u'sip_ipv4_address'], u'ca-loopback': [u'loopback']}}
_pyangbind_elements = {'sip_ipv4_address': sip_ipv4_address, 'ethernet_interface': ethernet_interface, 'loopback': loopback, 've_interface': ve_interface, }
| true | true |

# ---------------------------------------------------------------------------
# experiments/runtime/plot_runtime.py
# from patriotemeritus/LO-PHI (BSD-3-Clause)
# ---------------------------------------------------------------------------
"""
Make a "broken" horizontal bar plot, i.e. one with gaps, of run times.
(c) 2015 Massachusetts Institute of Technology
"""
import numpy
import pprint
import matplotlib
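# NB: the backend must be selected before pyplot is imported below; GTKAgg
# assumes a GTK-enabled matplotlib build (use e.g. 'Agg' when headless).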
matplotlib.use('GTKAgg')
import matplotlib.pyplot as plt
label_fontsize = 11
labels = ['Disk Reset',
# 'Power On',
'OS Boot',
'OS Stabilize',
'Key Presses',
'Mem. (Clean)',
'Compress (Clean)',
# 'Start Capture',
'Buttons (Clean)',
'Run Binary',
'Mem. (Interim)',
'Screenshot (Interim)',
'Buttons (Click)',
'Extra Sleep',
'Mem. (Dirty)',
'Screenshot (Final)',
'Compress (Dirty)',
'Shutdown',
'Store Results',
'wtf',
'wtf',
'wtf',
'wtf']
mal_executed_index = 7
flip_text = [6]
def normalize_tuples(tuples):
    """Convert absolute (start, end) timestamps into (offset, duration)
    pairs, measured relative to the start of the first tuple."""
    start = tuples[0][0]
    rtn_list = []
    for t in tuples:  # 't' rather than 'tuple', to avoid shadowing the builtin
        start_x = t[0] - start
        x_len = t[1] - t[0]
        rtn_list.append((start_x, x_len))
    return rtn_list
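
# Example: normalize_tuples([(100.0, 101.5), (101.5, 103.0)])
#          -> [(0.0, 1.5), (1.5, 1.5)]   (offset from first start, duration)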
# Get our virtual data
virtual_tuples = [(1439831168.838292, 1439831169.377921),
(1439831169.377968, 1439831190.231804),
(1439831190.231869, 1439831250.232374),
(1439831250.236563, 1439831287.317039),
(1439831287.317097, 1439831309.774347),
(1439831310.612543, 1439831402.00221), # Updated manually
(1439831311.01773, 1439831319.4211), (1439831319.929066, 1439831379.980296), (1439831379.98037, 1439831403.835613), (1439831403.835616, 1439831404.160049), (1439831404.160206, 1439831412.36363), (1439831412.367247, 1439831499.982088), (1439831499.98215, 1439831522.069226), (1439831522.069228, 1439831522.378756), (1439831522.378882, 1439831622.912597), (1439831622.912614, 1439831628.073827), (1439831628.108982, 1439831661.06076)]
virtual_tuples = normalize_tuples(virtual_tuples)
pprint.pprint(virtual_tuples)
# Get our physical data
physical_tuples = [(1439830680.396736, 1439831070.997367),
(1439831070.997433, 1439831114.95812),
(1439831114.958218, 1439831175.002975),
(1439831175.007641, 1439831216.18251),
(1439831216.182679, 1439831305.710553),
(1439831306.717004, 1439831454.234577), # Updated manually
(1439831307.13812, 1439831317.108357),
(1439831318.016684, 1439831378.074144),
(1439831378.074319, 1439831455.997746),
(1439831455.997755, 1439831460.148652),
(1439831460.148693, 1439831475.965947),
(1439831475.972392, 1439831498.053105),
(1439831498.053454, 1439831589.70029),
(1439831589.700302, 1439831594.548414),
(1439831594.548729, 1439831744.423983),
(1439831744.424003, 1439831772.876672),
(1439831773.100489, 1439831795.210495)]
physical_tuples = normalize_tuples(physical_tuples)
pprint.pprint(physical_tuples)
# physical_tuples = virtual_tuples
fig, (ax1, ax2) = plt.subplots(2)
y_val = len(virtual_tuples)
for idx in range(len(virtual_tuples)):
ax1.broken_barh([ physical_tuples[idx] ] , (y_val, 1), facecolors='grey')
ax2.broken_barh([ virtual_tuples[idx] ] , (y_val, 1), facecolors='grey')
print virtual_tuples[idx]
if idx == mal_executed_index:
ax1.annotate('Binary Executed', (physical_tuples[idx][0], y_val+.5),
xytext=(physical_tuples[idx][0]-500, y_val-2),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
ax2.annotate('Binary Executed', (virtual_tuples[idx][0], y_val+.5),
xytext=(virtual_tuples[idx][0]-125, y_val-2),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
elif idx in flip_text:
annotate_x = physical_tuples[idx][0]
ax1.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x-300, y_val+.5),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
annotate_x = virtual_tuples[idx][0]
ax2.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x-125, y_val+1),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
else:
annotate_x = physical_tuples[idx][1] + physical_tuples[idx][0]
ax1.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x+20, y_val+1),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
annotate_x = virtual_tuples[idx][1] + virtual_tuples[idx][0]
ax2.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x+20, y_val+1),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
y_val -= 1
# ax1.set_xlabel('Seconds Elapsed')
ax1.set_ylabel('Physical Analysis', fontsize=20)
ax2.set_ylabel('Virtual Analysis', fontsize=20)
max_x_virt = virtual_tuples[-1][0]+virtual_tuples[-1][1]
max_x_phy = physical_tuples[-1][0]+physical_tuples[-1][1]
ax1.set_ylim([1,len(virtual_tuples)+1])
ax2.set_ylim([1,len(virtual_tuples)+1])
ax2.set_xlabel('Time Elapsed (Minutes)', fontsize=20)
yticks = numpy.arange(len(physical_tuples))+1.5
# Remove top and right border
ax1.spines['right'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax2.spines['right'].set_visible(False)
ax2.spines['top'].set_visible(False)
# ax1.set_yticks(yticks)
# ax1.set_yticklabels(['']*len(virtual_tuples))
#
#
# ax2.set_yticks(yticks)
# ax2.set_yticklabels(['']*len(virtual_tuples))
ax1.set_yticks([])
ax2.set_yticks([])
# labels_reversed = []
# for x in reversed(labels):
# labels_reversed.append(x)
# ax2.set_yticklabels([''] + labels_reversed)
ax2.grid(True)
ax1.grid(True)
ax1.set_xticks(range(0,int(max_x_phy*1.2),120))
ax1.set_xticklabels(range(0,200,2))
ax2.set_xticks(range(0,int(max_x_virt*1.2),60))
ax2.set_xticklabels(range(0,200,1))
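# Major ticks every 120 s (physical) / 60 s (virtual) are relabeled in whole
# minutes, hence the label ranges stepping by 2 and 1 respectively.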
ax1.set_xlim([0,max_x_phy*1.2])
ax2.set_xlim([0,max_x_virt*1.2])
for tick in ax1.xaxis.get_major_ticks():
tick.label.set_fontsize(15)
for tick in ax1.yaxis.get_major_ticks():
tick.label.set_fontsize(15)
for tick in ax2.xaxis.get_major_ticks():
tick.label.set_fontsize(15)
for tick in ax2.yaxis.get_major_ticks():
tick.label.set_fontsize(15)
# ax.annotate('race interrupted', (61, 25),
# xytext=(0.8, 0.9), textcoords='axes fraction',
# arrowprops=dict(facecolor='black', shrink=0.05),
# fontsize=16,
# horizontalalignment='right', verticalalignment='top')
plt.tight_layout()
plt.savefig("runtime.eps", format='eps', dpi=1000)
# plt.show() | 35.596154 | 450 | 0.595354 | """
Make a "broken" horizontal bar plot, i.e. one with gaps, of run times.
(c) 2015 Massachusetts Institute of Technology
"""
import numpy
import pprint
import matplotlib
matplotlib.use('GTKAgg')
import matplotlib.pyplot as plt
label_fontsize = 11
labels = ['Disk Reset',
'OS Boot',
'OS Stabilize',
'Key Presses',
'Mem. (Clean)',
'Compress (Clean)',
'Buttons (Clean)',
'Run Binary',
'Mem. (Interim)',
'Screenshot (Interim)',
'Buttons (Click)',
'Extra Sleep',
'Mem. (Dirty)',
'Screenshot (Final)',
'Compress (Dirty)',
'Shutdown',
'Store Results',
'wtf',
'wtf',
'wtf',
'wtf']
mal_executed_index = 7
flip_text = [6]
def normalize_tuples(tuples):
start = tuples[0][0]
rtn_list = []
for tuple in tuples:
start_x = tuple[0]-start
x_len = tuple[1]-tuple[0]
rtn_list.append((start_x, x_len))
return rtn_list
virtual_tuples = [(1439831168.838292, 1439831169.377921),
(1439831169.377968, 1439831190.231804),
(1439831190.231869, 1439831250.232374),
(1439831250.236563, 1439831287.317039),
(1439831287.317097, 1439831309.774347),
(1439831310.612543, 1439831402.00221),
(1439831311.01773, 1439831319.4211), (1439831319.929066, 1439831379.980296), (1439831379.98037, 1439831403.835613), (1439831403.835616, 1439831404.160049), (1439831404.160206, 1439831412.36363), (1439831412.367247, 1439831499.982088), (1439831499.98215, 1439831522.069226), (1439831522.069228, 1439831522.378756), (1439831522.378882, 1439831622.912597), (1439831622.912614, 1439831628.073827), (1439831628.108982, 1439831661.06076)]
virtual_tuples = normalize_tuples(virtual_tuples)
pprint.pprint(virtual_tuples)
physical_tuples = [(1439830680.396736, 1439831070.997367),
(1439831070.997433, 1439831114.95812),
(1439831114.958218, 1439831175.002975),
(1439831175.007641, 1439831216.18251),
(1439831216.182679, 1439831305.710553),
(1439831306.717004, 1439831454.234577),
(1439831307.13812, 1439831317.108357),
(1439831318.016684, 1439831378.074144),
(1439831378.074319, 1439831455.997746),
(1439831455.997755, 1439831460.148652),
(1439831460.148693, 1439831475.965947),
(1439831475.972392, 1439831498.053105),
(1439831498.053454, 1439831589.70029),
(1439831589.700302, 1439831594.548414),
(1439831594.548729, 1439831744.423983),
(1439831744.424003, 1439831772.876672),
(1439831773.100489, 1439831795.210495)]
physical_tuples = normalize_tuples(physical_tuples)
pprint.pprint(physical_tuples)
fig, (ax1, ax2) = plt.subplots(2)
y_val = len(virtual_tuples)
for idx in range(len(virtual_tuples)):
ax1.broken_barh([ physical_tuples[idx] ] , (y_val, 1), facecolors='grey')
ax2.broken_barh([ virtual_tuples[idx] ] , (y_val, 1), facecolors='grey')
print virtual_tuples[idx]
if idx == mal_executed_index:
ax1.annotate('Binary Executed', (physical_tuples[idx][0], y_val+.5),
xytext=(physical_tuples[idx][0]-500, y_val-2),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
ax2.annotate('Binary Executed', (virtual_tuples[idx][0], y_val+.5),
xytext=(virtual_tuples[idx][0]-125, y_val-2),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
elif idx in flip_text:
annotate_x = physical_tuples[idx][0]
ax1.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x-300, y_val+.5),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
annotate_x = virtual_tuples[idx][0]
ax2.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x-125, y_val+1),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
else:
annotate_x = physical_tuples[idx][1] + physical_tuples[idx][0]
ax1.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x+20, y_val+1),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
annotate_x = virtual_tuples[idx][1] + virtual_tuples[idx][0]
ax2.annotate(labels[idx], (annotate_x, y_val+.5),
xytext=(annotate_x+20, y_val+1),
arrowprops=dict(facecolor='black',
arrowstyle="->"),
fontsize=label_fontsize)
y_val -= 1
idx += 1
ax1.set_ylabel('Physical Analysis', fontsize=20)
ax2.set_ylabel('Virtual Analysis', fontsize=20)
max_x_virt = virtual_tuples[-1][0]+virtual_tuples[-1][1]
max_x_phy = physical_tuples[-1][0]+physical_tuples[-1][1]
ax1.set_ylim([1,len(virtual_tuples)+1])
ax2.set_ylim([1,len(virtual_tuples)+1])
ax2.set_xlabel('Time Elapsed (Minutes)', fontsize=20)
yticks = numpy.arange(len(physical_tuples))+1.5
ax1.spines['right'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax2.spines['right'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax1.set_yticks([])
ax2.set_yticks([])
ax2.grid(True)
ax1.grid(True)
ax1.set_xticks(range(0,int(max_x_phy*1.2),120))
ax1.set_xticklabels(range(0,200,2))
ax2.set_xticks(range(0,int(max_x_virt*1.2),60))
ax2.set_xticklabels(range(0,200,1))
ax1.set_xlim([0,max_x_phy*1.2])
ax2.set_xlim([0,max_x_virt*1.2])
for tick in ax1.xaxis.get_major_ticks():
tick.label.set_fontsize(15)
for tick in ax1.yaxis.get_major_ticks():
tick.label.set_fontsize(15)
for tick in ax2.xaxis.get_major_ticks():
tick.label.set_fontsize(15)
for tick in ax2.yaxis.get_major_ticks():
tick.label.set_fontsize(15)
plt.tight_layout()
plt.savefig("runtime.eps", format='eps', dpi=1000)
| false | true |

# ---------------------------------------------------------------------------
# mud.py
# from nparry0/mud (MIT)
# ---------------------------------------------------------------------------
import gevent.server
from mud_telnet_handler import MudTelnetHandler
from game_server import GameServer
import argparse
import logging
# Set up logging
log = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("--playerdir", type=str, default="./players", help="Directory where all player data is kept.")
parser.add_argument("--map", type=str, default="mud.map", help="Map file")
parser.add_argument("--port", type=int, default=3000, help="Listening port.")
args = parser.parse_args()
log.info("Mud starting. Params: %s" % args)
# Set up some class vars of MudTelnetHandler
MudTelnetHandler.player_dir = args.playerdir + "/"
MudTelnetHandler.game_server = GameServer(args.map)
server = gevent.server.StreamServer(("", args.port), MudTelnetHandler.streamserver_handle)
server.serve_forever()
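
# Illustrative only (not in the original module): serve_forever() blocks until
# interrupted, so a graceful-shutdown variant could catch KeyboardInterrupt
# and call gevent's BaseServer.stop():
#
#     try:
#         server.serve_forever()
#     except KeyboardInterrupt:
#         log.info("Mud shutting down.")
#         server.stop()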

# ---------------------------------------------------------------------------
# allauth/account/tests.py
# from pkyad/django-allauth (MIT)
# ---------------------------------------------------------------------------
from __future__ import absolute_import
import json
from datetime import timedelta
import django
from django.utils.timezone import now
from django.test.utils import override_settings
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.client import Client
from django.core import mail
from django.test.client import RequestFactory
from django.contrib.auth.models import AnonymousUser, AbstractUser
from django.db import models
import unittest
from allauth.tests import TestCase, patch
from allauth.account.forms import BaseSignupForm
from allauth.account.models import (
EmailAddress,
EmailConfirmation,
EmailConfirmationHMAC)
from allauth.utils import (
get_current_site,
get_user_model,
get_username_max_length)
from . import app_settings
from .auth_backends import AuthenticationBackend
from .adapter import get_adapter
from .utils import url_str_to_user_pk, user_pk_to_url_str
import uuid
@override_settings(
ACCOUNT_DEFAULT_HTTP_PROTOCOL='https',
ACCOUNT_EMAIL_VERIFICATION=app_settings.EmailVerificationMethod.MANDATORY,
ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME,
ACCOUNT_SIGNUP_FORM_CLASS=None,
ACCOUNT_EMAIL_SUBJECT_PREFIX=None,
LOGIN_REDIRECT_URL='/accounts/profile/',
ACCOUNT_ADAPTER='allauth.account.adapter.DefaultAccountAdapter',
ACCOUNT_USERNAME_REQUIRED=True)
class AccountTests(TestCase):
def setUp(self):
if 'allauth.socialaccount' in settings.INSTALLED_APPS:
# Otherwise ImproperlyConfigured exceptions may occur
from ..socialaccount.models import SocialApp
sa = SocialApp.objects.create(name='testfb',
provider='facebook')
sa.sites.add(get_current_site())
@override_settings(
ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod
.USERNAME_EMAIL)
def test_username_containing_at(self):
user = get_user_model().objects.create(username='@raymond.penners')
user.set_password('psst')
user.save()
EmailAddress.objects.create(user=user,
email='raymond.penners@gmail.com',
primary=True,
verified=True)
resp = self.client.post(reverse('account_login'),
{'login': '@raymond.penners',
'password': 'psst'})
self.assertRedirects(resp,
                             'http://testserver' + settings.LOGIN_REDIRECT_URL,
fetch_redirect_response=False)
def test_signup_same_email_verified_externally(self):
user = self._test_signup_email_verified_externally('john@doe.com',
'john@doe.com')
self.assertEqual(EmailAddress.objects.filter(user=user).count(),
1)
EmailAddress.objects.get(verified=True,
email='john@doe.com',
user=user,
primary=True)
def test_signup_other_email_verified_externally(self):
"""
John is invited on john@work.com, but signs up via john@home.com.
E-mail verification is by-passed, their home e-mail address is
used as a secondary.
"""
user = self._test_signup_email_verified_externally('john@home.com',
'john@work.com')
self.assertEqual(EmailAddress.objects.filter(user=user).count(),
2)
EmailAddress.objects.get(verified=False,
email='john@home.com',
user=user,
primary=False)
EmailAddress.objects.get(verified=True,
email='john@work.com',
user=user,
primary=True)
def _test_signup_email_verified_externally(self, signup_email,
verified_email):
username = 'johndoe'
request = RequestFactory().post(reverse('account_signup'),
{'username': username,
'email': signup_email,
'password1': 'johndoe',
'password2': 'johndoe'})
        # Fake the stash_verified_email step that normally records an
        # externally verified address (e.g. from a social login) in the session
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
SessionMiddleware().process_request(request)
MessageMiddleware().process_request(request)
request.user = AnonymousUser()
request.session['account_verified_email'] = verified_email
from .views import signup
resp = signup(request)
self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'],
get_adapter().get_login_redirect_url(request))
self.assertEqual(len(mail.outbox), 0)
return get_user_model().objects.get(username=username)
@override_settings(
ACCOUNT_USERNAME_REQUIRED=True,
ACCOUNT_SIGNUP_EMAIL_ENTER_TWICE=True)
def test_signup_email_twice(self):
request = RequestFactory().post(reverse('account_signup'),
{'username': 'johndoe',
'email1': 'john@work.com',
'email2': 'john@work.com',
'password1': 'johndoe',
'password2': 'johndoe'})
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
SessionMiddleware().process_request(request)
MessageMiddleware().process_request(request)
request.user = AnonymousUser()
from .views import signup
signup(request)
user = get_user_model().objects.get(username='johndoe')
self.assertEqual(user.email, 'john@work.com')
def _create_user(self):
user = get_user_model().objects.create(username='john', is_active=True)
user.set_password('doe')
user.save()
return user
def _create_user_and_login(self):
user = self._create_user()
self.client.login(username='john', password='doe')
return user
def test_redirect_when_authenticated(self):
self._create_user_and_login()
c = self.client
resp = c.get(reverse('account_login'))
self.assertRedirects(resp, 'http://testserver/accounts/profile/',
fetch_redirect_response=False)
def test_password_reset_get(self):
resp = self.client.get(reverse('account_reset_password'))
self.assertTemplateUsed(resp, 'account/password_reset.html')
def test_password_set_redirect(self):
resp = self._password_set_or_reset_redirect('account_set_password',
True)
self.assertEqual(resp.status_code, 302)
def test_password_reset_no_redirect(self):
resp = self._password_set_or_reset_redirect('account_change_password',
True)
self.assertEqual(resp.status_code, 200)
def test_password_set_no_redirect(self):
resp = self._password_set_or_reset_redirect('account_set_password',
False)
self.assertEqual(resp.status_code, 200)
def test_password_reset_redirect(self):
resp = self._password_set_or_reset_redirect('account_change_password',
False)
self.assertEqual(resp.status_code, 302)
def _password_set_or_reset_redirect(self, urlname, usable_password):
user = self._create_user_and_login()
c = self.client
if not usable_password:
user.set_unusable_password()
user.save()
resp = c.get(reverse(urlname))
return resp
def test_password_forgotten_username_hint(self):
user = self._request_new_password()
body = mail.outbox[0].body
assert user.username in body
@override_settings(
ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL)
def test_password_forgotten_no_username_hint(self):
user = self._request_new_password()
body = mail.outbox[0].body
assert user.username not in body
def _request_new_password(self):
user = get_user_model().objects.create(
username='john', email='john@doe.org', is_active=True)
user.set_password('doe')
user.save()
self.client.post(
reverse('account_reset_password'),
data={'email': 'john@doe.org'})
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['john@doe.org'])
return user
def test_password_reset_flow(self):
"""
Tests the password reset flow: requesting a new password,
receiving the reset link via email and finally resetting the
password to a new value.
"""
# Request new password
user = self._request_new_password()
body = mail.outbox[0].body
self.assertGreater(body.find('https://'), 0)
# Extract URL for `password_reset_from_key` view and access it
url = body[body.find('/password/reset/'):].split()[0]
resp = self.client.get(url)
self.assertTemplateUsed(
resp,
'account/password_reset_from_key.%s' %
app_settings.TEMPLATE_EXTENSION)
self.assertFalse('token_fail' in resp.context_data)
# Reset the password
resp = self.client.post(url,
{'password1': 'newpass123',
'password2': 'newpass123'})
self.assertRedirects(resp,
reverse('account_reset_password_from_key_done'))
# Check the new password is in effect
user = get_user_model().objects.get(pk=user.pk)
self.assertTrue(user.check_password('newpass123'))
# Trying to reset the password against the same URL (or any other
# invalid/obsolete URL) returns a bad token response
resp = self.client.post(url,
{'password1': 'newpass123',
'password2': 'newpass123'})
self.assertTemplateUsed(
resp,
'account/password_reset_from_key.%s' %
app_settings.TEMPLATE_EXTENSION)
self.assertTrue(resp.context_data['token_fail'])
# Same should happen when accessing the page directly
response = self.client.get(url)
self.assertTemplateUsed(
response,
'account/password_reset_from_key.%s' %
app_settings.TEMPLATE_EXTENSION)
self.assertTrue(response.context_data['token_fail'])
# When in XHR views, it should respond with a 400 bad request
# code, and the response body should contain the JSON-encoded
# error from the adapter
response = self.client.post(url,
{'password1': 'newpass123',
'password2': 'newpass123'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 400)
data = json.loads(response.content.decode('utf8'))
self.assertTrue('form_errors' in data)
self.assertTrue('__all__' in data['form_errors'])
@override_settings(ACCOUNT_LOGIN_ON_PASSWORD_RESET=True)
def test_password_reset_ACCOUNT_LOGIN_ON_PASSWORD_RESET(self):
user = self._request_new_password()
body = mail.outbox[0].body
url = body[body.find('/password/reset/'):].split()[0]
resp = self.client.post(
url,
{'password1': 'newpass123',
'password2': 'newpass123'})
self.assertTrue(user.is_authenticated())
# EmailVerificationMethod.MANDATORY sends us to the confirm-email page
self.assertRedirects(resp, '/confirm-email/')
@override_settings(ACCOUNT_EMAIL_CONFIRMATION_HMAC=False)
def test_email_verification_mandatory(self):
c = Client()
# Signup
resp = c.post(reverse('account_signup'),
{'username': 'johndoe',
'email': 'john@doe.com',
'password1': 'johndoe',
'password2': 'johndoe'},
follow=True)
self.assertEqual(resp.status_code, 200)
self.assertEqual(mail.outbox[0].to, ['john@doe.com'])
self.assertGreater(mail.outbox[0].body.find('https://'), 0)
self.assertEqual(len(mail.outbox), 1)
self.assertTemplateUsed(
resp,
'account/verification_sent.%s' % app_settings.TEMPLATE_EXTENSION)
# Attempt to login, unverified
for attempt in [1, 2]:
resp = c.post(reverse('account_login'),
{'login': 'johndoe',
'password': 'johndoe'},
follow=True)
# is_active is controlled by the admin to manually disable
# users. I don't want this flag to flip automatically whenever
            # users verify their email addresses.
self.assertTrue(get_user_model().objects.filter(
username='johndoe', is_active=True).exists())
self.assertTemplateUsed(
resp,
'account/verification_sent.' + app_settings.TEMPLATE_EXTENSION)
            # Attempt 1: no mail is sent due to the cool-down,
# but there was already a mail in the outbox.
self.assertEqual(len(mail.outbox), attempt)
self.assertEqual(
EmailConfirmation.objects.filter(
email_address__email='john@doe.com').count(),
attempt)
# Wait for cooldown
EmailConfirmation.objects.update(sent=now() - timedelta(days=1))
# Verify, and re-attempt to login.
confirmation = EmailConfirmation \
.objects \
.filter(email_address__user__username='johndoe')[:1] \
.get()
resp = c.get(reverse('account_confirm_email',
args=[confirmation.key]))
self.assertTemplateUsed(
resp,
'account/email_confirm.%s' % app_settings.TEMPLATE_EXTENSION)
c.post(reverse('account_confirm_email',
args=[confirmation.key]))
resp = c.post(reverse('account_login'),
{'login': 'johndoe',
'password': 'johndoe'})
self.assertRedirects(resp,
'http://testserver'+settings.LOGIN_REDIRECT_URL,
fetch_redirect_response=False)
def test_email_escaping(self):
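        # Mail subjects are plain text, so the site name must appear
        # unescaped in the subject prefix.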
site = get_current_site()
site.name = '<enc&"test>'
site.save()
u = get_user_model().objects.create(
username='test',
email='foo@bar.com')
request = RequestFactory().get('/')
EmailAddress.objects.add_email(request, u, u.email, confirm=True)
self.assertTrue(mail.outbox[0].subject[1:].startswith(site.name))
@override_settings(
ACCOUNT_EMAIL_VERIFICATION=app_settings.EmailVerificationMethod
.OPTIONAL)
def test_login_unverified_account_optional(self):
"""Tests login behavior when email verification is optional."""
user = get_user_model().objects.create(username='john')
user.set_password('doe')
user.save()
EmailAddress.objects.create(user=user,
email='john@example.com',
primary=True,
verified=False)
resp = self.client.post(reverse('account_login'),
{'login': 'john',
'password': 'doe'})
self.assertRedirects(resp,
'http://testserver'+settings.LOGIN_REDIRECT_URL,
fetch_redirect_response=False)
@override_settings(
ACCOUNT_EMAIL_VERIFICATION=app_settings.EmailVerificationMethod
.OPTIONAL,
ACCOUNT_LOGIN_ATTEMPTS_LIMIT=3)
def test_login_failed_attempts_exceeded(self):
user = get_user_model().objects.create(username='john')
user.set_password('doe')
user.save()
EmailAddress.objects.create(user=user,
email='john@example.com',
primary=True,
verified=False)
for i in range(5):
is_valid_attempt = (i == 4)
is_locked = (i >= 3)
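            # With ACCOUNT_LOGIN_ATTEMPTS_LIMIT=3, attempts 4 and 5 are
            # locked out -- even attempt 5, which supplies the correct
            # password.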
resp = self.client.post(
reverse('account_login'),
{'login': 'john',
'password': (
'doe' if is_valid_attempt
else 'wrong')})
self.assertFormError(
resp,
'form',
None,
'Too many failed login attempts. Try again later.'
if is_locked
else
'The username and/or password you specified are not correct.')
def test_login_unverified_account_mandatory(self):
"""Tests login behavior when email verification is mandatory."""
user = get_user_model().objects.create(username='john')
user.set_password('doe')
user.save()
EmailAddress.objects.create(user=user,
email='john@example.com',
primary=True,
verified=False)
resp = self.client.post(reverse('account_login'),
{'login': 'john',
'password': 'doe'})
self.assertRedirects(resp, reverse('account_email_verification_sent'))
def test_login_inactive_account(self):
"""
Tests login behavior with inactive accounts.
Inactive user accounts should be prevented from performing any actions,
regardless of their verified state.
"""
# Inactive and verified user account
user = get_user_model().objects.create(username='john',
is_active=False)
user.set_password('doe')
user.save()
EmailAddress.objects.create(user=user,
email='john@example.com',
primary=True,
verified=True)
resp = self.client.post(reverse('account_login'),
{'login': 'john',
'password': 'doe'})
self.assertRedirects(resp, reverse('account_inactive'))
# Inactive and unverified user account
user = get_user_model().objects.create(username='doe', is_active=False)
user.set_password('john')
user.save()
EmailAddress.objects.create(user=user,
email='doe@example.com',
primary=True,
verified=False)
resp = self.client.post(reverse('account_login'),
{'login': 'doe',
'password': 'john'})
self.assertRedirects(resp, reverse('account_inactive'))
def test_ajax_password_reset(self):
get_user_model().objects.create(
username='john', email='john@doe.org', is_active=True)
resp = self.client.post(
reverse('account_reset_password'),
data={'email': 'john@doe.org'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['john@doe.org'])
self.assertEqual(resp['content-type'], 'application/json')
def test_ajax_login_fail(self):
resp = self.client.post(reverse('account_login'),
{},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(resp.status_code, 400)
json.loads(resp.content.decode('utf8'))
# TODO: Actually test something
@override_settings(
ACCOUNT_EMAIL_VERIFICATION=app_settings.EmailVerificationMethod
.OPTIONAL)
def test_ajax_login_success(self):
user = get_user_model().objects.create(username='john', is_active=True)
user.set_password('doe')
user.save()
resp = self.client.post(reverse('account_login'),
{'login': 'john',
'password': 'doe'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content.decode('utf8'))
self.assertEqual(data['location'], '/accounts/profile/')
def test_email_view(self):
self._create_user_and_login()
self.client.get(reverse('account_email'))
# TODO: Actually test something
@override_settings(ACCOUNT_LOGOUT_ON_GET=True)
def test_logout_view_on_get(self):
c, resp = self._logout_view('get')
self.assertTemplateUsed(resp, 'account/messages/logged_out.txt')
@override_settings(ACCOUNT_LOGOUT_ON_GET=False)
def test_logout_view_on_post(self):
c, resp = self._logout_view('get')
self.assertTemplateUsed(
resp,
'account/logout.%s' % app_settings.TEMPLATE_EXTENSION)
resp = c.post(reverse('account_logout'))
self.assertTemplateUsed(resp, 'account/messages/logged_out.txt')
def _logout_view(self, method):
        user = get_user_model().objects.create(username='john', is_active=True)
        user.set_password('doe')
        user.save()
        c = Client()
c.login(username='john', password='doe')
return c, getattr(c, method)(reverse('account_logout'))
@override_settings(ACCOUNT_EMAIL_VERIFICATION=app_settings
.EmailVerificationMethod.OPTIONAL)
def test_optional_email_verification(self):
c = Client()
# Signup
c.get(reverse('account_signup'))
resp = c.post(reverse('account_signup'),
{'username': 'johndoe',
'email': 'john@doe.com',
'password1': 'johndoe',
'password2': 'johndoe'})
# Logged in
self.assertRedirects(resp,
settings.LOGIN_REDIRECT_URL,
fetch_redirect_response=False)
self.assertEqual(mail.outbox[0].to, ['john@doe.com'])
self.assertEqual(len(mail.outbox), 1)
# Logout & login again
c.logout()
# Wait for cooldown
EmailConfirmation.objects.update(sent=now() - timedelta(days=1))
        # Login again
resp = c.post(reverse('account_login'),
{'login': 'johndoe',
'password': 'johndoe'})
self.assertRedirects(resp,
settings.LOGIN_REDIRECT_URL,
fetch_redirect_response=False)
self.assertEqual(mail.outbox[0].to, ['john@doe.com'])
# There was an issue that we sent out email confirmation mails
# on each login in case of optional verification. Make sure
# this is not the case:
self.assertEqual(len(mail.outbox), 1)
@override_settings(ACCOUNT_AUTHENTICATED_LOGIN_REDIRECTS=False)
def test_account_authenticated_login_redirects_is_false(self):
self._create_user_and_login()
resp = self.client.get(reverse('account_login'))
self.assertEqual(resp.status_code, 200)
@override_settings(AUTH_PASSWORD_VALIDATORS=[{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 9,
}
}])
def test_django_password_validation(self):
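        # AUTH_PASSWORD_VALIDATORS was introduced in Django 1.9; skip
        # silently on older versions.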
if django.VERSION < (1, 9, ):
return
resp = self.client.post(
reverse('account_signup'),
{'username': 'johndoe',
'email': 'john@doe.com',
'password1': 'johndoe',
'password2': 'johndoe'})
self.assertFormError(resp, 'form', None, [])
self.assertFormError(
resp,
'form',
'password1',
['This password is too short.'
' It must contain at least 9 characters.'])
@override_settings(ACCOUNT_EMAIL_CONFIRMATION_HMAC=True)
def test_email_confirmation_hmac_falls_back(self):
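        # With HMAC-based confirmations enabled, a key issued by the older
        # DB-backed EmailConfirmation model should still be accepted.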
user = self._create_user()
email = EmailAddress.objects.create(
user=user,
email='a@b.com',
verified=False,
primary=True)
confirmation = EmailConfirmation.create(email)
confirmation.sent = now()
confirmation.save()
self.client.post(
reverse('account_confirm_email',
args=[confirmation.key]))
email = EmailAddress.objects.get(pk=email.pk)
self.assertTrue(email.verified)
@override_settings(ACCOUNT_EMAIL_CONFIRMATION_HMAC=True)
def test_email_confirmation_hmac(self):
user = self._create_user()
email = EmailAddress.objects.create(
user=user,
email='a@b.com',
verified=False,
primary=True)
confirmation = EmailConfirmationHMAC(email)
confirmation.send()
self.assertEqual(len(mail.outbox), 1)
self.client.post(
reverse('account_confirm_email',
args=[confirmation.key]))
email = EmailAddress.objects.get(pk=email.pk)
self.assertTrue(email.verified)
@override_settings(
ACCOUNT_EMAIL_CONFIRMATION_HMAC=True,
ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS=0)
def test_email_confirmation_hmac_timeout(self):
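        # With EXPIRE_DAYS=0 the HMAC key has already expired on arrival,
        # so the address must stay unverified.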
user = self._create_user()
email = EmailAddress.objects.create(
user=user,
email='a@b.com',
verified=False,
primary=True)
confirmation = EmailConfirmationHMAC(email)
confirmation.send()
self.assertEqual(len(mail.outbox), 1)
self.client.post(
reverse('account_confirm_email',
args=[confirmation.key]))
email = EmailAddress.objects.get(pk=email.pk)
self.assertFalse(email.verified)
class EmailFormTests(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username='john',
email='john1@doe.org')
self.user.set_password('doe')
self.user.save()
self.email_address = EmailAddress.objects.create(
user=self.user,
email=self.user.email,
verified=True,
primary=True)
self.email_address2 = EmailAddress.objects.create(
user=self.user,
email='john2@doe.org',
verified=False,
primary=False)
self.client.login(username='john', password='doe')
def test_add(self):
resp = self.client.post(
reverse('account_email'),
{'action_add': '',
'email': 'john3@doe.org'})
EmailAddress.objects.get(
email='john3@doe.org',
user=self.user,
verified=False,
primary=False)
self.assertTemplateUsed(resp,
'account/messages/email_confirmation_sent.txt')
def test_ajax_add(self):
resp = self.client.post(
reverse('account_email'),
{'action_add': '',
'email': 'john3@doe.org'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
data = json.loads(resp.content.decode('utf8'))
self.assertEqual(data['location'],
reverse('account_email'))
def test_ajax_add_invalid(self):
resp = self.client.post(
reverse('account_email'),
{'action_add': '',
'email': 'john3#doe.org'},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
data = json.loads(resp.content.decode('utf8'))
self.assertTrue('form_errors' in data)
self.assertTrue('email' in data['form_errors'])
def test_remove_primary(self):
resp = self.client.post(
reverse('account_email'),
{'action_remove': '',
'email': self.email_address.email})
EmailAddress.objects.get(pk=self.email_address.pk)
self.assertTemplateUsed(
resp,
'account/messages/cannot_delete_primary_email.txt')
def test_ajax_remove_primary(self):
resp = self.client.post(
reverse('account_email'),
{'action_remove': '',
'email': self.email_address.email},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertTemplateUsed(
resp,
'account/messages/cannot_delete_primary_email.txt')
data = json.loads(resp.content.decode('utf8'))
self.assertEqual(data['location'],
reverse('account_email'))
def test_remove_secondary(self):
resp = self.client.post(
reverse('account_email'),
{'action_remove': '',
'email': self.email_address2.email})
self.assertRaises(EmailAddress.DoesNotExist,
lambda: EmailAddress.objects.get(
pk=self.email_address2.pk))
self.assertTemplateUsed(
resp,
'account/messages/email_deleted.txt')
def test_set_primary_unverified(self):
resp = self.client.post(
reverse('account_email'),
{'action_primary': '',
'email': self.email_address2.email})
email_address = EmailAddress.objects.get(pk=self.email_address.pk)
email_address2 = EmailAddress.objects.get(pk=self.email_address2.pk)
self.assertFalse(email_address2.primary)
self.assertTrue(email_address.primary)
self.assertTemplateUsed(
resp,
'account/messages/unverified_primary_email.txt')
def test_set_primary(self):
email_address2 = EmailAddress.objects.get(pk=self.email_address2.pk)
email_address2.verified = True
email_address2.save()
resp = self.client.post(
reverse('account_email'),
{'action_primary': '',
'email': self.email_address2.email})
email_address = EmailAddress.objects.get(pk=self.email_address.pk)
email_address2 = EmailAddress.objects.get(pk=self.email_address2.pk)
self.assertFalse(email_address.primary)
self.assertTrue(email_address2.primary)
self.assertTemplateUsed(
resp,
'account/messages/primary_email_set.txt')
def test_verify(self):
resp = self.client.post(
reverse('account_email'),
{'action_send': '',
'email': self.email_address2.email})
self.assertTemplateUsed(
resp,
'account/messages/email_confirmation_sent.txt')
class BaseSignupFormTests(TestCase):
@override_settings(
ACCOUNT_USERNAME_REQUIRED=True,
ACCOUNT_USERNAME_BLACKLIST=['username'])
def test_username_in_blacklist(self):
data = {
'username': 'username',
'email': 'user@example.com',
}
form = BaseSignupForm(data, email_required=True)
self.assertFalse(form.is_valid())
@override_settings(
ACCOUNT_USERNAME_REQUIRED=True,
ACCOUNT_USERNAME_BLACKLIST=['username'])
def test_username_not_in_blacklist(self):
data = {
'username': 'theusername',
'email': 'user@example.com',
}
form = BaseSignupForm(data, email_required=True)
self.assertTrue(form.is_valid())
@override_settings(ACCOUNT_USERNAME_REQUIRED=True)
def test_username_maxlength(self):
data = {
'username': 'username',
'email': 'user@example.com',
}
form = BaseSignupForm(data, email_required=True)
max_length = get_username_max_length()
field = form.fields['username']
self.assertEqual(field.max_length, max_length)
widget = field.widget
self.assertEqual(widget.attrs.get('maxlength'), str(max_length))
@override_settings(
ACCOUNT_USERNAME_REQUIRED=True,
ACCOUNT_SIGNUP_EMAIL_ENTER_TWICE=True)
    def test_signup_email_enter_twice(self):
data = {
'username': 'username',
'email': 'user@example.com',
}
form = BaseSignupForm(data, email_required=True)
self.assertFalse(form.is_valid())
data = {
'username': 'username',
'email1': 'user@example.com',
'email2': 'user@example.com',
}
form = BaseSignupForm(data, email_required=True)
self.assertTrue(form.is_valid())
data['email2'] = 'anotheruser@example.com'
form = BaseSignupForm(data, email_required=True)
self.assertFalse(form.is_valid())
class AuthenticationBackendTests(TestCase):
def setUp(self):
user = get_user_model().objects.create(
is_active=True,
email='john@doe.com',
username='john')
user.set_password(user.username)
user.save()
self.user = user
@override_settings(
ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME) # noqa
def test_auth_by_username(self):
user = self.user
backend = AuthenticationBackend()
self.assertEqual(
backend.authenticate(
username=user.username,
password=user.username).pk,
user.pk)
self.assertEqual(
backend.authenticate(
username=user.email,
password=user.username),
None)
@override_settings(
ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL) # noqa
def test_auth_by_email(self):
user = self.user
backend = AuthenticationBackend()
self.assertEqual(
backend.authenticate(
username=user.email,
password=user.username).pk,
user.pk)
self.assertEqual(
backend.authenticate(
username=user.username,
password=user.username),
None)
@override_settings(
ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL) # noqa
def test_auth_by_username_or_email(self):
user = self.user
backend = AuthenticationBackend()
self.assertEqual(
backend.authenticate(
username=user.email,
password=user.username).pk,
user.pk)
self.assertEqual(
backend.authenticate(
username=user.username,
password=user.username).pk,
user.pk)
class UtilsTests(TestCase):
def setUp(self):
if hasattr(models, 'UUIDField'):
self.user_id = uuid.uuid4().hex
class UUIDUser(AbstractUser):
id = models.UUIDField(primary_key=True,
default=uuid.uuid4,
editable=False)
class Meta(AbstractUser.Meta):
swappable = 'AUTH_USER_MODEL'
else:
UUIDUser = get_user_model()
self.UUIDUser = UUIDUser
@unittest.skipUnless(hasattr(models, 'UUIDField'),
reason="No UUIDField in this django version")
def test_url_str_to_pk_identifies_UUID_as_stringlike(self):
with patch('allauth.account.utils.get_user_model') as mocked_gum:
mocked_gum.return_value = self.UUIDUser
self.assertEqual(url_str_to_user_pk(self.user_id),
self.user_id)
def test_pk_to_url_string_identifies_UUID_as_stringlike(self):
user = self.UUIDUser(
is_active=True,
email='john@doe.com',
username='john')
        self.assertEqual(user_pk_to_url_str(user), str(user.pk))
| 40.093074 | 95 | 0.582195 | true | true |
f72476a51168fd61ef40256da67527b38bad600a | 5,368 | py | Python | core/cache/decorator.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 84 | 2017-10-22T11:01:39.000Z | 2022-02-27T03:43:48.000Z | core/cache/decorator.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 22 | 2017-12-11T07:21:56.000Z | 2021-09-23T02:53:50.000Z | core/cache/decorator.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 23 | 2017-12-06T06:59:52.000Z | 2022-02-24T00:02:25.000Z | # ----------------------------------------------------------------------
# Decorators
# ----------------------------------------------------------------------
# Copyright (C) 2007-2020 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# NOC modules
from noc.core.perf import metrics
from .base import cache as x_cache
def cachedmethod(cache=None, key="cache-%s", lock=None, ttl=None, version=0):
"""
    Decorator to wrap a class method with a memoizing callable.
    :param cache: Callable returning an in-memory mapping which follows
                  the dict protocol, or None when no in-memory caching
                  is required
:param key: Key mask to convert args to string
:param lock: Callable to get threading lock
:param ttl: Record time-to-live
:param version: External cache version
:return:
"""
def decorator(method):
if lock:
def wrapper(self, *args, **kwargs):
perf_key = key.replace("-%s", "").replace("-", "_")
perf_key_requests = metrics["cache_requests", ("cache_key", perf_key)]
perf_key_l1_hits = metrics[
"cache_hits", ("cache_key", perf_key), ("cache_level", "internal")
]
perf_key_l2_hits = metrics[
"cache_hits", ("cache_key", perf_key), ("cache_level", "external")
]
perf_key_misses = metrics["cache_misses", ("cache_key", perf_key)]
perf_key_lock_acquires = metrics["cache_locks_acquires", ("cache_key", perf_key)]
perf_key_requests += 1
k = key % args
with lock(self):
perf_key_lock_acquires += 1
if cache:
# Try in-memory cache
c = cache(self)
if c is not None:
# In-memory cache provided
try:
v = c[k]
perf_key_l1_hits += 1
return v
except KeyError:
pass
# Try external cache
v = x_cache.get(k, version=version)
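                # NB: falsy cached values (None, 0, '') are
                # indistinguishable from misses here and will be
                # recomputed on every call.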
if v:
perf_key_l2_hits += 1
                    if cache and c is not None:  # cache(self) may return None
with lock(self):
perf_key_lock_acquires += 1
# Backfill in-memory cache
try:
c[k] = v
except ValueError:
pass # Value too large
return v
# Fallback to function
perf_key_misses += 1
v = method(self, *args, **kwargs)
with lock(self):
perf_key_lock_acquires += 1
                    if cache and c is not None:
# Backfill in-memory cache
try:
c[k] = v
except ValueError:
pass
# Backfill external cache
x_cache.set(k, v, ttl=ttl, version=version)
# Done
return v
else:
def wrapper(self, *args, **kwargs):
perf_key = key.replace("-%s", "").replace("-", "_")
perf_key_requests = metrics["cache_requests", ("cache_key", perf_key)]
perf_key_l1_hits = metrics[
"cache_hits", ("cache_key", perf_key), ("cache_level", "internal")
]
perf_key_l2_hits = metrics[
"cache_hits", ("cache_key", perf_key), ("cache_level", "external")
]
perf_key_misses = metrics["cache_misses", ("cache_key", perf_key)]
perf_key_requests += 1
k = key % args
if cache:
# Try in-memory cache
c = cache(self)
if c is not None:
# In-memory cache provided
try:
v = c[k]
perf_key_l1_hits += 1
return v
except KeyError:
pass
# Try external cache
v = x_cache.get(k, version=version)
if v:
perf_key_l2_hits += 1
                    if cache and c is not None:  # cache(self) may return None
# Backfill in-memory cache
try:
c[k] = v
except ValueError:
pass # Value too large
return v
# Fallback to function
perf_key_misses += 1
v = method(self, *args, **kwargs)
                if cache and c is not None:
# Backfill in-memory cache
try:
c[k] = v
except ValueError:
pass
# Backfill external cache
x_cache.set(k, v, ttl=ttl, version=version)
# Done
return v
return wrapper
return decorator
| 39.182482 | 97 | 0.404434 | true | true |
f72476bf2e961b26c53e96e9358bb4c0a54239b7 | 8,355 | py | Python | tron/Vocab/hubCommands.py | sdss/tron | 886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322 | [
"BSD-3-Clause"
] | null | null | null | tron/Vocab/hubCommands.py | sdss/tron | 886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322 | [
"BSD-3-Clause"
] | null | null | null | tron/Vocab/hubCommands.py | sdss/tron | 886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322 | [
"BSD-3-Clause"
] | null | null | null | __all__ = ['hubCommands']
import sys
import Vocab.InternalCmd as InternalCmd
from tron import Misc, g, hub
from tron.Hub.KV.KVDict import kvAsASCII
class hubCommands(InternalCmd.InternalCmd):
""" All the commands that the "hub" package provides.
The user executes these from the command window:
hub startNubs tspec
hub status
etc.
"""
def __init__(self, **argv):
argv['safeCmds'] = r'^\s*(actors|commanders|actorInfo|version|status|ping)\s*$'
argv['needsAuth'] = True
InternalCmd.InternalCmd.__init__(self, 'hub', **argv)
self.commands = {
'actors': self.actors,
'commanders': self.commanders,
'restart!': self.reallyReallyRestart,
'startNubs': self.startNubs,
'stopNubs': self.stopNubs,
'actorInfo': self.actorInfo,
'commands': self.commandInfo,
'setUsername': self.setUsername,
'status': self.status,
'loadWords': self.loadWords,
'getKeys': self.getKeys,
'listen': self.doListen,
'version': self.version,
'ping': self.status,
'relog': self.relog,
}
def version(self, cmd, finish=True):
""" Return the hub's version number. """
hub.getSetHubVersion()
vString = 'version=%s' % (g.KVs.getKV('hub', 'version', default='Unknown'))
if finish:
cmd.finish(vString)
else:
cmd.inform(vString)
def doListen(self, cmd):
""" Change what replies get sent to us. """
matched, unmatched, leftovers = cmd.match([('listen', None), ('addActors', None),
('delActors', None)])
cmdr = cmd.cmdr()
if not cmdr:
cmd.fail('debug=%s' % (Misc.qstr('cmdr=%s; cmd=%s' % (cmdr, cmd))))
return
Misc.log('doListen', 'start: %s' % (cmdr.taster))
Misc.log('doListen', 'leftovers: %s' % (leftovers))
if 'addActors' in matched:
actors = list(leftovers.keys())
Misc.log('doListen', 'addActors: %s' % (actors))
# cmd.inform('text="%s"' % (Misc.qstr("adding actors: %s" % (actors))))
cmdr.taster.addToFilter(actors, [], actors)
cmd.finish()
elif 'delActors' in matched:
actors = list(leftovers.keys())
Misc.log('doListen', 'delActors: %s' % (actors))
# cmd.inform('text="%s"' % (Misc.qstr("removing actors: %s" % (actors))))
cmdr.taster.removeFromFilter(actors, [], actors)
cmd.finish()
else:
cmd.fail('text="unknown listen command"')
Misc.log('doListen', 'finish: %s' % (cmdr.taster))
def actors(self, cmd, finish=True):
""" Return a list of the currently connected actors. """
g.actors.listSelf(cmd=cmd)
if finish:
cmd.finish('')
def commanders(self, cmd, finish=True):
""" Return a list of the currently connected commanders. """
g.commanders.listSelf(cmd=cmd)
if finish:
cmd.finish('')
def status(self, cmd, finish=True):
Misc.cfg.flush()
self.version(cmd, finish=False)
self.actors(cmd, finish=False)
self.commanders(cmd, finish=False)
if finish:
cmd.finish('')
def setUsername(self, cmd):
""" Change the username for the cmd's commander. """
args = cmd.cmd.split()
args = args[1:]
if len(args) != 1:
cmd.fail('cmdError="usage: setUsername newname"')
return
username = args[0]
cmdr = cmd.cmdr()
cmdr.setName(username)
cmd.finish('')
def stopNubs(self, cmd):
""" stop a list of nubs. """
nubs = list(cmd.argDict.keys())[1:]
if len(nubs) == 0:
cmd.fail('text="must specify one or more nubs to stop..."')
return
for nub in nubs:
try:
cmd.inform('text=%s' % (Misc.qstr('stopping nub %s' % (nub))))
hub.stopNub(nub)
except Exception as e:
cmd.warn('text=%s' % (Misc.qstr('failed to stop nub %s: %s' % (nub, e))))
cmd.finish('')
def startNubs(self, cmd):
""" (re-)start a list of nubs. """
        # Flush the configuration to force a reload later. This allows the
        # configuration or nubs to be changed at runtime without restarting tron.
Misc.cfg.flush()
nubs = list(cmd.argDict.keys())[1:]
if len(nubs) == 0:
cmd.fail('text="must specify one or more nubs to start..."')
return
for nub in nubs:
try:
cmd.inform('text=%s' % (Misc.qstr('(re-)starting nub %s' % (nub))))
hub.startNub(nub)
except Exception as e:
cmd.warn('text=%s' % (Misc.qstr('failed to start nub %s: %s' % (nub, e))))
cmd.finish('')
def actorInfo(self, cmd):
""" Get gory status about a list of actor nubs. """
# Query all actors if none are specified.
names = list(cmd.argDict.keys())[1:]
if len(names) == 0:
names = list(g.actors.keys())
for n in names:
try:
nub = g.actors[n]
nub.statusCmd(cmd, doFinish=False)
except Exception as e:
cmd.warn('text=%s' % (Misc.qstr('failed to query actor %s: %s' % (n, e))))
cmd.finish('')
def commandInfo(self, cmd):
""" Get gory status about a list of actor nubs. """
# Query all actors if none are specified.
names = list(cmd.argDict.keys())[1:]
if len(names) == 0:
names = list(g.actors.keys())
for n in names:
try:
nub = g.actors[n]
nub.listCommandsCmd(cmd, doFinish=False)
except Exception as e:
cmd.warn('text=%s' % (Misc.qstr('failed to query actor %s: %s' % (n, e))))
cmd.finish('')
def loadWords(self, cmd, finish=True):
""" (re-)load an internal vocabulary word. """
words = list(cmd.argDict.keys())[1:]
if len(words) == 0:
words = None
Misc.log('hubCmd', 'loadWords loading %s' % (words))
try:
hub.loadWords(words)
except Exception as e:
Misc.tback('hub.loadWords', e)
cmd.fail('text=%s' % (Misc.qstr(e)))
return
if finish:
cmd.finish()
def getKeys(self, cmd):
""" Return a bunch of keys for a given source.
Cmd args:
src - a key source name.
keys - 1 or more key names.
"""
words = cmd.cmd.split()
if len(words) < 3:
cmd.fail('text="usage: getKeys srcName key1 [key2 ... keyN]"')
return
src = words[1]
keys = words[2:]
matched, unmatched = g.KVs.getValues(src, keys)
Misc.log('hub.getKeys', 'matched=%s unmatched=%s' % (matched, unmatched))
for k, v in matched.items():
kvString = kvAsASCII(k, v)
cmd.inform(kvString, src='hub.%s' % (src))
if unmatched:
cmd.warn('text=%s' % (Misc.qstr('unmatched %s keys: %s' %
(src, ', '.join(unmatched)))))
cmd.finish('')
def reallyReallyRestart(self, cmd):
""" Restart the entire MC. Which among other things kills us now. """
cmd.warn('text=%s' %
(Misc.qstr('Restarting the hub now... bye, bye, and please call back soon!')))
# Give the poller a chance to flush out the warning.
g.poller.callMeIn(hub.restart, 1.0)
def relog(self, cmd):
""" Change where stderr goes to. """
args = cmd.cmd.split()
args = args[1:]
if len(args) != 1:
cmd.fail('cmdError="usage: relog filename"')
return
filename = args[0]
import os
f = open(filename, 'a', 1)
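        # Repoint the process-level stdout/stderr file descriptors (1 and 2)
        # at the new log file, then rebuild the Python-level wrappers on top
        # of them; the duplicated descriptors keep the file open after close.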
os.dup2(f.fileno(), 1)
os.dup2(f.fileno(), 2)
sys.stdout = os.fdopen(1, 'w', 1)
sys.stderr = os.fdopen(2, 'w', 1)
f.close()
cmd.finish('text="Jeebus, you done it now, whatever it was"')
f7247748c768db7db41eded5f66dc4d97b47480c | 964 | py | Python | mats/model/components/gmm2d.py | StanfordASL/MATS | b31a86eb56728fc6025c71c7202ab425b078e3e5 | ["MIT"] | 21 | 2020-12-03T05:27:19.000Z | 2022-01-18T02:24:22.000Z | mats/model/components/gmm2d.py | StanfordASL/MATS | b31a86eb56728fc6025c71c7202ab425b078e3e5 | ["MIT"] | 1 | 2022-03-29T14:51:51.000Z | 2022-03-29T14:51:51.000Z | mats/model/components/gmm2d.py | StanfordASL/MATS | b31a86eb56728fc6025c71c7202ab425b078e3e5 | ["MIT"] | 5 | 2021-01-09T18:12:47.000Z | 2022-03-22T11:45:56.000Z |
import torch
import torch.distributions as td
class GMM2D(td.MixtureSameFamily):
def __init__(self, mixture_distribution, component_distribution):
super(GMM2D, self).__init__(mixture_distribution, component_distribution)
def mode_mode(self):
mode_k = torch.argmax(self.mixture_distribution.probs[0, 0]).item()
mode_gaussian = self.component_distribution.mean[:, 0, mode_k, :2]
return mode_gaussian
def position_log_prob(self, x):
# Computing the log probability over only the positions.
component_dist = td.MultivariateNormal(loc=self.component_distribution.mean[..., :2],
scale_tril=self.component_distribution.scale_tril[..., :2, :2])
position_dist = td.MixtureSameFamily(self.mixture_distribution, component_dist)
return position_dist.log_prob(x)
@property
def pis(self):
return self.mixture_distribution.probs[0, 0]
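
def _example_gmm2d():
    # A minimal usage sketch, not part of the original module: one batch
    # element and one timestep with two mixture components over 2-D positions,
    # matching the [batch, time, component, dim] indexing that mode_mode and
    # pis assume above. All numbers here are made up.
    mixture = td.Categorical(probs=torch.tensor([[[0.7, 0.3]]]))
    components = td.MultivariateNormal(
        loc=torch.zeros(1, 1, 2, 2),
        scale_tril=torch.eye(2).expand(1, 1, 2, 2, 2),
    )
    gmm = GMM2D(mixture, components)
    return gmm.pis, gmm.mode_mode()
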
f72477b35e362dbce3d14f182fe8455bd0352f5f | 3,916 | py | Python | ironic/objects/base.py | hpproliant/ironic | 4f62cd97196b2a0068700ffb17456912147778d0 | ["Apache-2.0"] | null | null | null | ironic/objects/base.py | hpproliant/ironic | 4f62cd97196b2a0068700ffb17456912147778d0 | ["Apache-2.0"] | null | null | null | ironic/objects/base.py | hpproliant/ironic | 4f62cd97196b2a0068700ffb17456912147778d0 | ["Apache-2.0"] | null | null | null |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Ironic common internal object model"""
from oslo_versionedobjects import base as object_base
from ironic.objects import fields as object_fields
class IronicObjectRegistry(object_base.VersionedObjectRegistry):
pass
class IronicObject(object_base.VersionedObject):
"""Base class and object factory.
This forms the base of all objects that can be remoted or instantiated
via RPC. Simply defining a class that inherits from this base class
will make it remotely instantiatable. Objects should implement the
necessary "get" classmethod routines as well as "save" object methods
as appropriate.
"""
OBJ_SERIAL_NAMESPACE = 'ironic_object'
OBJ_PROJECT_NAMESPACE = 'ironic'
# TODO(lintan) Refactor these fields and create PersistentObject and
# TimeStampObject like Nova when it is necessary.
fields = {
'created_at': object_fields.DateTimeField(nullable=True),
'updated_at': object_fields.DateTimeField(nullable=True),
}
def as_dict(self):
return dict((k, getattr(self, k))
for k in self.fields
if hasattr(self, k))
def obj_refresh(self, loaded_object):
"""Applies updates for objects that inherit from base.IronicObject.
Checks for updated attributes in an object. Updates are applied from
the loaded object column by column in comparison with the current
object.
"""
for field in self.fields:
if (self.obj_attr_is_set(field) and
self[field] != loaded_object[field]):
self[field] = loaded_object[field]
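
# A minimal sketch of what the IronicObject docstring describes; this class is
# not part of the original file and the 'deployed_at' field is hypothetical.
# Simply subclassing IronicObject (and registering the class) is what makes it
# remotely instantiatable.
@IronicObjectRegistry.register
class ExampleObject(IronicObject):
    fields = dict(IronicObject.fields,
                  deployed_at=object_fields.DateTimeField(nullable=True))
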
class IronicObjectIndirectionAPI(object_base.VersionedObjectIndirectionAPI):
def __init__(self):
super(IronicObjectIndirectionAPI, self).__init__()
# FIXME(xek): importing here due to a cyclical import error
from ironic.conductor import rpcapi as conductor_api
self._conductor = conductor_api.ConductorAPI()
def object_action(self, context, objinst, objmethod, args, kwargs):
return self._conductor.object_action(context, objinst, objmethod,
args, kwargs)
def object_class_action(self, context, objname, objmethod, objver,
args, kwargs):
# NOTE(xek): This method is implemented for compatibility with
# oslo.versionedobjects 0.10.0 and older. It will be replaced by
# object_class_action_versions.
versions = object_base.obj_tree_get_versions(objname)
return self.object_class_action_versions(
context, objname, objmethod, versions, args, kwargs)
def object_class_action_versions(self, context, objname, objmethod,
object_versions, args, kwargs):
return self._conductor.object_class_action_versions(
context, objname, objmethod, object_versions, args, kwargs)
def object_backport_versions(self, context, objinst, object_versions):
return self._conductor.object_backport_versions(context, objinst,
object_versions)
class IronicObjectSerializer(object_base.VersionedObjectSerializer):
# Base class to use for object hydration
OBJ_BASE_CLASS = IronicObject
f7247810b61f545dbbf06766dccc172c15e03ef6 | 62,418 | py | Python | superset/db_engine_specs.py | emacip/incubator-superset | 594cd7096070a742209851ff9112d5bf4d16a7be | ["Apache-2.0", "CC-BY-4.0", "MIT"] | 1 | 2019-02-05T04:53:37.000Z | 2019-02-05T04:53:37.000Z | superset/db_engine_specs.py | mankoven/incubator-superset | bab7ee7ecf222250287e591d91b38be583c9a2f3 | ["Apache-2.0", "CC-BY-4.0", "MIT"] | 7 | 2021-02-02T23:08:19.000Z | 2022-03-29T22:28:16.000Z | superset/db_engine_specs.py | g4brielvs/incubator-superset | 83ee9178328c5193808fe356ceb3090a299477f6 | ["Apache-2.0", "CC-BY-4.0", "MIT"] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
"""Compatibility layer for different database engines
This module stores logic specific to different database engines: things
like time-related functions that are similar but not identical, and
information on whether and how to expose certain features.
For instance, Hive/Presto support partitions and have a specific API to
list them. Other databases like Vertica also support partitions but
expose a different API to get to them. Other databases don't support
partitions at all. The classes here use a common interface to specify all
of this.
The general idea is to use static classes and an inheritance scheme.
"""
from collections import namedtuple
import hashlib
import inspect
import logging
import os
import re
import textwrap
import time
from flask import g
from flask_babel import lazy_gettext as _
import pandas
import sqlalchemy as sqla
from sqlalchemy import Column, select
from sqlalchemy.engine import create_engine
from sqlalchemy.engine.url import make_url
from sqlalchemy.sql import quoted_name, text
from sqlalchemy.sql.expression import TextAsFrom
import sqlparse
from werkzeug.utils import secure_filename
from superset import app, conf, db, sql_parse
from superset.exceptions import SupersetTemplateException
from superset.utils import core as utils
QueryStatus = utils.QueryStatus
config = app.config
tracking_url_trans = conf.get('TRACKING_URL_TRANSFORMER')
hive_poll_interval = conf.get('HIVE_POLL_INTERVAL')
Grain = namedtuple('Grain', 'name label function duration')
builtin_time_grains = {
None: 'Time Column',
'PT1S': 'second',
'PT1M': 'minute',
'PT5M': '5 minute',
'PT10M': '10 minute',
'PT15M': '15 minute',
'PT0.5H': 'half hour',
'PT1H': 'hour',
'P1D': 'day',
'P1W': 'week',
'P1M': 'month',
'P0.25Y': 'quarter',
'P1Y': 'year',
'1969-12-28T00:00:00Z/P1W': 'week_start_sunday',
'1969-12-29T00:00:00Z/P1W': 'week_start_monday',
'P1W/1970-01-03T00:00:00Z': 'week_ending_saturday',
'P1W/1970-01-04T00:00:00Z': 'week_ending_sunday',
}
def _create_time_grains_tuple(time_grains, time_grain_functions, blacklist):
ret_list = []
blacklist = blacklist if blacklist else []
for duration, func in time_grain_functions.items():
if duration not in blacklist:
name = time_grains.get(duration)
ret_list.append(Grain(name, _(name), func, duration))
return tuple(ret_list)
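
# A small illustration of the tuple built above; this helper is not part of
# the original module and the single-grain inputs are made up:
def _example_time_grains_tuple():
    grains = {'PT1H': 'hour'}
    functions = {'PT1H': "DATE_TRUNC('hour', {col})"}
    # Returns a one-element tuple:
    # (Grain('hour', _('hour'), "DATE_TRUNC('hour', {col})", 'PT1H'),)
    return _create_time_grains_tuple(grains, functions, None)
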
class LimitMethod(object):
    """Enumerates the ways that limits can be applied"""
FETCH_MANY = 'fetch_many'
WRAP_SQL = 'wrap_sql'
FORCE_LIMIT = 'force_limit'
class BaseEngineSpec(object):
"""Abstract class for database engine specific configurations"""
engine = 'base' # str as defined in sqlalchemy.engine.engine
time_grain_functions = {}
time_groupby_inline = False
limit_method = LimitMethod.FORCE_LIMIT
time_secondary_columns = False
inner_joins = True
allows_subquery = True
force_column_alias_quotes = False
arraysize = None
@classmethod
def get_time_grains(cls):
blacklist = config.get('TIME_GRAIN_BLACKLIST', [])
grains = builtin_time_grains.copy()
grains.update(config.get('TIME_GRAIN_ADDONS', {}))
grain_functions = cls.time_grain_functions.copy()
grain_addon_functions = config.get('TIME_GRAIN_ADDON_FUNCTIONS', {})
grain_functions.update(grain_addon_functions.get(cls.engine, {}))
return _create_time_grains_tuple(grains, grain_functions, blacklist)
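    # For instance (illustrative), a deployment can hide second-level grains
    # everywhere by putting TIME_GRAIN_BLACKLIST = ['PT1S'] in its config, or
    # register extra engine-specific grains through TIME_GRAIN_ADDONS and
    # TIME_GRAIN_ADDON_FUNCTIONS, which the merge above picks up.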
@classmethod
def fetch_data(cls, cursor, limit):
if cls.arraysize:
cursor.arraysize = cls.arraysize
if cls.limit_method == LimitMethod.FETCH_MANY:
return cursor.fetchmany(limit)
return cursor.fetchall()
@classmethod
def epoch_to_dttm(cls):
raise NotImplementedError()
@classmethod
def epoch_ms_to_dttm(cls):
return cls.epoch_to_dttm().replace('{col}', '({col}/1000.000)')
@classmethod
def get_datatype(cls, type_code):
if isinstance(type_code, str) and len(type_code):
return type_code.upper()
@classmethod
def extra_table_metadata(cls, database, table_name, schema_name):
"""Returns engine-specific table metadata"""
return {}
@classmethod
def apply_limit_to_sql(cls, sql, limit, database):
"""Alters the SQL statement to apply a LIMIT clause"""
if cls.limit_method == LimitMethod.WRAP_SQL:
sql = sql.strip('\t\n ;')
qry = (
select('*')
.select_from(
TextAsFrom(text(sql), ['*']).alias('inner_qry'),
)
.limit(limit)
)
return database.compile_sqla_query(qry)
        elif cls.limit_method == LimitMethod.FORCE_LIMIT:
            parsed_query = sql_parse.ParsedQuery(sql)
            sql = parsed_query.get_query_with_new_limit(limit)
        return sql
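    # Illustrative behaviour (table and database names hypothetical): with the
    # default FORCE_LIMIT method,
    #     apply_limit_to_sql('SELECT * FROM t LIMIT 1000', 100, database)
    # rewrites the trailing clause so the query ends in LIMIT 100, while
    # WRAP_SQL engines such as Oracle get the original statement wrapped in an
    # outer select with the limit applied by the dialect.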
@classmethod
def get_limit_from_sql(cls, sql):
parsed_query = sql_parse.ParsedQuery(sql)
return parsed_query.limit
@classmethod
def get_query_with_new_limit(cls, sql, limit):
parsed_query = sql_parse.ParsedQuery(sql)
return parsed_query.get_query_with_new_limit(limit)
@staticmethod
def csv_to_df(**kwargs):
kwargs['filepath_or_buffer'] = \
config['UPLOAD_FOLDER'] + kwargs['filepath_or_buffer']
kwargs['encoding'] = 'utf-8'
kwargs['iterator'] = True
chunks = pandas.read_csv(**kwargs)
        df = pandas.concat(chunk for chunk in chunks)
return df
@staticmethod
def df_to_db(df, table, **kwargs):
df.to_sql(**kwargs)
table.user_id = g.user.id
table.schema = kwargs['schema']
table.fetch_metadata()
db.session.add(table)
db.session.commit()
@staticmethod
def create_table_from_csv(form, table):
def _allowed_file(filename):
# Only allow specific file extensions as specified in the config
extension = os.path.splitext(filename)[1]
return extension and extension[1:] in config['ALLOWED_EXTENSIONS']
filename = secure_filename(form.csv_file.data.filename)
if not _allowed_file(filename):
raise Exception('Invalid file type selected')
kwargs = {
'filepath_or_buffer': filename,
'sep': form.sep.data,
'header': form.header.data if form.header.data else 0,
'index_col': form.index_col.data,
'mangle_dupe_cols': form.mangle_dupe_cols.data,
'skipinitialspace': form.skipinitialspace.data,
'skiprows': form.skiprows.data,
'nrows': form.nrows.data,
'skip_blank_lines': form.skip_blank_lines.data,
'parse_dates': form.parse_dates.data,
'infer_datetime_format': form.infer_datetime_format.data,
'chunksize': 10000,
}
df = BaseEngineSpec.csv_to_df(**kwargs)
df_to_db_kwargs = {
'table': table,
'df': df,
'name': form.name.data,
'con': create_engine(form.con.data.sqlalchemy_uri_decrypted, echo=False),
'schema': form.schema.data,
'if_exists': form.if_exists.data,
'index': form.index.data,
'index_label': form.index_label.data,
'chunksize': 10000,
}
BaseEngineSpec.df_to_db(**df_to_db_kwargs)
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def fetch_result_sets(cls, db, datasource_type):
"""Returns a list of tables [schema1.table1, schema2.table2, ...]
Datasource_type can be 'table' or 'view'.
        Empty schema corresponds to the list of full names of all the
        tables or views: <schema>.<result_set_name>.
"""
schemas = db.all_schema_names(cache=db.schema_cache_enabled,
cache_timeout=db.schema_cache_timeout,
force=True)
all_result_sets = []
for schema in schemas:
if datasource_type == 'table':
all_datasource_names = db.all_table_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
elif datasource_type == 'view':
all_datasource_names = db.all_view_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
all_result_sets += [
'{}.{}'.format(schema, t) for t in all_datasource_names]
return all_result_sets
@classmethod
def handle_cursor(cls, cursor, query, session):
"""Handle a live cursor between the execute and fetchall calls
The flow works without this method doing anything, but it allows
for handling the cursor and updating progress information in the
query object"""
pass
@classmethod
def extract_error_message(cls, e):
"""Extract error message for queries"""
return utils.error_msg_from_exception(e)
@classmethod
def adjust_database_uri(cls, uri, selected_schema):
"""Based on a URI and selected schema, return a new URI
The URI here represents the URI as entered when saving the database,
``selected_schema`` is the schema currently active presumably in
the SQL Lab dropdown. Based on that, for some database engine,
we can return a new altered URI that connects straight to the
active schema, meaning the users won't have to prefix the object
names by the schema name.
        Some database engines have two levels of namespacing: database and
        schema (postgres, oracle, mssql, ...)
        For those it's probably better to not alter the database
        component of the URI with the schema name; it won't work.
Some database drivers like presto accept '{catalog}/{schema}' in
the database component of the URL, that can be handled here.
"""
return uri
@classmethod
def patch(cls):
pass
@classmethod
def get_schema_names(cls, inspector):
return sorted(inspector.get_schema_names())
@classmethod
def get_table_names(cls, inspector, schema):
return sorted(inspector.get_table_names(schema))
@classmethod
def get_view_names(cls, inspector, schema):
return sorted(inspector.get_view_names(schema))
@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
return False
@classmethod
def _get_fields(cls, cols):
return [sqla.column(c.get('name')) for c in cols]
@classmethod
def select_star(cls, my_db, table_name, engine, schema=None, limit=100,
show_cols=False, indent=True, latest_partition=True,
cols=None):
fields = '*'
cols = cols or []
if (show_cols or latest_partition) and not cols:
cols = my_db.get_columns(table_name, schema)
if show_cols:
fields = cls._get_fields(cols)
quote = engine.dialect.identifier_preparer.quote
if schema:
full_table_name = quote(schema) + '.' + quote(table_name)
else:
full_table_name = quote(table_name)
qry = select(fields).select_from(text(full_table_name))
if limit:
qry = qry.limit(limit)
if latest_partition:
partition_query = cls.where_latest_partition(
table_name, schema, my_db, qry, columns=cols)
if partition_query != False: # noqa
qry = partition_query
sql = my_db.compile_sqla_query(qry)
if indent:
sql = sqlparse.format(sql, reindent=True)
return sql
@classmethod
def modify_url_for_impersonation(cls, url, impersonate_user, username):
"""
Modify the SQL Alchemy URL object with the user to impersonate if applicable.
:param url: SQLAlchemy URL object
:param impersonate_user: Bool indicating if impersonation is enabled
:param username: Effective username
"""
if impersonate_user is not None and username is not None:
url.username = username
@classmethod
def get_configuration_for_impersonation(cls, uri, impersonate_user, username):
"""
Return a configuration dictionary that can be merged with other configs
that can set the correct properties for impersonating users
:param uri: URI string
:param impersonate_user: Bool indicating if impersonation is enabled
:param username: Effective username
:return: Dictionary with configs required for impersonation
"""
return {}
@classmethod
def execute(cls, cursor, query, **kwargs):
if cls.arraysize:
cursor.arraysize = cls.arraysize
cursor.execute(query)
@classmethod
def make_label_compatible(cls, label):
"""
Conditionally mutate and/or quote a sql column/expression label. If
force_column_alias_quotes is set to True, return the label as a
sqlalchemy.sql.elements.quoted_name object to ensure that the select query
and query results have same case. Otherwise return the mutated label as a
regular string.
"""
label = cls.mutate_label(label)
return quoted_name(label, True) if cls.force_column_alias_quotes else label
@staticmethod
def mutate_label(label):
"""
Most engines support mixed case aliases that can include numbers
and special characters, like commas, parentheses etc. For engines that
have restrictions on what types of aliases are supported, this method
can be overridden to ensure that labels conform to the engine's
limitations. Mutated labels should be deterministic (input label A always
yields output label X) and unique (input labels A and B don't yield the same
output label X).
"""
return label
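
# A quick sketch of the label handling above; this helper is not part of the
# original module and the label strings are arbitrary examples:
def _example_make_label_compatible():
    # Engines without forced quoting hand the label back as a plain str.
    plain = BaseEngineSpec.make_label_compatible('sum__num')
    # Engines with force_column_alias_quotes = True (e.g. Oracle, Snowflake)
    # return a quoted_name so the alias keeps its case in the result set.
    quoted = OracleEngineSpec.make_label_compatible('SUM__num')
    return plain, quoted
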
class PostgresBaseEngineSpec(BaseEngineSpec):
""" Abstract class for Postgres 'like' databases """
engine = ''
time_grain_functions = {
None: '{col}',
'PT1S': "DATE_TRUNC('second', {col}) AT TIME ZONE 'UTC'",
'PT1M': "DATE_TRUNC('minute', {col}) AT TIME ZONE 'UTC'",
'PT1H': "DATE_TRUNC('hour', {col}) AT TIME ZONE 'UTC'",
'P1D': "DATE_TRUNC('day', {col}) AT TIME ZONE 'UTC'",
'P1W': "DATE_TRUNC('week', {col}) AT TIME ZONE 'UTC'",
'P1M': "DATE_TRUNC('month', {col}) AT TIME ZONE 'UTC'",
'P0.25Y': "DATE_TRUNC('quarter', {col}) AT TIME ZONE 'UTC'",
'P1Y': "DATE_TRUNC('year', {col}) AT TIME ZONE 'UTC'",
}
@classmethod
def fetch_data(cls, cursor, limit):
if not cursor.description:
return []
if cls.limit_method == LimitMethod.FETCH_MANY:
return cursor.fetchmany(limit)
return cursor.fetchall()
@classmethod
def epoch_to_dttm(cls):
return "(timestamp 'epoch' + {col} * interval '1 second')"
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
class PostgresEngineSpec(PostgresBaseEngineSpec):
engine = 'postgresql'
@classmethod
def get_table_names(cls, inspector, schema):
"""Need to consider foreign tables for PostgreSQL"""
tables = inspector.get_table_names(schema)
tables.extend(inspector.get_foreign_table_names(schema))
return sorted(tables)
class SnowflakeEngineSpec(PostgresBaseEngineSpec):
engine = 'snowflake'
force_column_alias_quotes = True
time_grain_functions = {
None: '{col}',
'PT1S': "DATE_TRUNC('SECOND', {col})",
'PT1M': "DATE_TRUNC('MINUTE', {col})",
'PT5M': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 5) * 5, \
DATE_TRUNC('HOUR', {col}))",
'PT10M': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 10) * 10, \
DATE_TRUNC('HOUR', {col}))",
'PT15M': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 15) * 15, \
DATE_TRUNC('HOUR', {col}))",
'PT0.5H': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 30) * 30, \
DATE_TRUNC('HOUR', {col}))",
'PT1H': "DATE_TRUNC('HOUR', {col})",
'P1D': "DATE_TRUNC('DAY', {col})",
'P1W': "DATE_TRUNC('WEEK', {col})",
'P1M': "DATE_TRUNC('MONTH', {col})",
'P0.25Y': "DATE_TRUNC('QUARTER', {col})",
'P1Y': "DATE_TRUNC('YEAR', {col})",
}
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
database = uri.database
if '/' in uri.database:
database = uri.database.split('/')[0]
if selected_schema:
uri.database = database + '/' + selected_schema
return uri
class VerticaEngineSpec(PostgresBaseEngineSpec):
engine = 'vertica'
class RedshiftEngineSpec(PostgresBaseEngineSpec):
engine = 'redshift'
@staticmethod
def mutate_label(label):
"""
Redshift only supports lowercase column names and aliases.
:param str label: Original label which might include uppercase letters
:return: String that is supported by the database
"""
return label.lower()
class OracleEngineSpec(PostgresBaseEngineSpec):
engine = 'oracle'
limit_method = LimitMethod.WRAP_SQL
force_column_alias_quotes = True
time_grain_functions = {
None: '{col}',
'PT1S': 'CAST({col} as DATE)',
'PT1M': "TRUNC(CAST({col} as DATE), 'MI')",
'PT1H': "TRUNC(CAST({col} as DATE), 'HH')",
'P1D': "TRUNC(CAST({col} as DATE), 'DDD')",
'P1W': "TRUNC(CAST({col} as DATE), 'WW')",
'P1M': "TRUNC(CAST({col} as DATE), 'MONTH')",
'P0.25Y': "TRUNC(CAST({col} as DATE), 'Q')",
'P1Y': "TRUNC(CAST({col} as DATE), 'YEAR')",
}
@classmethod
def convert_dttm(cls, target_type, dttm):
return (
"""TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')"""
).format(dttm.isoformat())
@staticmethod
def mutate_label(label):
"""
Oracle 12.1 and earlier support a maximum of 30 byte length object names, which
usually means 30 characters.
:param str label: Original label which might include unsupported characters
:return: String that is supported by the database
"""
if len(label) > 30:
hashed_label = hashlib.md5(label.encode('utf-8')).hexdigest()
# truncate the hash to first 30 characters
return hashed_label[:30]
return label
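    # Illustrative: a label longer than 30 characters, e.g.
    # 'sum__a_very_long_metric_expression', is replaced by the first 30 hex
    # characters of its md5 digest, keeping the alias deterministic while
    # fitting Oracle's limit.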
class Db2EngineSpec(BaseEngineSpec):
engine = 'ibm_db_sa'
limit_method = LimitMethod.WRAP_SQL
force_column_alias_quotes = True
time_grain_functions = {
None: '{col}',
'PT1S': 'CAST({col} as TIMESTAMP)'
' - MICROSECOND({col}) MICROSECONDS',
'PT1M': 'CAST({col} as TIMESTAMP)'
' - SECOND({col}) SECONDS'
' - MICROSECOND({col}) MICROSECONDS',
'PT1H': 'CAST({col} as TIMESTAMP)'
' - MINUTE({col}) MINUTES'
' - SECOND({col}) SECONDS'
' - MICROSECOND({col}) MICROSECONDS ',
'P1D': 'CAST({col} as TIMESTAMP)'
' - HOUR({col}) HOURS'
' - MINUTE({col}) MINUTES'
' - SECOND({col}) SECONDS'
' - MICROSECOND({col}) MICROSECONDS',
'P1W': '{col} - (DAYOFWEEK({col})) DAYS',
'P1M': '{col} - (DAY({col})-1) DAYS',
'P0.25Y': '{col} - (DAY({col})-1) DAYS'
' - (MONTH({col})-1) MONTHS'
' + ((QUARTER({col})-1) * 3) MONTHS',
'P1Y': '{col} - (DAY({col})-1) DAYS'
' - (MONTH({col})-1) MONTHS',
}
@classmethod
def epoch_to_dttm(cls):
return "(TIMESTAMP('1970-01-01', '00:00:00') + {col} SECONDS)"
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d-%H.%M.%S'))
@staticmethod
def mutate_label(label):
"""
Db2 for z/OS supports a maximum of 30 byte length object names, which usually
means 30 characters.
:param str label: Original label which might include unsupported characters
:return: String that is supported by the database
"""
if len(label) > 30:
hashed_label = hashlib.md5(label.encode('utf-8')).hexdigest()
# truncate the hash to first 30 characters
return hashed_label[:30]
return label
class SqliteEngineSpec(BaseEngineSpec):
engine = 'sqlite'
time_grain_functions = {
None: '{col}',
'PT1H': "DATETIME(STRFTIME('%Y-%m-%dT%H:00:00', {col}))",
'P1D': 'DATE({col})',
'P1W': "DATE({col}, -strftime('%W', {col}) || ' days')",
'P1M': "DATE({col}, -strftime('%d', {col}) || ' days', '+1 day')",
'P1Y': "DATETIME(STRFTIME('%Y-01-01T00:00:00', {col}))",
'P1W/1970-01-03T00:00:00Z': "DATE({col}, 'weekday 6')",
'1969-12-28T00:00:00Z/P1W': "DATE({col}, 'weekday 0', '-7 days')",
}
@classmethod
def epoch_to_dttm(cls):
return "datetime({col}, 'unixepoch')"
@classmethod
def fetch_result_sets(cls, db, datasource_type):
schemas = db.all_schema_names(cache=db.schema_cache_enabled,
cache_timeout=db.schema_cache_timeout,
force=True)
all_result_sets = []
schema = schemas[0]
if datasource_type == 'table':
all_datasource_names = db.all_table_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
elif datasource_type == 'view':
all_datasource_names = db.all_view_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
all_result_sets += [
'{}.{}'.format(schema, t) for t in all_datasource_names]
return all_result_sets
@classmethod
def convert_dttm(cls, target_type, dttm):
iso = dttm.isoformat().replace('T', ' ')
if '.' not in iso:
iso += '.000000'
return "'{}'".format(iso)
@classmethod
def get_table_names(cls, inspector, schema):
"""Need to disregard the schema for Sqlite"""
return sorted(inspector.get_table_names())
class MySQLEngineSpec(BaseEngineSpec):
engine = 'mysql'
time_grain_functions = {
None: '{col}',
'PT1S': 'DATE_ADD(DATE({col}), '
'INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60'
' + SECOND({col})) SECOND)',
'PT1M': 'DATE_ADD(DATE({col}), '
'INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)',
'PT1H': 'DATE_ADD(DATE({col}), '
'INTERVAL HOUR({col}) HOUR)',
'P1D': 'DATE({col})',
'P1W': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFWEEK({col}) - 1 DAY))',
'P1M': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFMONTH({col}) - 1 DAY))',
'P0.25Y': 'MAKEDATE(YEAR({col}), 1) '
'+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER',
'P1Y': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFYEAR({col}) - 1 DAY))',
'1969-12-29T00:00:00Z/P1W': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))',
}
type_code_map = {} # loaded from get_datatype only if needed
@classmethod
def convert_dttm(cls, target_type, dttm):
if target_type.upper() in ('DATETIME', 'DATE'):
return "STR_TO_DATE('{}', '%Y-%m-%d %H:%i:%s')".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
if selected_schema:
uri.database = selected_schema
return uri
@classmethod
def get_datatype(cls, type_code):
if not cls.type_code_map:
# only import and store if needed at least once
import MySQLdb
ft = MySQLdb.constants.FIELD_TYPE
cls.type_code_map = {
getattr(ft, k): k
for k in dir(ft)
if not k.startswith('_')
}
datatype = type_code
if isinstance(type_code, int):
datatype = cls.type_code_map.get(type_code)
if datatype and isinstance(datatype, str) and len(datatype):
return datatype
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
@classmethod
def extract_error_message(cls, e):
"""Extract error message for queries"""
message = str(e)
try:
if isinstance(e.args, tuple) and len(e.args) > 1:
message = e.args[1]
except Exception:
pass
return message
class PrestoEngineSpec(BaseEngineSpec):
engine = 'presto'
time_grain_functions = {
None: '{col}',
'PT1S': "date_trunc('second', CAST({col} AS TIMESTAMP))",
'PT1M': "date_trunc('minute', CAST({col} AS TIMESTAMP))",
'PT1H': "date_trunc('hour', CAST({col} AS TIMESTAMP))",
'P1D': "date_trunc('day', CAST({col} AS TIMESTAMP))",
'P1W': "date_trunc('week', CAST({col} AS TIMESTAMP))",
'P1M': "date_trunc('month', CAST({col} AS TIMESTAMP))",
'P0.25Y': "date_trunc('quarter', CAST({col} AS TIMESTAMP))",
'P1Y': "date_trunc('year', CAST({col} AS TIMESTAMP))",
'P1W/1970-01-03T00:00:00Z':
"date_add('day', 5, date_trunc('week', date_add('day', 1, \
CAST({col} AS TIMESTAMP))))",
'1969-12-28T00:00:00Z/P1W':
"date_add('day', -1, date_trunc('week', \
date_add('day', 1, CAST({col} AS TIMESTAMP))))",
}
@classmethod
def get_view_names(cls, inspector, schema):
"""Returns an empty list
get_table_names() function returns all table names and view names,
and get_view_names() is not implemented in sqlalchemy_presto.py
https://github.com/dropbox/PyHive/blob/e25fc8440a0686bbb7a5db5de7cb1a77bdb4167a/pyhive/sqlalchemy_presto.py
"""
return []
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
database = uri.database
if selected_schema and database:
if '/' in database:
database = database.split('/')[0] + '/' + selected_schema
else:
database += '/' + selected_schema
uri.database = database
return uri
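    # Illustrative (catalog and schema names hypothetical): with schema 'logs'
    # selected, a database component of 'hive/default' becomes 'hive/logs',
    # and a bare 'hive' becomes 'hive/logs'.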
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "from_iso8601_date('{}')".format(dttm.isoformat()[:10])
if tt == 'TIMESTAMP':
return "from_iso8601_timestamp('{}')".format(dttm.isoformat())
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
@classmethod
def fetch_result_sets(cls, db, datasource_type):
"""Returns a list of tables [schema1.table1, schema2.table2, ...]
Datasource_type can be 'table' or 'view'.
        Empty schema corresponds to the list of full names of all the
        tables or views: <schema>.<result_set_name>.
"""
result_set_df = db.get_df(
"""SELECT table_schema, table_name FROM INFORMATION_SCHEMA.{}S
ORDER BY concat(table_schema, '.', table_name)""".format(
datasource_type.upper(),
),
None)
result_sets = []
for unused, row in result_set_df.iterrows():
result_sets.append('{}.{}'.format(
row['table_schema'], row['table_name']))
return result_sets
@classmethod
def extra_table_metadata(cls, database, table_name, schema_name):
indexes = database.get_indexes(table_name, schema_name)
if not indexes:
return {}
cols = indexes[0].get('column_names', [])
full_table_name = table_name
if schema_name and '.' not in table_name:
full_table_name = '{}.{}'.format(schema_name, table_name)
pql = cls._partition_query(full_table_name)
col_name, latest_part = cls.latest_partition(
table_name, schema_name, database, show_first=True)
return {
'partitions': {
'cols': cols,
'latest': {col_name: latest_part},
'partitionQuery': pql,
},
}
@classmethod
def handle_cursor(cls, cursor, query, session):
"""Updates progress information"""
logging.info('Polling the cursor for progress')
polled = cursor.poll()
# poll returns dict -- JSON status information or ``None``
# if the query is done
# https://github.com/dropbox/PyHive/blob/
# b34bdbf51378b3979eaf5eca9e956f06ddc36ca0/pyhive/presto.py#L178
while polled:
# Update the object and wait for the kill signal.
stats = polled.get('stats', {})
query = session.query(type(query)).filter_by(id=query.id).one()
if query.status in [QueryStatus.STOPPED, QueryStatus.TIMED_OUT]:
cursor.cancel()
break
if stats:
state = stats.get('state')
# if already finished, then stop polling
if state == 'FINISHED':
break
completed_splits = float(stats.get('completedSplits'))
total_splits = float(stats.get('totalSplits'))
if total_splits and completed_splits:
progress = 100 * (completed_splits / total_splits)
logging.info(
'Query progress: {} / {} '
'splits'.format(completed_splits, total_splits))
if progress > query.progress:
query.progress = progress
session.commit()
time.sleep(1)
logging.info('Polling the cursor for progress')
polled = cursor.poll()
@classmethod
def extract_error_message(cls, e):
if (
hasattr(e, 'orig') and
type(e.orig).__name__ == 'DatabaseError' and
isinstance(e.orig[0], dict)):
error_dict = e.orig[0]
return '{} at {}: {}'.format(
error_dict.get('errorName'),
error_dict.get('errorLocation'),
error_dict.get('message'),
)
if (
type(e).__name__ == 'DatabaseError' and
hasattr(e, 'args') and
len(e.args) > 0
):
error_dict = e.args[0]
return error_dict.get('message')
return utils.error_msg_from_exception(e)
@classmethod
def _partition_query(
cls, table_name, limit=0, order_by=None, filters=None):
"""Returns a partition query
:param table_name: the name of the table to get partitions from
:type table_name: str
:param limit: the number of partitions to be returned
:type limit: int
:param order_by: a list of tuples of field name and a boolean
that determines if that field should be sorted in descending
order
:type order_by: list of (str, bool) tuples
        :param filters: dict of field name and filter value combinations,
            applied as equality predicates in the WHERE clause
        :type filters: dict
"""
limit_clause = 'LIMIT {}'.format(limit) if limit else ''
order_by_clause = ''
if order_by:
l = [] # noqa: E741
for field, desc in order_by:
                l.append(field + (' DESC' if desc else ''))
order_by_clause = 'ORDER BY ' + ', '.join(l)
where_clause = ''
if filters:
l = [] # noqa: E741
for field, value in filters.items():
l.append(f"{field} = '{value}'")
where_clause = 'WHERE ' + ' AND '.join(l)
sql = textwrap.dedent(f"""\
SHOW PARTITIONS FROM {table_name}
{where_clause}
{order_by_clause}
{limit_clause}
""")
return sql
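    # For example (illustrative table and column names),
    #     _partition_query('logs', limit=1, order_by=[('ds', True)])
    # renders roughly as:
    #     SHOW PARTITIONS FROM logs
    #     ORDER BY ds DESC
    #     LIMIT 1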
@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
try:
col_name, value = cls.latest_partition(
table_name, schema, database, show_first=True)
except Exception:
# table is not partitioned
return False
for c in columns:
if c.get('name') == col_name:
return qry.where(Column(col_name) == value)
return False
@classmethod
def _latest_partition_from_df(cls, df):
recs = df.to_records(index=False)
if recs:
return recs[0][0]
@classmethod
def latest_partition(cls, table_name, schema, database, show_first=False):
"""Returns col name and the latest (max) partition value for a table
:param table_name: the name of the table
:type table_name: str
:param schema: schema / database / namespace
:type schema: str
:param database: database query will be run against
:type database: models.Database
:param show_first: displays the value for the first partitioning key
if there are many partitioning keys
:type show_first: bool
>>> latest_partition('foo_table')
'2018-01-01'
"""
indexes = database.get_indexes(table_name, schema)
if len(indexes[0]['column_names']) < 1:
raise SupersetTemplateException(
'The table should have one partitioned field')
elif not show_first and len(indexes[0]['column_names']) > 1:
raise SupersetTemplateException(
'The table should have a single partitioned field '
'to use this function. You may want to use '
'`presto.latest_sub_partition`')
part_field = indexes[0]['column_names'][0]
sql = cls._partition_query(table_name, 1, [(part_field, True)])
df = database.get_df(sql, schema)
return part_field, cls._latest_partition_from_df(df)
@classmethod
def latest_sub_partition(cls, table_name, schema, database, **kwargs):
"""Returns the latest (max) partition value for a table
        Filtering criteria should be passed for all fields that are
partitioned except for the field to be returned. For example,
if a table is partitioned by (``ds``, ``event_type`` and
``event_category``) and you want the latest ``ds``, you'll want
to provide a filter as keyword arguments for both
``event_type`` and ``event_category`` as in
``latest_sub_partition('my_table',
event_category='page', event_type='click')``
:param table_name: the name of the table, can be just the table
name or a fully qualified table name as ``schema_name.table_name``
:type table_name: str
:param schema: schema / database / namespace
:type schema: str
:param database: database query will be run against
:type database: models.Database
:param kwargs: keyword arguments define the filtering criteria
on the partition list. There can be many of these.
:type kwargs: str
>>> latest_sub_partition('sub_partition_table', event_type='click')
'2018-01-01'
"""
indexes = database.get_indexes(table_name, schema)
part_fields = indexes[0]['column_names']
for k in kwargs.keys():
            if k not in part_fields:
                msg = f'Field [{k}] is not part of the partitioning key'
                raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:
msg = (
'A filter needs to be specified for {} out of the '
'{} fields.'
).format(len(part_fields) - 1, len(part_fields))
raise SupersetTemplateException(msg)
for field in part_fields:
if field not in kwargs.keys():
field_to_return = field
sql = cls._partition_query(
table_name, 1, [(field_to_return, True)], kwargs)
df = database.get_df(sql, schema)
if df.empty:
return ''
return df.to_dict()[field_to_return][0]
class HiveEngineSpec(PrestoEngineSpec):
"""Reuses PrestoEngineSpec functionality."""
engine = 'hive'
# Scoping regex at class level to avoid recompiling
# 17/02/07 19:36:38 INFO ql.Driver: Total jobs = 5
jobs_stats_r = re.compile(
r'.*INFO.*Total jobs = (?P<max_jobs>[0-9]+)')
# 17/02/07 19:37:08 INFO ql.Driver: Launching Job 2 out of 5
launching_job_r = re.compile(
'.*INFO.*Launching Job (?P<job_number>[0-9]+) out of '
'(?P<max_jobs>[0-9]+)')
# 17/02/07 19:36:58 INFO exec.Task: 2017-02-07 19:36:58,152 Stage-18
# map = 0%, reduce = 0%
stage_progress_r = re.compile(
r'.*INFO.*Stage-(?P<stage_number>[0-9]+).*'
r'map = (?P<map_progress>[0-9]+)%.*'
r'reduce = (?P<reduce_progress>[0-9]+)%.*')
@classmethod
def patch(cls):
from pyhive import hive # pylint: disable=no-name-in-module
from superset.db_engines import hive as patched_hive
from TCLIService import (
constants as patched_constants,
ttypes as patched_ttypes,
TCLIService as patched_TCLIService)
hive.TCLIService = patched_TCLIService
hive.constants = patched_constants
hive.ttypes = patched_ttypes
hive.Cursor.fetch_logs = patched_hive.fetch_logs
@classmethod
def fetch_result_sets(cls, db, datasource_type):
return BaseEngineSpec.fetch_result_sets(
db, datasource_type)
@classmethod
def fetch_data(cls, cursor, limit):
import pyhive
from TCLIService import ttypes
state = cursor.poll()
if state.operationState == ttypes.TOperationState.ERROR_STATE:
raise Exception('Query error', state.errorMessage)
try:
return super(HiveEngineSpec, cls).fetch_data(cursor, limit)
except pyhive.exc.ProgrammingError:
return []
@staticmethod
def create_table_from_csv(form, table):
"""Uploads a csv file and creates a superset datasource in Hive."""
def convert_to_hive_type(col_type):
"""maps tableschema's types to hive types"""
tableschema_to_hive_types = {
'boolean': 'BOOLEAN',
'integer': 'INT',
'number': 'DOUBLE',
'string': 'STRING',
}
return tableschema_to_hive_types.get(col_type, 'STRING')
bucket_path = config['CSV_TO_HIVE_UPLOAD_S3_BUCKET']
if not bucket_path:
logging.info('No upload bucket specified')
raise Exception(
'No upload bucket specified. You can specify one in the config file.')
table_name = form.name.data
schema_name = form.schema.data
if config.get('UPLOADED_CSV_HIVE_NAMESPACE'):
if '.' in table_name or schema_name:
raise Exception(
"You can't specify a namespace. "
'All tables will be uploaded to the `{}` namespace'.format(
                        config.get('UPLOADED_CSV_HIVE_NAMESPACE')))
full_table_name = '{}.{}'.format(
config.get('UPLOADED_CSV_HIVE_NAMESPACE'), table_name)
else:
if '.' in table_name and schema_name:
raise Exception(
"You can't specify a namespace both in the name of the table "
'and in the schema field. Please remove one')
full_table_name = '{}.{}'.format(
schema_name, table_name) if schema_name else table_name
filename = form.csv_file.data.filename
upload_prefix = config['CSV_TO_HIVE_UPLOAD_DIRECTORY']
upload_path = config['UPLOAD_FOLDER'] + \
secure_filename(filename)
# Optional dependency
from tableschema import Table # pylint: disable=import-error
hive_table_schema = Table(upload_path).infer()
column_name_and_type = []
for column_info in hive_table_schema['fields']:
column_name_and_type.append(
'`{}` {}'.format(
column_info['name'],
convert_to_hive_type(column_info['type'])))
schema_definition = ', '.join(column_name_and_type)
# Optional dependency
import boto3 # pylint: disable=import-error
s3 = boto3.client('s3')
location = os.path.join('s3a://', bucket_path, upload_prefix, table_name)
s3.upload_file(
upload_path, bucket_path,
os.path.join(upload_prefix, table_name, filename))
sql = f"""CREATE TABLE {full_table_name} ( {schema_definition} )
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS
TEXTFILE LOCATION '{location}'
tblproperties ('skip.header.line.count'='1')"""
logging.info(form.con.data)
engine = create_engine(form.con.data.sqlalchemy_uri_decrypted)
engine.execute(sql)
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
elif tt == 'TIMESTAMP':
return "CAST('{}' AS TIMESTAMP)".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
if selected_schema:
uri.database = selected_schema
return uri
@classmethod
def extract_error_message(cls, e):
msg = str(e)
match = re.search(r'errorMessage="(.*?)(?<!\\)"', msg)
if match:
msg = match.group(1)
return msg
@classmethod
def progress(cls, log_lines):
total_jobs = 1 # assuming there's at least 1 job
current_job = 1
stages = {}
for line in log_lines:
match = cls.jobs_stats_r.match(line)
if match:
total_jobs = int(match.groupdict()['max_jobs']) or 1
match = cls.launching_job_r.match(line)
if match:
current_job = int(match.groupdict()['job_number'])
total_jobs = int(match.groupdict()['max_jobs']) or 1
stages = {}
match = cls.stage_progress_r.match(line)
if match:
stage_number = int(match.groupdict()['stage_number'])
map_progress = int(match.groupdict()['map_progress'])
reduce_progress = int(match.groupdict()['reduce_progress'])
stages[stage_number] = (map_progress + reduce_progress) / 2
logging.info(
'Progress detail: {}, '
'current job {}, '
'total jobs: {}'.format(stages, current_job, total_jobs))
stage_progress = sum(
stages.values()) / len(stages.values()) if stages else 0
progress = (
100 * (current_job - 1) / total_jobs + stage_progress / total_jobs
)
return int(progress)
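    # Worked example (made-up log values): with total_jobs=2, current_job=2
    # and a single stage at map=80%, reduce=40%, the stage average is 60, so
    # progress = 100 * (2 - 1) / 2 + 60 / 2 = 80.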
@classmethod
def get_tracking_url(cls, log_lines):
lkp = 'Tracking URL = '
for line in log_lines:
if lkp in line:
return line.split(lkp)[1]
@classmethod
def handle_cursor(cls, cursor, query, session):
"""Updates progress information"""
from pyhive import hive # pylint: disable=no-name-in-module
unfinished_states = (
hive.ttypes.TOperationState.INITIALIZED_STATE,
hive.ttypes.TOperationState.RUNNING_STATE,
)
polled = cursor.poll()
last_log_line = 0
tracking_url = None
job_id = None
while polled.operationState in unfinished_states:
query = session.query(type(query)).filter_by(id=query.id).one()
if query.status == QueryStatus.STOPPED:
cursor.cancel()
break
log = cursor.fetch_logs() or ''
if log:
log_lines = log.splitlines()
progress = cls.progress(log_lines)
logging.info('Progress total: {}'.format(progress))
needs_commit = False
if progress > query.progress:
query.progress = progress
needs_commit = True
if not tracking_url:
tracking_url = cls.get_tracking_url(log_lines)
if tracking_url:
job_id = tracking_url.split('/')[-2]
logging.info(
'Found the tracking url: {}'.format(tracking_url))
tracking_url = tracking_url_trans(tracking_url)
logging.info(
'Transformation applied: {}'.format(tracking_url))
query.tracking_url = tracking_url
logging.info('Job id: {}'.format(job_id))
needs_commit = True
if job_id and len(log_lines) > last_log_line:
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
for l in log_lines[last_log_line:]:
logging.info('[{}] {}'.format(job_id, l))
last_log_line = len(log_lines)
if needs_commit:
session.commit()
time.sleep(hive_poll_interval)
polled = cursor.poll()
@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
try:
col_name, value = cls.latest_partition(
table_name, schema, database, show_first=True)
except Exception:
# table is not partitioned
return False
for c in columns:
if c.get('name') == col_name:
return qry.where(Column(col_name) == value)
return False
@classmethod
def latest_sub_partition(cls, table_name, schema, database, **kwargs):
        # TODO(bogdan): implement.
pass
@classmethod
def _latest_partition_from_df(cls, df):
"""Hive partitions look like ds={partition name}"""
        return df.iloc[:, 0].max().split('=')[1]
@classmethod
def _partition_query(
cls, table_name, limit=0, order_by=None, filters=None):
return f'SHOW PARTITIONS {table_name}'
@classmethod
def modify_url_for_impersonation(cls, url, impersonate_user, username):
"""
Modify the SQL Alchemy URL object with the user to impersonate if applicable.
:param url: SQLAlchemy URL object
:param impersonate_user: Bool indicating if impersonation is enabled
:param username: Effective username
"""
        # Do nothing to the URL object; the changes should instead go into the
        # configuration dictionary. See get_configuration_for_impersonation.
pass
@classmethod
def get_configuration_for_impersonation(cls, uri, impersonate_user, username):
"""
Return a configuration dictionary that can be merged with other configs
that can set the correct properties for impersonating users
:param uri: URI string
:param impersonate_user: Bool indicating if impersonation is enabled
:param username: Effective username
:return: Dictionary with configs required for impersonation
"""
configuration = {}
url = make_url(uri)
backend_name = url.get_backend_name()
# Must be Hive connection, enable impersonation, and set param auth=LDAP|KERBEROS
if (backend_name == 'hive' and 'auth' in url.query.keys() and
impersonate_user is True and username is not None):
configuration['hive.server2.proxy.user'] = username
return configuration
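    # Illustrative (host and username hypothetical): for
    # 'hive://host:10000/default?auth=KERBEROS' with impersonation enabled and
    # effective user 'alice', this returns
    # {'hive.server2.proxy.user': 'alice'}.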
@staticmethod
def execute(cursor, query, async_=False):
kwargs = {'async': async_}
cursor.execute(query, **kwargs)
class MssqlEngineSpec(BaseEngineSpec):
engine = 'mssql'
    limit_method = LimitMethod.WRAP_SQL
    @classmethod
    def epoch_to_dttm(cls):
        return "dateadd(S, {col}, '1970-01-01')"
time_grain_functions = {
None: '{col}',
'PT1S': "DATEADD(second, DATEDIFF(second, '2000-01-01', {col}), '2000-01-01')",
'PT1M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}), 0)',
'PT5M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 5 * 5, 0)',
'PT10M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 10 * 10, 0)',
'PT15M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 15 * 15, 0)',
'PT0.5H': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 30 * 30, 0)',
'PT1H': 'DATEADD(hour, DATEDIFF(hour, 0, {col}), 0)',
'P1D': 'DATEADD(day, DATEDIFF(day, 0, {col}), 0)',
'P1W': 'DATEADD(week, DATEDIFF(week, 0, {col}), 0)',
'P1M': 'DATEADD(month, DATEDIFF(month, 0, {col}), 0)',
'P0.25Y': 'DATEADD(quarter, DATEDIFF(quarter, 0, {col}), 0)',
'P1Y': 'DATEADD(year, DATEDIFF(year, 0, {col}), 0)',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
return "CONVERT(DATETIME, '{}', 126)".format(dttm.isoformat())
@classmethod
def fetch_data(cls, cursor, limit):
data = super(MssqlEngineSpec, cls).fetch_data(cursor, limit)
if len(data) != 0 and type(data[0]).__name__ == 'Row':
data = [[elem for elem in r] for r in data]
return data
class AthenaEngineSpec(BaseEngineSpec):
engine = 'awsathena'
time_grain_functions = {
None: '{col}',
'PT1S': "date_trunc('second', CAST({col} AS TIMESTAMP))",
'PT1M': "date_trunc('minute', CAST({col} AS TIMESTAMP))",
'PT1H': "date_trunc('hour', CAST({col} AS TIMESTAMP))",
'P1D': "date_trunc('day', CAST({col} AS TIMESTAMP))",
'P1W': "date_trunc('week', CAST({col} AS TIMESTAMP))",
'P1M': "date_trunc('month', CAST({col} AS TIMESTAMP))",
'P0.25Y': "date_trunc('quarter', CAST({col} AS TIMESTAMP))",
'P1Y': "date_trunc('year', CAST({col} AS TIMESTAMP))",
'P1W/1970-01-03T00:00:00Z': "date_add('day', 5, date_trunc('week', \
date_add('day', 1, CAST({col} AS TIMESTAMP))))",
'1969-12-28T00:00:00Z/P1W': "date_add('day', -1, date_trunc('week', \
date_add('day', 1, CAST({col} AS TIMESTAMP))))",
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "from_iso8601_date('{}')".format(dttm.isoformat()[:10])
if tt == 'TIMESTAMP':
return "from_iso8601_timestamp('{}')".format(dttm.isoformat())
return ("CAST ('{}' AS TIMESTAMP)"
.format(dttm.strftime('%Y-%m-%d %H:%M:%S')))
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
class ClickHouseEngineSpec(BaseEngineSpec):
"""Dialect for ClickHouse analytical DB."""
engine = 'clickhouse'
time_secondary_columns = True
time_groupby_inline = True
time_grain_functions = {
None: '{col}',
'PT1M': 'toStartOfMinute(toDateTime({col}))',
'PT5M': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 300)*300)',
'PT10M': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 600)*600)',
'PT15M': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 900)*900)',
'PT0.5H': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 1800)*1800)',
'PT1H': 'toStartOfHour(toDateTime({col}))',
'P1D': 'toStartOfDay(toDateTime({col}))',
'P1W': 'toMonday(toDateTime({col}))',
'P1M': 'toStartOfMonth(toDateTime({col}))',
'P0.25Y': 'toStartOfQuarter(toDateTime({col}))',
'P1Y': 'toStartOfYear(toDateTime({col}))',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "toDate('{}')".format(dttm.strftime('%Y-%m-%d'))
if tt == 'DATETIME':
return "toDateTime('{}')".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
class BQEngineSpec(BaseEngineSpec):
"""Engine spec for Google's BigQuery
As contributed by @mxmzdlv on issue #945"""
engine = 'bigquery'
"""
https://www.python.org/dev/peps/pep-0249/#arraysize
    raw_connections bypass the pybigquery query execution context and deal with
    the raw dbapi connection directly.
    If this value is not set, it defaults to 1, as described here:
    https://googlecloudplatform.github.io/google-cloud-python/latest/_modules/google/cloud/bigquery/dbapi/cursor.html#Cursor
    The value of 5000 used here is derived from pybigquery:
https://github.com/mxmzdlv/pybigquery/blob/d214bb089ca0807ca9aaa6ce4d5a01172d40264e/pybigquery/sqlalchemy_bigquery.py#L102
"""
arraysize = 5000
time_grain_functions = {
None: '{col}',
'PT1S': 'TIMESTAMP_TRUNC({col}, SECOND)',
'PT1M': 'TIMESTAMP_TRUNC({col}, MINUTE)',
'PT1H': 'TIMESTAMP_TRUNC({col}, HOUR)',
'P1D': 'TIMESTAMP_TRUNC({col}, DAY)',
'P1W': 'TIMESTAMP_TRUNC({col}, WEEK)',
'P1M': 'TIMESTAMP_TRUNC({col}, MONTH)',
'P0.25Y': 'TIMESTAMP_TRUNC({col}, QUARTER)',
'P1Y': 'TIMESTAMP_TRUNC({col}, YEAR)',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "'{}'".format(dttm.strftime('%Y-%m-%d'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def fetch_data(cls, cursor, limit):
data = super(BQEngineSpec, cls).fetch_data(cursor, limit)
if len(data) != 0 and type(data[0]).__name__ == 'Row':
data = [r.values() for r in data]
return data
@staticmethod
def mutate_label(label):
"""
BigQuery field_name should start with a letter or underscore, contain only
alphanumeric characters and be at most 128 characters long. Labels that start
with a number are prefixed with an underscore. Any unsupported characters are
replaced with underscores and an md5 hash is added to the end of the label to
avoid possible collisions. If the resulting label exceeds 128 characters, only
the md5 sum is returned.
:param str label: the original label which might include unsupported characters
:return: String that is supported by the database
"""
hashed_label = '_' + hashlib.md5(label.encode('utf-8')).hexdigest()
# if label starts with number, add underscore as first character
mutated_label = '_' + label if re.match(r'^\d', label) else label
# replace non-alphanumeric characters with underscores
mutated_label = re.sub(r'[^\w]+', '_', mutated_label)
if mutated_label != label:
# add md5 hash to label to avoid possible collisions
mutated_label += hashed_label
# return only hash if length of final label exceeds 128 chars
return mutated_label if len(mutated_label) <= 128 else hashed_label
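    # Illustrative sketch (inputs assumed, digest abbreviated): '9col name'
    # gains a leading underscore ('_9col name'), the space becomes an underscore
    # ('_9col_name'), and because the result differs from the original the md5
    # suffix is appended ('_9col_name_<md5>'); a clean label such as 'metric'
    # passes through unchanged.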
@classmethod
def extra_table_metadata(cls, database, table_name, schema_name):
indexes = database.get_indexes(table_name, schema_name)
if not indexes:
return {}
partitions_columns = [
index.get('column_names', []) for index in indexes
if index.get('name') == 'partition'
]
cluster_columns = [
index.get('column_names', []) for index in indexes
if index.get('name') == 'clustering'
]
return {
'partitions': {
'cols': partitions_columns,
},
'clustering': {
'cols': cluster_columns,
},
}
@classmethod
def _get_fields(cls, cols):
"""
        The BigQuery dialect requires that nested field names not be wrapped in
        backticks; using literal_column handles that issue.
        http://docs.sqlalchemy.org/en/latest/core/tutorial.html#using-more-specific-text-with-table-literal-column-and-column
        Column names are also labelled explicitly so we don't encounter
        duplicate column names in the result.
"""
return [sqla.literal_column(c.get('name')).label(c.get('name').replace('.', '__'))
for c in cols]
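    # Illustrative sketch: a nested column 'user.name' becomes
    # literal_column('user.name').label('user__name'), compiling to roughly
    # SELECT user.name AS user__name, with no backticks around the dotted path.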
class ImpalaEngineSpec(BaseEngineSpec):
"""Engine spec for Cloudera's Impala"""
engine = 'impala'
time_grain_functions = {
None: '{col}',
'PT1M': "TRUNC({col}, 'MI')",
'PT1H': "TRUNC({col}, 'HH')",
'P1D': "TRUNC({col}, 'DD')",
'P1W': "TRUNC({col}, 'WW')",
'P1M': "TRUNC({col}, 'MONTH')",
'P0.25Y': "TRUNC({col}, 'Q')",
'P1Y': "TRUNC({col}, 'YYYY')",
}
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "'{}'".format(dttm.strftime('%Y-%m-%d'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def get_schema_names(cls, inspector):
schemas = [row[0] for row in inspector.engine.execute('SHOW SCHEMAS')
if not row[0].startswith('_')]
return schemas
class DruidEngineSpec(BaseEngineSpec):
"""Engine spec for Druid.io"""
engine = 'druid'
inner_joins = False
allows_subquery = False
time_grain_functions = {
None: '{col}',
'PT1S': 'FLOOR({col} TO SECOND)',
'PT1M': 'FLOOR({col} TO MINUTE)',
'PT1H': 'FLOOR({col} TO HOUR)',
'P1D': 'FLOOR({col} TO DAY)',
'P1W': 'FLOOR({col} TO WEEK)',
'P1M': 'FLOOR({col} TO MONTH)',
'P0.25Y': 'FLOOR({col} TO QUARTER)',
'P1Y': 'FLOOR({col} TO YEAR)',
}
class GSheetsEngineSpec(SqliteEngineSpec):
"""Engine for Google spreadsheets"""
engine = 'gsheets'
inner_joins = False
allows_subquery = False
class KylinEngineSpec(BaseEngineSpec):
"""Dialect for Apache Kylin"""
engine = 'kylin'
time_grain_functions = {
None: '{col}',
'PT1S': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO SECOND) AS TIMESTAMP)',
'PT1M': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MINUTE) AS TIMESTAMP)',
'PT1H': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO HOUR) AS TIMESTAMP)',
'P1D': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO DAY) AS DATE)',
'P1W': 'CAST(TIMESTAMPADD(WEEK, WEEK(CAST({col} AS DATE)) - 1, \
FLOOR(CAST({col} AS TIMESTAMP) TO YEAR)) AS DATE)',
'P1M': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MONTH) AS DATE)',
'P0.25Y': 'CAST(TIMESTAMPADD(QUARTER, QUARTER(CAST({col} AS DATE)) - 1, \
FLOOR(CAST({col} AS TIMESTAMP) TO YEAR)) AS DATE)',
'P1Y': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO YEAR) AS DATE)',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
if tt == 'TIMESTAMP':
return "CAST('{}' AS TIMESTAMP)".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
class TeradataEngineSpec(BaseEngineSpec):
"""Dialect for Teradata DB."""
engine = 'teradata'
limit_method = LimitMethod.WRAP_SQL
time_grain_functions = {
None: '{col}',
'PT1M': "TRUNC(CAST({col} as DATE), 'MI')",
'PT1H': "TRUNC(CAST({col} as DATE), 'HH')",
'P1D': "TRUNC(CAST({col} as DATE), 'DDD')",
'P1W': "TRUNC(CAST({col} as DATE), 'WW')",
'P1M': "TRUNC(CAST({col} as DATE), 'MONTH')",
'P0.25Y': "TRUNC(CAST({col} as DATE), 'Q')",
'P1Y': "TRUNC(CAST({col} as DATE), 'YEAR')",
}
engines = {
o.engine: o for o in globals().values()
if inspect.isclass(o) and issubclass(o, BaseEngineSpec)}
| 37.829091 | 126 | 0.594156 |
from collections import namedtuple
import hashlib
import inspect
import logging
import os
import re
import textwrap
import time
from flask import g
from flask_babel import lazy_gettext as _
import pandas
import sqlalchemy as sqla
from sqlalchemy import Column, select
from sqlalchemy.engine import create_engine
from sqlalchemy.engine.url import make_url
from sqlalchemy.sql import quoted_name, text
from sqlalchemy.sql.expression import TextAsFrom
import sqlparse
from werkzeug.utils import secure_filename
from superset import app, conf, db, sql_parse
from superset.exceptions import SupersetTemplateException
from superset.utils import core as utils
QueryStatus = utils.QueryStatus
config = app.config
tracking_url_trans = conf.get('TRACKING_URL_TRANSFORMER')
hive_poll_interval = conf.get('HIVE_POLL_INTERVAL')
Grain = namedtuple('Grain', 'name label function duration')
builtin_time_grains = {
None: 'Time Column',
'PT1S': 'second',
'PT1M': 'minute',
'PT5M': '5 minute',
'PT10M': '10 minute',
'PT15M': '15 minute',
'PT0.5H': 'half hour',
'PT1H': 'hour',
'P1D': 'day',
'P1W': 'week',
'P1M': 'month',
'P0.25Y': 'quarter',
'P1Y': 'year',
'1969-12-28T00:00:00Z/P1W': 'week_start_sunday',
'1969-12-29T00:00:00Z/P1W': 'week_start_monday',
'P1W/1970-01-03T00:00:00Z': 'week_ending_saturday',
'P1W/1970-01-04T00:00:00Z': 'week_ending_sunday',
}
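# The keys are ISO 8601 durations: 'PT5M' is a five-minute period, 'P0.25Y' a
# quarter, and anchored forms such as '1969-12-29T00:00:00Z/P1W' pin a weekly
# grain to an epoch (1969-12-29 was a Monday, hence week_start_monday).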
def _create_time_grains_tuple(time_grains, time_grain_functions, blacklist):
ret_list = []
blacklist = blacklist if blacklist else []
for duration, func in time_grain_functions.items():
if duration not in blacklist:
name = time_grains.get(duration)
ret_list.append(Grain(name, _(name), func, duration))
return tuple(ret_list)
class LimitMethod(object):
FETCH_MANY = 'fetch_many'
WRAP_SQL = 'wrap_sql'
FORCE_LIMIT = 'force_limit'
class BaseEngineSpec(object):
engine = 'base'
time_grain_functions = {}
time_groupby_inline = False
limit_method = LimitMethod.FORCE_LIMIT
time_secondary_columns = False
inner_joins = True
allows_subquery = True
force_column_alias_quotes = False
arraysize = None
@classmethod
def get_time_grains(cls):
blacklist = config.get('TIME_GRAIN_BLACKLIST', [])
grains = builtin_time_grains.copy()
grains.update(config.get('TIME_GRAIN_ADDONS', {}))
grain_functions = cls.time_grain_functions.copy()
grain_addon_functions = config.get('TIME_GRAIN_ADDON_FUNCTIONS', {})
grain_functions.update(grain_addon_functions.get(cls.engine, {}))
return _create_time_grains_tuple(grains, grain_functions, blacklist)
@classmethod
def fetch_data(cls, cursor, limit):
if cls.arraysize:
cursor.arraysize = cls.arraysize
if cls.limit_method == LimitMethod.FETCH_MANY:
return cursor.fetchmany(limit)
return cursor.fetchall()
@classmethod
def epoch_to_dttm(cls):
raise NotImplementedError()
@classmethod
def epoch_ms_to_dttm(cls):
return cls.epoch_to_dttm().replace('{col}', '({col}/1000.000)')
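    # e.g. an engine whose epoch_to_dttm() is 'from_unixtime({col})' yields
    # 'from_unixtime(({col}/1000.000))' for millisecond epochs.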
@classmethod
def get_datatype(cls, type_code):
if isinstance(type_code, str) and len(type_code):
return type_code.upper()
@classmethod
def extra_table_metadata(cls, database, table_name, schema_name):
return {}
@classmethod
def apply_limit_to_sql(cls, sql, limit, database):
if cls.limit_method == LimitMethod.WRAP_SQL:
sql = sql.strip('\t\n ;')
qry = (
select('*')
.select_from(
TextAsFrom(text(sql), ['*']).alias('inner_qry'),
)
.limit(limit)
)
return database.compile_sqla_query(qry)
        elif cls.limit_method == LimitMethod.FORCE_LIMIT:
parsed_query = sql_parse.ParsedQuery(sql)
sql = parsed_query.get_query_with_new_limit(limit)
return sql
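    # Illustrative sketch (query assumed): with WRAP_SQL, 'SELECT a FROM t;'
    # and limit=10 compile to roughly
    #   SELECT * FROM (SELECT a FROM t) AS inner_qry LIMIT 10
    # while FORCE_LIMIT rewrites or appends the LIMIT clause of the query
    # itself.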
@classmethod
def get_limit_from_sql(cls, sql):
parsed_query = sql_parse.ParsedQuery(sql)
return parsed_query.limit
@classmethod
def get_query_with_new_limit(cls, sql, limit):
parsed_query = sql_parse.ParsedQuery(sql)
return parsed_query.get_query_with_new_limit(limit)
@staticmethod
def csv_to_df(**kwargs):
kwargs['filepath_or_buffer'] = \
config['UPLOAD_FOLDER'] + kwargs['filepath_or_buffer']
kwargs['encoding'] = 'utf-8'
kwargs['iterator'] = True
chunks = pandas.read_csv(**kwargs)
df = pandas.DataFrame()
df = pandas.concat(chunk for chunk in chunks)
return df
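    # Note: filepath_or_buffer is joined by bare string concatenation, so
    # UPLOAD_FOLDER is expected to end with a path separator; the chunked read
    # (iterator=True, chunksize supplied by the caller) is concatenated into a
    # single frame.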
@staticmethod
def df_to_db(df, table, **kwargs):
df.to_sql(**kwargs)
table.user_id = g.user.id
table.schema = kwargs['schema']
table.fetch_metadata()
db.session.add(table)
db.session.commit()
@staticmethod
def create_table_from_csv(form, table):
def _allowed_file(filename):
extension = os.path.splitext(filename)[1]
return extension and extension[1:] in config['ALLOWED_EXTENSIONS']
filename = secure_filename(form.csv_file.data.filename)
if not _allowed_file(filename):
raise Exception('Invalid file type selected')
kwargs = {
'filepath_or_buffer': filename,
'sep': form.sep.data,
'header': form.header.data if form.header.data else 0,
'index_col': form.index_col.data,
'mangle_dupe_cols': form.mangle_dupe_cols.data,
'skipinitialspace': form.skipinitialspace.data,
'skiprows': form.skiprows.data,
'nrows': form.nrows.data,
'skip_blank_lines': form.skip_blank_lines.data,
'parse_dates': form.parse_dates.data,
'infer_datetime_format': form.infer_datetime_format.data,
'chunksize': 10000,
}
df = BaseEngineSpec.csv_to_df(**kwargs)
df_to_db_kwargs = {
'table': table,
'df': df,
'name': form.name.data,
'con': create_engine(form.con.data.sqlalchemy_uri_decrypted, echo=False),
'schema': form.schema.data,
'if_exists': form.if_exists.data,
'index': form.index.data,
'index_label': form.index_label.data,
'chunksize': 10000,
}
BaseEngineSpec.df_to_db(**df_to_db_kwargs)
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def fetch_result_sets(cls, db, datasource_type):
schemas = db.all_schema_names(cache=db.schema_cache_enabled,
cache_timeout=db.schema_cache_timeout,
force=True)
all_result_sets = []
for schema in schemas:
if datasource_type == 'table':
all_datasource_names = db.all_table_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
elif datasource_type == 'view':
all_datasource_names = db.all_view_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
all_result_sets += [
'{}.{}'.format(schema, t) for t in all_datasource_names]
return all_result_sets
@classmethod
def handle_cursor(cls, cursor, query, session):
pass
@classmethod
def extract_error_message(cls, e):
return utils.error_msg_from_exception(e)
@classmethod
def adjust_database_uri(cls, uri, selected_schema):
return uri
@classmethod
def patch(cls):
pass
@classmethod
def get_schema_names(cls, inspector):
return sorted(inspector.get_schema_names())
@classmethod
def get_table_names(cls, inspector, schema):
return sorted(inspector.get_table_names(schema))
@classmethod
def get_view_names(cls, inspector, schema):
return sorted(inspector.get_view_names(schema))
@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
return False
@classmethod
def _get_fields(cls, cols):
return [sqla.column(c.get('name')) for c in cols]
@classmethod
def select_star(cls, my_db, table_name, engine, schema=None, limit=100,
show_cols=False, indent=True, latest_partition=True,
cols=None):
fields = '*'
cols = cols or []
if (show_cols or latest_partition) and not cols:
cols = my_db.get_columns(table_name, schema)
if show_cols:
fields = cls._get_fields(cols)
quote = engine.dialect.identifier_preparer.quote
if schema:
full_table_name = quote(schema) + '.' + quote(table_name)
else:
full_table_name = quote(table_name)
qry = select(fields).select_from(text(full_table_name))
if limit:
qry = qry.limit(limit)
if latest_partition:
partition_query = cls.where_latest_partition(
table_name, schema, my_db, qry, columns=cols)
            if partition_query is not False:
qry = partition_query
sql = my_db.compile_sqla_query(qry)
if indent:
sql = sqlparse.format(sql, reindent=True)
return sql
@classmethod
def modify_url_for_impersonation(cls, url, impersonate_user, username):
if impersonate_user is not None and username is not None:
url.username = username
@classmethod
def get_configuration_for_impersonation(cls, uri, impersonate_user, username):
return {}
@classmethod
def execute(cls, cursor, query, **kwargs):
if cls.arraysize:
cursor.arraysize = cls.arraysize
cursor.execute(query)
@classmethod
def make_label_compatible(cls, label):
label = cls.mutate_label(label)
return quoted_name(label, True) if cls.force_column_alias_quotes else label
@staticmethod
def mutate_label(label):
return label
class PostgresBaseEngineSpec(BaseEngineSpec):
engine = ''
time_grain_functions = {
None: '{col}',
'PT1S': "DATE_TRUNC('second', {col}) AT TIME ZONE 'UTC'",
'PT1M': "DATE_TRUNC('minute', {col}) AT TIME ZONE 'UTC'",
'PT1H': "DATE_TRUNC('hour', {col}) AT TIME ZONE 'UTC'",
'P1D': "DATE_TRUNC('day', {col}) AT TIME ZONE 'UTC'",
'P1W': "DATE_TRUNC('week', {col}) AT TIME ZONE 'UTC'",
'P1M': "DATE_TRUNC('month', {col}) AT TIME ZONE 'UTC'",
'P0.25Y': "DATE_TRUNC('quarter', {col}) AT TIME ZONE 'UTC'",
'P1Y': "DATE_TRUNC('year', {col}) AT TIME ZONE 'UTC'",
}
@classmethod
def fetch_data(cls, cursor, limit):
if not cursor.description:
return []
if cls.limit_method == LimitMethod.FETCH_MANY:
return cursor.fetchmany(limit)
return cursor.fetchall()
@classmethod
def epoch_to_dttm(cls):
return "(timestamp 'epoch' + {col} * interval '1 second')"
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
class PostgresEngineSpec(PostgresBaseEngineSpec):
engine = 'postgresql'
@classmethod
def get_table_names(cls, inspector, schema):
tables = inspector.get_table_names(schema)
tables.extend(inspector.get_foreign_table_names(schema))
return sorted(tables)
class SnowflakeEngineSpec(PostgresBaseEngineSpec):
engine = 'snowflake'
force_column_alias_quotes = True
time_grain_functions = {
None: '{col}',
'PT1S': "DATE_TRUNC('SECOND', {col})",
'PT1M': "DATE_TRUNC('MINUTE', {col})",
'PT5M': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 5) * 5, \
DATE_TRUNC('HOUR', {col}))",
'PT10M': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 10) * 10, \
DATE_TRUNC('HOUR', {col}))",
'PT15M': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 15) * 15, \
DATE_TRUNC('HOUR', {col}))",
'PT0.5H': "DATEADD(MINUTE, FLOOR(DATE_PART(MINUTE, {col}) / 30) * 30, \
DATE_TRUNC('HOUR', {col}))",
'PT1H': "DATE_TRUNC('HOUR', {col})",
'P1D': "DATE_TRUNC('DAY', {col})",
'P1W': "DATE_TRUNC('WEEK', {col})",
'P1M': "DATE_TRUNC('MONTH', {col})",
'P0.25Y': "DATE_TRUNC('QUARTER', {col})",
'P1Y': "DATE_TRUNC('YEAR', {col})",
}
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
database = uri.database
if '/' in uri.database:
database = uri.database.split('/')[0]
if selected_schema:
uri.database = database + '/' + selected_schema
return uri
class VerticaEngineSpec(PostgresBaseEngineSpec):
engine = 'vertica'
class RedshiftEngineSpec(PostgresBaseEngineSpec):
engine = 'redshift'
@staticmethod
def mutate_label(label):
return label.lower()
class OracleEngineSpec(PostgresBaseEngineSpec):
engine = 'oracle'
limit_method = LimitMethod.WRAP_SQL
force_column_alias_quotes = True
time_grain_functions = {
None: '{col}',
'PT1S': 'CAST({col} as DATE)',
'PT1M': "TRUNC(CAST({col} as DATE), 'MI')",
'PT1H': "TRUNC(CAST({col} as DATE), 'HH')",
'P1D': "TRUNC(CAST({col} as DATE), 'DDD')",
'P1W': "TRUNC(CAST({col} as DATE), 'WW')",
'P1M': "TRUNC(CAST({col} as DATE), 'MONTH')",
'P0.25Y': "TRUNC(CAST({col} as DATE), 'Q')",
'P1Y': "TRUNC(CAST({col} as DATE), 'YEAR')",
}
@classmethod
def convert_dttm(cls, target_type, dttm):
return (
"""TO_TIMESTAMP('{}', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')"""
).format(dttm.isoformat())
@staticmethod
def mutate_label(label):
if len(label) > 30:
hashed_label = hashlib.md5(label.encode('utf-8')).hexdigest()
return hashed_label[:30]
return label
class Db2EngineSpec(BaseEngineSpec):
engine = 'ibm_db_sa'
limit_method = LimitMethod.WRAP_SQL
force_column_alias_quotes = True
time_grain_functions = {
None: '{col}',
'PT1S': 'CAST({col} as TIMESTAMP)'
' - MICROSECOND({col}) MICROSECONDS',
'PT1M': 'CAST({col} as TIMESTAMP)'
' - SECOND({col}) SECONDS'
' - MICROSECOND({col}) MICROSECONDS',
'PT1H': 'CAST({col} as TIMESTAMP)'
' - MINUTE({col}) MINUTES'
' - SECOND({col}) SECONDS'
' - MICROSECOND({col}) MICROSECONDS ',
'P1D': 'CAST({col} as TIMESTAMP)'
' - HOUR({col}) HOURS'
' - MINUTE({col}) MINUTES'
' - SECOND({col}) SECONDS'
' - MICROSECOND({col}) MICROSECONDS',
'P1W': '{col} - (DAYOFWEEK({col})) DAYS',
'P1M': '{col} - (DAY({col})-1) DAYS',
'P0.25Y': '{col} - (DAY({col})-1) DAYS'
' - (MONTH({col})-1) MONTHS'
' + ((QUARTER({col})-1) * 3) MONTHS',
'P1Y': '{col} - (DAY({col})-1) DAYS'
' - (MONTH({col})-1) MONTHS',
}
@classmethod
def epoch_to_dttm(cls):
return "(TIMESTAMP('1970-01-01', '00:00:00') + {col} SECONDS)"
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d-%H.%M.%S'))
@staticmethod
def mutate_label(label):
if len(label) > 30:
hashed_label = hashlib.md5(label.encode('utf-8')).hexdigest()
return hashed_label[:30]
return label
class SqliteEngineSpec(BaseEngineSpec):
engine = 'sqlite'
time_grain_functions = {
None: '{col}',
'PT1H': "DATETIME(STRFTIME('%Y-%m-%dT%H:00:00', {col}))",
'P1D': 'DATE({col})',
'P1W': "DATE({col}, -strftime('%W', {col}) || ' days')",
'P1M': "DATE({col}, -strftime('%d', {col}) || ' days', '+1 day')",
'P1Y': "DATETIME(STRFTIME('%Y-01-01T00:00:00', {col}))",
'P1W/1970-01-03T00:00:00Z': "DATE({col}, 'weekday 6')",
'1969-12-28T00:00:00Z/P1W': "DATE({col}, 'weekday 0', '-7 days')",
}
@classmethod
def epoch_to_dttm(cls):
return "datetime({col}, 'unixepoch')"
@classmethod
def fetch_result_sets(cls, db, datasource_type):
schemas = db.all_schema_names(cache=db.schema_cache_enabled,
cache_timeout=db.schema_cache_timeout,
force=True)
all_result_sets = []
schema = schemas[0]
if datasource_type == 'table':
all_datasource_names = db.all_table_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
elif datasource_type == 'view':
all_datasource_names = db.all_view_names_in_schema(
schema=schema, force=True,
cache=db.table_cache_enabled,
cache_timeout=db.table_cache_timeout)
all_result_sets += [
'{}.{}'.format(schema, t) for t in all_datasource_names]
return all_result_sets
@classmethod
def convert_dttm(cls, target_type, dttm):
iso = dttm.isoformat().replace('T', ' ')
if '.' not in iso:
iso += '.000000'
return "'{}'".format(iso)
@classmethod
def get_table_names(cls, inspector, schema):
return sorted(inspector.get_table_names())
class MySQLEngineSpec(BaseEngineSpec):
engine = 'mysql'
time_grain_functions = {
None: '{col}',
'PT1S': 'DATE_ADD(DATE({col}), '
'INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60'
' + SECOND({col})) SECOND)',
'PT1M': 'DATE_ADD(DATE({col}), '
'INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)',
'PT1H': 'DATE_ADD(DATE({col}), '
'INTERVAL HOUR({col}) HOUR)',
'P1D': 'DATE({col})',
'P1W': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFWEEK({col}) - 1 DAY))',
'P1M': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFMONTH({col}) - 1 DAY))',
'P0.25Y': 'MAKEDATE(YEAR({col}), 1) '
'+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER',
'P1Y': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFYEAR({col}) - 1 DAY))',
'1969-12-29T00:00:00Z/P1W': 'DATE(DATE_SUB({col}, '
'INTERVAL DAYOFWEEK(DATE_SUB({col}, INTERVAL 1 DAY)) - 1 DAY))',
}
type_code_map = {}
@classmethod
def convert_dttm(cls, target_type, dttm):
if target_type.upper() in ('DATETIME', 'DATE'):
return "STR_TO_DATE('{}', '%Y-%m-%d %H:%i:%s')".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
if selected_schema:
uri.database = selected_schema
return uri
@classmethod
def get_datatype(cls, type_code):
if not cls.type_code_map:
import MySQLdb
ft = MySQLdb.constants.FIELD_TYPE
cls.type_code_map = {
getattr(ft, k): k
for k in dir(ft)
if not k.startswith('_')
}
datatype = type_code
if isinstance(type_code, int):
datatype = cls.type_code_map.get(type_code)
if datatype and isinstance(datatype, str) and len(datatype):
return datatype
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
@classmethod
def extract_error_message(cls, e):
message = str(e)
try:
if isinstance(e.args, tuple) and len(e.args) > 1:
message = e.args[1]
except Exception:
pass
return message
class PrestoEngineSpec(BaseEngineSpec):
engine = 'presto'
time_grain_functions = {
None: '{col}',
'PT1S': "date_trunc('second', CAST({col} AS TIMESTAMP))",
'PT1M': "date_trunc('minute', CAST({col} AS TIMESTAMP))",
'PT1H': "date_trunc('hour', CAST({col} AS TIMESTAMP))",
'P1D': "date_trunc('day', CAST({col} AS TIMESTAMP))",
'P1W': "date_trunc('week', CAST({col} AS TIMESTAMP))",
'P1M': "date_trunc('month', CAST({col} AS TIMESTAMP))",
'P0.25Y': "date_trunc('quarter', CAST({col} AS TIMESTAMP))",
'P1Y': "date_trunc('year', CAST({col} AS TIMESTAMP))",
'P1W/1970-01-03T00:00:00Z':
"date_add('day', 5, date_trunc('week', date_add('day', 1, \
CAST({col} AS TIMESTAMP))))",
'1969-12-28T00:00:00Z/P1W':
"date_add('day', -1, date_trunc('week', \
date_add('day', 1, CAST({col} AS TIMESTAMP))))",
}
@classmethod
def get_view_names(cls, inspector, schema):
return []
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
database = uri.database
if selected_schema and database:
if '/' in database:
database = database.split('/')[0] + '/' + selected_schema
else:
database += '/' + selected_schema
uri.database = database
return uri
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "from_iso8601_date('{}')".format(dttm.isoformat()[:10])
if tt == 'TIMESTAMP':
return "from_iso8601_timestamp('{}')".format(dttm.isoformat())
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
@classmethod
def fetch_result_sets(cls, db, datasource_type):
result_set_df = db.get_df(
"""SELECT table_schema, table_name FROM INFORMATION_SCHEMA.{}S
ORDER BY concat(table_schema, '.', table_name)""".format(
datasource_type.upper(),
),
None)
result_sets = []
for unused, row in result_set_df.iterrows():
result_sets.append('{}.{}'.format(
row['table_schema'], row['table_name']))
return result_sets
@classmethod
def extra_table_metadata(cls, database, table_name, schema_name):
indexes = database.get_indexes(table_name, schema_name)
if not indexes:
return {}
cols = indexes[0].get('column_names', [])
full_table_name = table_name
if schema_name and '.' not in table_name:
full_table_name = '{}.{}'.format(schema_name, table_name)
pql = cls._partition_query(full_table_name)
col_name, latest_part = cls.latest_partition(
table_name, schema_name, database, show_first=True)
return {
'partitions': {
'cols': cols,
'latest': {col_name: latest_part},
'partitionQuery': pql,
},
}
@classmethod
def handle_cursor(cls, cursor, query, session):
logging.info('Polling the cursor for progress')
polled = cursor.poll()
while polled:
stats = polled.get('stats', {})
query = session.query(type(query)).filter_by(id=query.id).one()
if query.status in [QueryStatus.STOPPED, QueryStatus.TIMED_OUT]:
cursor.cancel()
break
if stats:
state = stats.get('state')
if state == 'FINISHED':
break
completed_splits = float(stats.get('completedSplits'))
total_splits = float(stats.get('totalSplits'))
if total_splits and completed_splits:
progress = 100 * (completed_splits / total_splits)
logging.info(
'Query progress: {} / {} '
'splits'.format(completed_splits, total_splits))
if progress > query.progress:
query.progress = progress
session.commit()
time.sleep(1)
logging.info('Polling the cursor for progress')
polled = cursor.poll()
@classmethod
def extract_error_message(cls, e):
if (
hasattr(e, 'orig') and
type(e.orig).__name__ == 'DatabaseError' and
isinstance(e.orig[0], dict)):
error_dict = e.orig[0]
return '{} at {}: {}'.format(
error_dict.get('errorName'),
error_dict.get('errorLocation'),
error_dict.get('message'),
)
if (
type(e).__name__ == 'DatabaseError' and
hasattr(e, 'args') and
len(e.args) > 0
):
error_dict = e.args[0]
return error_dict.get('message')
return utils.error_msg_from_exception(e)
@classmethod
def _partition_query(
cls, table_name, limit=0, order_by=None, filters=None):
limit_clause = 'LIMIT {}'.format(limit) if limit else ''
order_by_clause = ''
if order_by:
l = []
for field, desc in order_by:
                l.append(field + (' DESC' if desc else ''))
order_by_clause = 'ORDER BY ' + ', '.join(l)
where_clause = ''
if filters:
l = []
for field, value in filters.items():
l.append(f"{field} = '{value}'")
where_clause = 'WHERE ' + ' AND '.join(l)
sql = textwrap.dedent(f"""\
SHOW PARTITIONS FROM {table_name}
{where_clause}
{order_by_clause}
{limit_clause}
""")
return sql
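    # Illustrative sketch (table and filter values assumed):
    # _partition_query('logs', limit=1, order_by=[('ds', True)],
    # filters={'hour': '01'}) renders roughly
    #   SHOW PARTITIONS FROM logs
    #   WHERE hour = '01'
    #   ORDER BY ds DESC
    #   LIMIT 1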
@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
try:
col_name, value = cls.latest_partition(
table_name, schema, database, show_first=True)
except Exception:
return False
for c in columns:
if c.get('name') == col_name:
return qry.where(Column(col_name) == value)
return False
@classmethod
def _latest_partition_from_df(cls, df):
recs = df.to_records(index=False)
if recs:
return recs[0][0]
@classmethod
def latest_partition(cls, table_name, schema, database, show_first=False):
indexes = database.get_indexes(table_name, schema)
if len(indexes[0]['column_names']) < 1:
raise SupersetTemplateException(
'The table should have one partitioned field')
elif not show_first and len(indexes[0]['column_names']) > 1:
raise SupersetTemplateException(
'The table should have a single partitioned field '
'to use this function. You may want to use '
'`presto.latest_sub_partition`')
part_field = indexes[0]['column_names'][0]
sql = cls._partition_query(table_name, 1, [(part_field, True)])
df = database.get_df(sql, schema)
return part_field, cls._latest_partition_from_df(df)
@classmethod
def latest_sub_partition(cls, table_name, schema, database, **kwargs):
indexes = database.get_indexes(table_name, schema)
part_fields = indexes[0]['column_names']
for k in kwargs.keys():
            if k not in part_fields:
                msg = f'Field [{k}] is not part of the partitioning key'
                raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:
msg = (
'A filter needs to be specified for {} out of the '
'{} fields.'
).format(len(part_fields) - 1, len(part_fields))
raise SupersetTemplateException(msg)
for field in part_fields:
if field not in kwargs.keys():
field_to_return = field
sql = cls._partition_query(
table_name, 1, [(field_to_return, True)], kwargs)
df = database.get_df(sql, schema)
if df.empty:
return ''
return df.to_dict()[field_to_return][0]
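    # Illustrative sketch (hypothetical table): with partition keys
    # (ds, event_type), latest_sub_partition('logs', schema, db, ds='2021-01-01')
    # pins ds via the WHERE clause and returns the largest event_type value
    # found for that day, or '' when no partition matches.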
class HiveEngineSpec(PrestoEngineSpec):
engine = 'hive'
jobs_stats_r = re.compile(
r'.*INFO.*Total jobs = (?P<max_jobs>[0-9]+)')
launching_job_r = re.compile(
'.*INFO.*Launching Job (?P<job_number>[0-9]+) out of '
'(?P<max_jobs>[0-9]+)')
stage_progress_r = re.compile(
r'.*INFO.*Stage-(?P<stage_number>[0-9]+).*'
r'map = (?P<map_progress>[0-9]+)%.*'
r'reduce = (?P<reduce_progress>[0-9]+)%.*')
@classmethod
def patch(cls):
from pyhive import hive
from superset.db_engines import hive as patched_hive
from TCLIService import (
constants as patched_constants,
ttypes as patched_ttypes,
TCLIService as patched_TCLIService)
hive.TCLIService = patched_TCLIService
hive.constants = patched_constants
hive.ttypes = patched_ttypes
hive.Cursor.fetch_logs = patched_hive.fetch_logs
@classmethod
def fetch_result_sets(cls, db, datasource_type):
return BaseEngineSpec.fetch_result_sets(
db, datasource_type)
@classmethod
def fetch_data(cls, cursor, limit):
import pyhive
from TCLIService import ttypes
state = cursor.poll()
if state.operationState == ttypes.TOperationState.ERROR_STATE:
raise Exception('Query error', state.errorMessage)
try:
return super(HiveEngineSpec, cls).fetch_data(cursor, limit)
except pyhive.exc.ProgrammingError:
return []
@staticmethod
def create_table_from_csv(form, table):
def convert_to_hive_type(col_type):
tableschema_to_hive_types = {
'boolean': 'BOOLEAN',
'integer': 'INT',
'number': 'DOUBLE',
'string': 'STRING',
}
return tableschema_to_hive_types.get(col_type, 'STRING')
bucket_path = config['CSV_TO_HIVE_UPLOAD_S3_BUCKET']
if not bucket_path:
logging.info('No upload bucket specified')
raise Exception(
'No upload bucket specified. You can specify one in the config file.')
table_name = form.name.data
schema_name = form.schema.data
if config.get('UPLOADED_CSV_HIVE_NAMESPACE'):
if '.' in table_name or schema_name:
raise Exception(
"You can't specify a namespace. "
'All tables will be uploaded to the `{}` namespace'.format(
config.get('HIVE_NAMESPACE')))
full_table_name = '{}.{}'.format(
config.get('UPLOADED_CSV_HIVE_NAMESPACE'), table_name)
else:
if '.' in table_name and schema_name:
raise Exception(
"You can't specify a namespace both in the name of the table "
'and in the schema field. Please remove one')
full_table_name = '{}.{}'.format(
schema_name, table_name) if schema_name else table_name
filename = form.csv_file.data.filename
upload_prefix = config['CSV_TO_HIVE_UPLOAD_DIRECTORY']
upload_path = config['UPLOAD_FOLDER'] + \
secure_filename(filename)
from tableschema import Table
hive_table_schema = Table(upload_path).infer()
column_name_and_type = []
for column_info in hive_table_schema['fields']:
column_name_and_type.append(
'`{}` {}'.format(
column_info['name'],
convert_to_hive_type(column_info['type'])))
schema_definition = ', '.join(column_name_and_type)
import boto3
s3 = boto3.client('s3')
location = os.path.join('s3a://', bucket_path, upload_prefix, table_name)
s3.upload_file(
upload_path, bucket_path,
os.path.join(upload_prefix, table_name, filename))
sql = f"""CREATE TABLE {full_table_name} ( {schema_definition} )
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS
TEXTFILE LOCATION '{location}'
tblproperties ('skip.header.line.count'='1')"""
logging.info(form.con.data)
engine = create_engine(form.con.data.sqlalchemy_uri_decrypted)
engine.execute(sql)
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
elif tt == 'TIMESTAMP':
return "CAST('{}' AS TIMESTAMP)".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def adjust_database_uri(cls, uri, selected_schema=None):
if selected_schema:
uri.database = selected_schema
return uri
@classmethod
def extract_error_message(cls, e):
msg = str(e)
match = re.search(r'errorMessage="(.*?)(?<!\\)"', msg)
if match:
msg = match.group(1)
return msg
@classmethod
def progress(cls, log_lines):
total_jobs = 1
current_job = 1
stages = {}
for line in log_lines:
match = cls.jobs_stats_r.match(line)
if match:
total_jobs = int(match.groupdict()['max_jobs']) or 1
match = cls.launching_job_r.match(line)
if match:
current_job = int(match.groupdict()['job_number'])
total_jobs = int(match.groupdict()['max_jobs']) or 1
stages = {}
match = cls.stage_progress_r.match(line)
if match:
stage_number = int(match.groupdict()['stage_number'])
map_progress = int(match.groupdict()['map_progress'])
reduce_progress = int(match.groupdict()['reduce_progress'])
stages[stage_number] = (map_progress + reduce_progress) / 2
logging.info(
'Progress detail: {}, '
'current job {}, '
'total jobs: {}'.format(stages, current_job, total_jobs))
stage_progress = sum(
stages.values()) / len(stages.values()) if stages else 0
progress = (
100 * (current_job - 1) / total_jobs + stage_progress / total_jobs
)
return int(progress)
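    # Worked example (log excerpt assumed): given 'Total jobs = 2', job 1
    # launched, and one stage reporting map = 80%, reduce = 40%, the stage
    # average is (80 + 40) / 2 = 60, so progress = 100 * (1 - 1) / 2 + 60 / 2
    # = 30.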
@classmethod
def get_tracking_url(cls, log_lines):
lkp = 'Tracking URL = '
for line in log_lines:
if lkp in line:
return line.split(lkp)[1]
@classmethod
def handle_cursor(cls, cursor, query, session):
from pyhive import hive # pylint: disable=no-name-in-module
unfinished_states = (
hive.ttypes.TOperationState.INITIALIZED_STATE,
hive.ttypes.TOperationState.RUNNING_STATE,
)
polled = cursor.poll()
last_log_line = 0
tracking_url = None
job_id = None
while polled.operationState in unfinished_states:
query = session.query(type(query)).filter_by(id=query.id).one()
if query.status == QueryStatus.STOPPED:
cursor.cancel()
break
log = cursor.fetch_logs() or ''
if log:
log_lines = log.splitlines()
progress = cls.progress(log_lines)
logging.info('Progress total: {}'.format(progress))
needs_commit = False
if progress > query.progress:
query.progress = progress
needs_commit = True
if not tracking_url:
tracking_url = cls.get_tracking_url(log_lines)
if tracking_url:
job_id = tracking_url.split('/')[-2]
logging.info(
'Found the tracking url: {}'.format(tracking_url))
tracking_url = tracking_url_trans(tracking_url)
logging.info(
'Transformation applied: {}'.format(tracking_url))
query.tracking_url = tracking_url
logging.info('Job id: {}'.format(job_id))
needs_commit = True
if job_id and len(log_lines) > last_log_line:
# Wait for job id before logging things out
# this allows for prefixing all log lines and becoming
# searchable in something like Kibana
for l in log_lines[last_log_line:]:
logging.info('[{}] {}'.format(job_id, l))
last_log_line = len(log_lines)
if needs_commit:
session.commit()
time.sleep(hive_poll_interval)
polled = cursor.poll()
@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
try:
col_name, value = cls.latest_partition(
table_name, schema, database, show_first=True)
except Exception:
# table is not partitioned
return False
for c in columns:
if c.get('name') == col_name:
return qry.where(Column(col_name) == value)
return False
@classmethod
def latest_sub_partition(cls, table_name, schema, database, **kwargs):
        # TODO(bogdan): implement
pass
@classmethod
def _latest_partition_from_df(cls, df):
        # pandas removed DataFrame.ix; iloc keeps the original positional access
        return df.iloc[:, 0].max().split('=')[1]
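    # e.g. a partitions frame whose first column holds 'ds=2021-01-01' (a value
    # assumed for illustration) returns '2021-01-01'.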
@classmethod
def _partition_query(
cls, table_name, limit=0, order_by=None, filters=None):
return f'SHOW PARTITIONS {table_name}'
@classmethod
def modify_url_for_impersonation(cls, url, impersonate_user, username):
# Do nothing in the URL object since instead this should modify
        # the configuration dictionary. See get_configuration_for_impersonation
pass
@classmethod
def get_configuration_for_impersonation(cls, uri, impersonate_user, username):
configuration = {}
url = make_url(uri)
backend_name = url.get_backend_name()
# Must be Hive connection, enable impersonation, and set param auth=LDAP|KERBEROS
if (backend_name == 'hive' and 'auth' in url.query.keys() and
impersonate_user is True and username is not None):
configuration['hive.server2.proxy.user'] = username
return configuration
@staticmethod
def execute(cursor, query, async_=False):
kwargs = {'async': async_}
cursor.execute(query, **kwargs)
class MssqlEngineSpec(BaseEngineSpec):
engine = 'mssql'
epoch_to_dttm = "dateadd(S, {col}, '1970-01-01')"
limit_method = LimitMethod.WRAP_SQL
time_grain_functions = {
None: '{col}',
'PT1S': "DATEADD(second, DATEDIFF(second, '2000-01-01', {col}), '2000-01-01')",
'PT1M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}), 0)',
'PT5M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 5 * 5, 0)',
'PT10M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 10 * 10, 0)',
'PT15M': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 15 * 15, 0)',
'PT0.5H': 'DATEADD(minute, DATEDIFF(minute, 0, {col}) / 30 * 30, 0)',
'PT1H': 'DATEADD(hour, DATEDIFF(hour, 0, {col}), 0)',
'P1D': 'DATEADD(day, DATEDIFF(day, 0, {col}), 0)',
'P1W': 'DATEADD(week, DATEDIFF(week, 0, {col}), 0)',
'P1M': 'DATEADD(month, DATEDIFF(month, 0, {col}), 0)',
'P0.25Y': 'DATEADD(quarter, DATEDIFF(quarter, 0, {col}), 0)',
'P1Y': 'DATEADD(year, DATEDIFF(year, 0, {col}), 0)',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
return "CONVERT(DATETIME, '{}', 126)".format(dttm.isoformat())
@classmethod
def fetch_data(cls, cursor, limit):
data = super(MssqlEngineSpec, cls).fetch_data(cursor, limit)
if len(data) != 0 and type(data[0]).__name__ == 'Row':
data = [[elem for elem in r] for r in data]
return data
class AthenaEngineSpec(BaseEngineSpec):
engine = 'awsathena'
time_grain_functions = {
None: '{col}',
'PT1S': "date_trunc('second', CAST({col} AS TIMESTAMP))",
'PT1M': "date_trunc('minute', CAST({col} AS TIMESTAMP))",
'PT1H': "date_trunc('hour', CAST({col} AS TIMESTAMP))",
'P1D': "date_trunc('day', CAST({col} AS TIMESTAMP))",
'P1W': "date_trunc('week', CAST({col} AS TIMESTAMP))",
'P1M': "date_trunc('month', CAST({col} AS TIMESTAMP))",
'P0.25Y': "date_trunc('quarter', CAST({col} AS TIMESTAMP))",
'P1Y': "date_trunc('year', CAST({col} AS TIMESTAMP))",
'P1W/1970-01-03T00:00:00Z': "date_add('day', 5, date_trunc('week', \
date_add('day', 1, CAST({col} AS TIMESTAMP))))",
'1969-12-28T00:00:00Z/P1W': "date_add('day', -1, date_trunc('week', \
date_add('day', 1, CAST({col} AS TIMESTAMP))))",
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "from_iso8601_date('{}')".format(dttm.isoformat()[:10])
if tt == 'TIMESTAMP':
return "from_iso8601_timestamp('{}')".format(dttm.isoformat())
return ("CAST ('{}' AS TIMESTAMP)"
.format(dttm.strftime('%Y-%m-%d %H:%M:%S')))
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
class ClickHouseEngineSpec(BaseEngineSpec):
engine = 'clickhouse'
time_secondary_columns = True
time_groupby_inline = True
time_grain_functions = {
None: '{col}',
'PT1M': 'toStartOfMinute(toDateTime({col}))',
'PT5M': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 300)*300)',
'PT10M': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 600)*600)',
'PT15M': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 900)*900)',
'PT0.5H': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 1800)*1800)',
'PT1H': 'toStartOfHour(toDateTime({col}))',
'P1D': 'toStartOfDay(toDateTime({col}))',
'P1W': 'toMonday(toDateTime({col}))',
'P1M': 'toStartOfMonth(toDateTime({col}))',
'P0.25Y': 'toStartOfQuarter(toDateTime({col}))',
'P1Y': 'toStartOfYear(toDateTime({col}))',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "toDate('{}')".format(dttm.strftime('%Y-%m-%d'))
if tt == 'DATETIME':
return "toDateTime('{}')".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
class BQEngineSpec(BaseEngineSpec):
engine = 'bigquery'
arraysize = 5000
time_grain_functions = {
None: '{col}',
'PT1S': 'TIMESTAMP_TRUNC({col}, SECOND)',
'PT1M': 'TIMESTAMP_TRUNC({col}, MINUTE)',
'PT1H': 'TIMESTAMP_TRUNC({col}, HOUR)',
'P1D': 'TIMESTAMP_TRUNC({col}, DAY)',
'P1W': 'TIMESTAMP_TRUNC({col}, WEEK)',
'P1M': 'TIMESTAMP_TRUNC({col}, MONTH)',
'P0.25Y': 'TIMESTAMP_TRUNC({col}, QUARTER)',
'P1Y': 'TIMESTAMP_TRUNC({col}, YEAR)',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "'{}'".format(dttm.strftime('%Y-%m-%d'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def fetch_data(cls, cursor, limit):
data = super(BQEngineSpec, cls).fetch_data(cursor, limit)
if len(data) != 0 and type(data[0]).__name__ == 'Row':
data = [r.values() for r in data]
return data
@staticmethod
def mutate_label(label):
hashed_label = '_' + hashlib.md5(label.encode('utf-8')).hexdigest()
# if label starts with number, add underscore as first character
mutated_label = '_' + label if re.match(r'^\d', label) else label
# replace non-alphanumeric characters with underscores
mutated_label = re.sub(r'[^\w]+', '_', mutated_label)
if mutated_label != label:
# add md5 hash to label to avoid possible collisions
mutated_label += hashed_label
# return only hash if length of final label exceeds 128 chars
return mutated_label if len(mutated_label) <= 128 else hashed_label
@classmethod
def extra_table_metadata(cls, database, table_name, schema_name):
indexes = database.get_indexes(table_name, schema_name)
if not indexes:
return {}
partitions_columns = [
index.get('column_names', []) for index in indexes
if index.get('name') == 'partition'
]
cluster_columns = [
index.get('column_names', []) for index in indexes
if index.get('name') == 'clustering'
]
return {
'partitions': {
'cols': partitions_columns,
},
'clustering': {
'cols': cluster_columns,
},
}
@classmethod
def _get_fields(cls, cols):
return [sqla.literal_column(c.get('name')).label(c.get('name').replace('.', '__'))
for c in cols]
class ImpalaEngineSpec(BaseEngineSpec):
engine = 'impala'
time_grain_functions = {
None: '{col}',
'PT1M': "TRUNC({col}, 'MI')",
'PT1H': "TRUNC({col}, 'HH')",
'P1D': "TRUNC({col}, 'DD')",
'P1W': "TRUNC({col}, 'WW')",
'P1M': "TRUNC({col}, 'MONTH')",
'P0.25Y': "TRUNC({col}, 'Q')",
'P1Y': "TRUNC({col}, 'YYYY')",
}
@classmethod
def epoch_to_dttm(cls):
return 'from_unixtime({col})'
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "'{}'".format(dttm.strftime('%Y-%m-%d'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@classmethod
def get_schema_names(cls, inspector):
schemas = [row[0] for row in inspector.engine.execute('SHOW SCHEMAS')
if not row[0].startswith('_')]
return schemas
class DruidEngineSpec(BaseEngineSpec):
engine = 'druid'
inner_joins = False
allows_subquery = False
time_grain_functions = {
None: '{col}',
'PT1S': 'FLOOR({col} TO SECOND)',
'PT1M': 'FLOOR({col} TO MINUTE)',
'PT1H': 'FLOOR({col} TO HOUR)',
'P1D': 'FLOOR({col} TO DAY)',
'P1W': 'FLOOR({col} TO WEEK)',
'P1M': 'FLOOR({col} TO MONTH)',
'P0.25Y': 'FLOOR({col} TO QUARTER)',
'P1Y': 'FLOOR({col} TO YEAR)',
}
class GSheetsEngineSpec(SqliteEngineSpec):
engine = 'gsheets'
inner_joins = False
allows_subquery = False
class KylinEngineSpec(BaseEngineSpec):
engine = 'kylin'
time_grain_functions = {
None: '{col}',
'PT1S': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO SECOND) AS TIMESTAMP)',
'PT1M': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MINUTE) AS TIMESTAMP)',
'PT1H': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO HOUR) AS TIMESTAMP)',
'P1D': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO DAY) AS DATE)',
'P1W': 'CAST(TIMESTAMPADD(WEEK, WEEK(CAST({col} AS DATE)) - 1, \
FLOOR(CAST({col} AS TIMESTAMP) TO YEAR)) AS DATE)',
'P1M': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MONTH) AS DATE)',
'P0.25Y': 'CAST(TIMESTAMPADD(QUARTER, QUARTER(CAST({col} AS DATE)) - 1, \
FLOOR(CAST({col} AS TIMESTAMP) TO YEAR)) AS DATE)',
'P1Y': 'CAST(FLOOR(CAST({col} AS TIMESTAMP) TO YEAR) AS DATE)',
}
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
if tt == 'DATE':
return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10])
if tt == 'TIMESTAMP':
return "CAST('{}' AS TIMESTAMP)".format(
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
class TeradataEngineSpec(BaseEngineSpec):
engine = 'teradata'
limit_method = LimitMethod.WRAP_SQL
time_grain_functions = {
None: '{col}',
'PT1M': "TRUNC(CAST({col} as DATE), 'MI')",
'PT1H': "TRUNC(CAST({col} as DATE), 'HH')",
'P1D': "TRUNC(CAST({col} as DATE), 'DDD')",
'P1W': "TRUNC(CAST({col} as DATE), 'WW')",
'P1M': "TRUNC(CAST({col} as DATE), 'MONTH')",
'P0.25Y': "TRUNC(CAST({col} as DATE), 'Q')",
'P1Y': "TRUNC(CAST({col} as DATE), 'YEAR')",
}
engines = {
o.engine: o for o in globals().values()
if inspect.isclass(o) and issubclass(o, BaseEngineSpec)}
| true | true |
f724783584f78b08b18463f09ec4dd4c262a7666 | 2,714 | py | Python | lib/metrics.py | ppmdatix/rtdl | a01ecd9ae6b673f4e82e51f804ffd7031c7350a0 | [
"Apache-2.0"
] | 298 | 2021-06-22T15:41:18.000Z | 2022-03-09T07:52:30.000Z | lib/metrics.py | ppmdatix/rtdl | a01ecd9ae6b673f4e82e51f804ffd7031c7350a0 | [
"Apache-2.0"
] | 15 | 2021-07-27T05:39:21.000Z | 2022-02-25T11:33:32.000Z | lib/metrics.py | ppmdatix/rtdl | a01ecd9ae6b673f4e82e51f804ffd7031c7350a0 | [
"Apache-2.0"
] | 37 | 2021-06-25T03:56:37.000Z | 2022-03-10T11:07:51.000Z | import typing as ty
import numpy as np
import scipy.special
import sklearn.metrics as skm
from . import util
def calculate_metrics(
task_type: str,
y: np.ndarray,
prediction: np.ndarray,
classification_mode: str,
y_info: ty.Optional[ty.Dict[str, ty.Any]],
) -> ty.Dict[str, float]:
if task_type == util.REGRESSION:
del classification_mode
rmse = skm.mean_squared_error(y, prediction) ** 0.5 # type: ignore[code]
if y_info:
if y_info['policy'] == 'mean_std':
rmse *= y_info['std']
else:
assert False
return {'rmse': rmse, 'score': -rmse}
else:
assert task_type in (util.BINCLASS, util.MULTICLASS)
labels = None
if classification_mode == 'probs':
probs = prediction
elif classification_mode == 'logits':
probs = (
scipy.special.expit(prediction)
if task_type == util.BINCLASS
else scipy.special.softmax(prediction, axis=1)
)
else:
assert classification_mode == 'labels'
probs = None
labels = prediction
if labels is None:
labels = (
np.round(probs).astype('int64')
if task_type == util.BINCLASS
else probs.argmax(axis=1) # type: ignore[code]
)
result = skm.classification_report(y, labels, output_dict=True) # type: ignore[code]
if task_type == util.BINCLASS:
result['roc_auc'] = skm.roc_auc_score(y, probs) # type: ignore[code]
result['score'] = result['accuracy'] # type: ignore[code]
return result # type: ignore[code]
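# Minimal usage sketch (arrays assumed): for a regression target standardized
# with y_info={'policy': 'mean_std', 'std': 2.0}, an RMSE of 0.5 on the
# normalized scale is reported as 0.5 * 2.0 = 1.0 in the original units, and
# 'score' is its negation (-1.0) so that higher is always better.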
def make_summary(metrics: ty.Dict[str, ty.Any]) -> str:
precision = 3
summary = {}
for k, v in metrics.items():
if k.isdigit():
continue
k = {
'score': 'SCORE',
'accuracy': 'acc',
'roc_auc': 'roc_auc',
'macro avg': 'm',
'weighted avg': 'w',
}.get(k, k)
if isinstance(v, float):
v = round(v, precision)
summary[k] = v
else:
v = {
{'precision': 'p', 'recall': 'r', 'f1-score': 'f1', 'support': 's'}.get(
x, x
): round(v[x], precision)
for x in v
}
for item in v.items():
summary[k + item[0]] = item[1]
s = [f'score = {summary.pop("SCORE"):.3f}']
for k, v in summary.items():
if k not in ['mp', 'mr', 'wp', 'wr']: # just to save screen space
s.append(f'{k} = {v}')
return ' | '.join(s)
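# Illustrative output (values assumed): a binary-classification metrics dict
# might summarize to a line like
# 'score = 0.845 | acc = 0.845 | mf1 = 0.84 | ms = 1000 | wf1 = 0.845 | ws = 1000 | roc_auc = 0.901'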
| 31.55814 | 93 | 0.507369 |
import typing as ty
import numpy as np
import scipy.special
import sklearn.metrics as skm
from . import util
def calculate_metrics(
task_type: str,
y: np.ndarray,
prediction: np.ndarray,
classification_mode: str,
y_info: ty.Optional[ty.Dict[str, ty.Any]],
) -> ty.Dict[str, float]:
if task_type == util.REGRESSION:
del classification_mode
rmse = skm.mean_squared_error(y, prediction) ** 0.5
if y_info:
if y_info['policy'] == 'mean_std':
rmse *= y_info['std']
else:
assert False
return {'rmse': rmse, 'score': -rmse}
else:
assert task_type in (util.BINCLASS, util.MULTICLASS)
labels = None
if classification_mode == 'probs':
probs = prediction
elif classification_mode == 'logits':
probs = (
scipy.special.expit(prediction)
if task_type == util.BINCLASS
else scipy.special.softmax(prediction, axis=1)
)
else:
assert classification_mode == 'labels'
probs = None
labels = prediction
if labels is None:
labels = (
np.round(probs).astype('int64')
if task_type == util.BINCLASS
else probs.argmax(axis=1)
)
result = skm.classification_report(y, labels, output_dict=True)
if task_type == util.BINCLASS:
result['roc_auc'] = skm.roc_auc_score(y, probs)
result['score'] = result['accuracy']
return result
def make_summary(metrics: ty.Dict[str, ty.Any]) -> str:
precision = 3
summary = {}
for k, v in metrics.items():
if k.isdigit():
continue
k = {
'score': 'SCORE',
'accuracy': 'acc',
'roc_auc': 'roc_auc',
'macro avg': 'm',
'weighted avg': 'w',
}.get(k, k)
if isinstance(v, float):
v = round(v, precision)
summary[k] = v
else:
v = {
{'precision': 'p', 'recall': 'r', 'f1-score': 'f1', 'support': 's'}.get(
x, x
): round(v[x], precision)
for x in v
}
for item in v.items():
summary[k + item[0]] = item[1]
s = [f'score = {summary.pop("SCORE"):.3f}']
for k, v in summary.items():
if k not in ['mp', 'mr', 'wp', 'wr']:
s.append(f'{k} = {v}')
return ' | '.join(s)
| true | true |
f724797778a03ff70b6b2d2cacc77e8f0dc791c8 | 1,451 | py | Python | api/tests.py | toast38coza/KongOAuth | 827d6f0cb47c67903f0a0236f56cd20c18bb84bb | [
"MIT"
] | null | null | null | api/tests.py | toast38coza/KongOAuth | 827d6f0cb47c67903f0a0236f56cd20c18bb84bb | [
"MIT"
] | null | null | null | api/tests.py | toast38coza/KongOAuth | 827d6f0cb47c67903f0a0236f56cd20c18bb84bb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, override_settings
from django.contrib.auth import get_user_model
import json
import responses
## responses:
def kong_login_success():
responses.add(
responses.POST,
'https://kong:8443/test/oauth2/token',
body=json.dumps({'refresh_token': 'rtoken', 'token_type': 'bearer', 'access_token': 'atoken', 'expires_in': 7200}),
status=200,
content_type='application/json'
)
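# The helper above registers a canned Kong token payload with the `responses`
# library, so the requests-level POST triggered in OAuthTestCase.setUp is
# intercepted and never leaves the test process.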
class OAuthTestCase(TestCase):
def login(self, data):
return self.client.post('/oauth2/token/', json.dumps(data), content_type="application/json")
@override_settings(KONG_GATEWAY_URL='https://kong:8443')
@responses.activate
def setUp(self):
self.user = get_user_model().objects.create_user(username='admin', password='testtest1234')
data = {
"username": "admin",
"password": "testtest1234",
"client_id": "cliendid",
"client_secret": "secret"
}
kong_login_success()
self.result = self.login(data)
def test_is_ok(self):
assert self.result.status_code == 200
@responses.activate # assert no response is made
def test_invalid_login_returns_401(self):
data = {
"username": "foo",
"password": "bar",
}
result = self.login(data)
assert result.status_code == 401
| 29.612245 | 123 | 0.637491 |
from __future__ import unicode_literals
from django.test import TestCase, override_settings
from django.contrib.auth import get_user_model
import json
import responses
def kong_login_success():
responses.add(
responses.POST,
'https://kong:8443/test/oauth2/token',
body=json.dumps({'refresh_token': 'rtoken', 'token_type': 'bearer', 'access_token': 'atoken', 'expires_in': 7200}),
status=200,
content_type='application/json'
)
class OAuthTestCase(TestCase):
def login(self, data):
return self.client.post('/oauth2/token/', json.dumps(data), content_type="application/json")
@override_settings(KONG_GATEWAY_URL='https://kong:8443')
@responses.activate
def setUp(self):
self.user = get_user_model().objects.create_user(username='admin', password='testtest1234')
data = {
"username": "admin",
"password": "testtest1234",
"client_id": "cliendid",
"client_secret": "secret"
}
kong_login_success()
self.result = self.login(data)
def test_is_ok(self):
assert self.result.status_code == 200
@responses.activate
def test_invalid_login_returns_401(self):
data = {
"username": "foo",
"password": "bar",
}
result = self.login(data)
assert result.status_code == 401
| true | true |
f7247a8886b4c59ba58b43b090bbe0a5d941f51a | 9,304 | py | Python | monai/transforms/__init__.py | marksgraham/MONAI | 42591511e9493fedd70af857344cc91073b867e8 | [
"Apache-2.0"
] | null | null | null | monai/transforms/__init__.py | marksgraham/MONAI | 42591511e9493fedd70af857344cc91073b867e8 | [
"Apache-2.0"
] | null | null | null | monai/transforms/__init__.py | marksgraham/MONAI | 42591511e9493fedd70af857344cc91073b867e8 | [
"Apache-2.0"
] | 1 | 2021-01-19T19:35:00.000Z | 2021-01-19T19:35:00.000Z | # Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .adaptors import FunctionSignature, adaptor, apply_alias, to_kwargs
from .compose import Compose
from .croppad.array import (
BorderPad,
BoundingRect,
CenterSpatialCrop,
CropForeground,
DivisiblePad,
RandCropByPosNegLabel,
RandSpatialCrop,
RandSpatialCropSamples,
RandWeightedCrop,
ResizeWithPadOrCrop,
SpatialCrop,
SpatialPad,
)
from .croppad.batch import PadListDataCollate
from .croppad.dictionary import (
BorderPadd,
BorderPadD,
BorderPadDict,
BoundingRectd,
BoundingRectD,
BoundingRectDict,
CenterSpatialCropd,
CenterSpatialCropD,
CenterSpatialCropDict,
CropForegroundd,
CropForegroundD,
CropForegroundDict,
DivisiblePadd,
DivisiblePadD,
DivisiblePadDict,
NumpyPadModeSequence,
RandCropByPosNegLabeld,
RandCropByPosNegLabelD,
RandCropByPosNegLabelDict,
RandSpatialCropd,
RandSpatialCropD,
RandSpatialCropDict,
RandSpatialCropSamplesd,
RandSpatialCropSamplesD,
RandSpatialCropSamplesDict,
RandWeightedCropd,
RandWeightedCropD,
RandWeightedCropDict,
ResizeWithPadOrCropd,
ResizeWithPadOrCropD,
ResizeWithPadOrCropDict,
SpatialCropd,
SpatialCropD,
SpatialCropDict,
SpatialPadd,
SpatialPadD,
SpatialPadDict,
)
from .intensity.array import (
AdjustContrast,
DetectEnvelope,
GaussianSharpen,
GaussianSmooth,
MaskIntensity,
NormalizeIntensity,
RandAdjustContrast,
RandBiasField,
RandGaussianNoise,
RandGaussianSharpen,
RandGaussianSmooth,
RandHistogramShift,
RandRicianNoise,
RandScaleIntensity,
RandShiftIntensity,
RandStdShiftIntensity,
SavitzkyGolaySmooth,
ScaleIntensity,
ScaleIntensityRange,
ScaleIntensityRangePercentiles,
ShiftIntensity,
StdShiftIntensity,
ThresholdIntensity,
)
from .intensity.dictionary import (
AdjustContrastd,
AdjustContrastD,
AdjustContrastDict,
GaussianSharpend,
GaussianSharpenD,
GaussianSharpenDict,
GaussianSmoothd,
GaussianSmoothD,
GaussianSmoothDict,
MaskIntensityd,
MaskIntensityD,
MaskIntensityDict,
NormalizeIntensityd,
NormalizeIntensityD,
NormalizeIntensityDict,
RandAdjustContrastd,
RandAdjustContrastD,
RandAdjustContrastDict,
RandBiasFieldd,
RandBiasFieldD,
RandBiasFieldDict,
RandGaussianNoised,
RandGaussianNoiseD,
RandGaussianNoiseDict,
RandGaussianSharpend,
RandGaussianSharpenD,
RandGaussianSharpenDict,
RandGaussianSmoothd,
RandGaussianSmoothD,
RandGaussianSmoothDict,
RandHistogramShiftd,
RandHistogramShiftD,
RandHistogramShiftDict,
RandRicianNoised,
RandRicianNoiseD,
RandRicianNoiseDict,
RandScaleIntensityd,
RandScaleIntensityD,
RandScaleIntensityDict,
RandShiftIntensityd,
RandShiftIntensityD,
RandShiftIntensityDict,
RandStdShiftIntensityd,
RandStdShiftIntensityD,
RandStdShiftIntensityDict,
ScaleIntensityd,
ScaleIntensityD,
ScaleIntensityDict,
ScaleIntensityRanged,
ScaleIntensityRangeD,
ScaleIntensityRangeDict,
ScaleIntensityRangePercentilesd,
ScaleIntensityRangePercentilesD,
ScaleIntensityRangePercentilesDict,
ShiftIntensityd,
ShiftIntensityD,
ShiftIntensityDict,
StdShiftIntensityd,
StdShiftIntensityD,
StdShiftIntensityDict,
ThresholdIntensityd,
ThresholdIntensityD,
ThresholdIntensityDict,
)
from .inverse import InvertibleTransform
from .io.array import LoadImage, SaveImage
from .io.dictionary import LoadImaged, LoadImageD, LoadImageDict, SaveImaged, SaveImageD, SaveImageDict
from .post.array import (
Activations,
AsDiscrete,
KeepLargestConnectedComponent,
LabelToContour,
MeanEnsemble,
ProbNMS,
VoteEnsemble,
)
from .post.dictionary import (
Activationsd,
ActivationsD,
ActivationsDict,
AsDiscreted,
AsDiscreteD,
AsDiscreteDict,
Decollated,
DecollateD,
DecollateDict,
Ensembled,
KeepLargestConnectedComponentd,
KeepLargestConnectedComponentD,
KeepLargestConnectedComponentDict,
LabelToContourd,
LabelToContourD,
LabelToContourDict,
MeanEnsembled,
MeanEnsembleD,
MeanEnsembleDict,
ProbNMSd,
ProbNMSD,
ProbNMSDict,
VoteEnsembled,
VoteEnsembleD,
VoteEnsembleDict,
)
from .spatial.array import (
Affine,
AffineGrid,
Flip,
Orientation,
Rand2DElastic,
Rand3DElastic,
RandAffine,
RandAffineGrid,
RandAxisFlip,
RandDeformGrid,
RandFlip,
RandRotate,
RandRotate90,
RandZoom,
Resample,
Resize,
Rotate,
Rotate90,
Spacing,
Zoom,
)
from .spatial.dictionary import (
Affined,
AffineD,
AffineDict,
Flipd,
FlipD,
FlipDict,
Orientationd,
OrientationD,
OrientationDict,
Rand2DElasticd,
Rand2DElasticD,
Rand2DElasticDict,
Rand3DElasticd,
Rand3DElasticD,
Rand3DElasticDict,
RandAffined,
RandAffineD,
RandAffineDict,
RandAxisFlipd,
RandAxisFlipD,
RandAxisFlipDict,
RandFlipd,
RandFlipD,
RandFlipDict,
RandRotate90d,
RandRotate90D,
RandRotate90Dict,
RandRotated,
RandRotateD,
RandRotateDict,
RandZoomd,
RandZoomD,
RandZoomDict,
Resized,
ResizeD,
ResizeDict,
Rotate90d,
Rotate90D,
Rotate90Dict,
Rotated,
RotateD,
RotateDict,
Spacingd,
SpacingD,
SpacingDict,
Zoomd,
ZoomD,
ZoomDict,
)
from .transform import MapTransform, Randomizable, RandomizableTransform, Transform, apply_transform
from .utility.array import (
AddChannel,
AddExtremePointsChannel,
AsChannelFirst,
AsChannelLast,
CastToType,
ConvertToMultiChannelBasedOnBratsClasses,
DataStats,
EnsureChannelFirst,
FgBgToIndices,
Identity,
LabelToMask,
Lambda,
MapLabelValue,
RemoveRepeatedChannel,
RepeatChannel,
SimulateDelay,
SplitChannel,
SqueezeDim,
ToCupy,
ToNumpy,
ToPIL,
TorchVision,
ToTensor,
Transpose,
)
from .utility.dictionary import (
AddChanneld,
AddChannelD,
AddChannelDict,
AddExtremePointsChanneld,
AddExtremePointsChannelD,
AddExtremePointsChannelDict,
AsChannelFirstd,
AsChannelFirstD,
AsChannelFirstDict,
AsChannelLastd,
AsChannelLastD,
AsChannelLastDict,
CastToTyped,
CastToTypeD,
CastToTypeDict,
ConcatItemsd,
ConcatItemsD,
ConcatItemsDict,
ConvertToMultiChannelBasedOnBratsClassesd,
ConvertToMultiChannelBasedOnBratsClassesD,
ConvertToMultiChannelBasedOnBratsClassesDict,
CopyItemsd,
CopyItemsD,
CopyItemsDict,
DataStatsd,
DataStatsD,
DataStatsDict,
DeleteItemsd,
DeleteItemsD,
DeleteItemsDict,
EnsureChannelFirstd,
EnsureChannelFirstD,
EnsureChannelFirstDict,
FgBgToIndicesd,
FgBgToIndicesD,
FgBgToIndicesDict,
Identityd,
IdentityD,
IdentityDict,
LabelToMaskd,
LabelToMaskD,
LabelToMaskDict,
Lambdad,
LambdaD,
LambdaDict,
MapLabelValued,
MapLabelValueD,
MapLabelValueDict,
RandLambdad,
RandLambdaD,
RandLambdaDict,
RandTorchVisiond,
RandTorchVisionD,
RandTorchVisionDict,
RemoveRepeatedChanneld,
RemoveRepeatedChannelD,
RemoveRepeatedChannelDict,
RepeatChanneld,
RepeatChannelD,
RepeatChannelDict,
SelectItemsd,
SelectItemsD,
SelectItemsDict,
SimulateDelayd,
SimulateDelayD,
SimulateDelayDict,
SplitChanneld,
SplitChannelD,
SplitChannelDict,
SqueezeDimd,
SqueezeDimD,
SqueezeDimDict,
ToCupyd,
ToCupyD,
ToCupyDict,
ToNumpyd,
ToNumpyD,
ToNumpyDict,
ToPILd,
ToPILD,
ToPILDict,
TorchVisiond,
TorchVisionD,
TorchVisionDict,
ToTensord,
ToTensorD,
ToTensorDict,
Transposed,
TransposeD,
TransposeDict,
)
from .utils import (
allow_missing_keys_mode,
compute_divisible_spatial_size,
convert_inverse_interp_mode,
copypaste_arrays,
create_control_grid,
create_grid,
create_rotate,
create_scale,
create_shear,
create_translate,
extreme_points_to_image,
generate_pos_neg_label_crop_centers,
generate_spatial_bounding_box,
get_extreme_points,
get_largest_connected_component_mask,
img_bounds,
in_bounds,
is_empty,
is_positive,
map_binary_to_indices,
map_spatial_axes,
rand_choice,
rescale_array,
rescale_array_int_max,
rescale_instance_array,
resize_center,
weighted_patch_samples,
zero_margins,
)
| 22.258373 | 103 | 0.725064 |
from .adaptors import FunctionSignature, adaptor, apply_alias, to_kwargs
from .compose import Compose
from .croppad.array import (
BorderPad,
BoundingRect,
CenterSpatialCrop,
CropForeground,
DivisiblePad,
RandCropByPosNegLabel,
RandSpatialCrop,
RandSpatialCropSamples,
RandWeightedCrop,
ResizeWithPadOrCrop,
SpatialCrop,
SpatialPad,
)
from .croppad.batch import PadListDataCollate
from .croppad.dictionary import (
BorderPadd,
BorderPadD,
BorderPadDict,
BoundingRectd,
BoundingRectD,
BoundingRectDict,
CenterSpatialCropd,
CenterSpatialCropD,
CenterSpatialCropDict,
CropForegroundd,
CropForegroundD,
CropForegroundDict,
DivisiblePadd,
DivisiblePadD,
DivisiblePadDict,
NumpyPadModeSequence,
RandCropByPosNegLabeld,
RandCropByPosNegLabelD,
RandCropByPosNegLabelDict,
RandSpatialCropd,
RandSpatialCropD,
RandSpatialCropDict,
RandSpatialCropSamplesd,
RandSpatialCropSamplesD,
RandSpatialCropSamplesDict,
RandWeightedCropd,
RandWeightedCropD,
RandWeightedCropDict,
ResizeWithPadOrCropd,
ResizeWithPadOrCropD,
ResizeWithPadOrCropDict,
SpatialCropd,
SpatialCropD,
SpatialCropDict,
SpatialPadd,
SpatialPadD,
SpatialPadDict,
)
from .intensity.array import (
AdjustContrast,
DetectEnvelope,
GaussianSharpen,
GaussianSmooth,
MaskIntensity,
NormalizeIntensity,
RandAdjustContrast,
RandBiasField,
RandGaussianNoise,
RandGaussianSharpen,
RandGaussianSmooth,
RandHistogramShift,
RandRicianNoise,
RandScaleIntensity,
RandShiftIntensity,
RandStdShiftIntensity,
SavitzkyGolaySmooth,
ScaleIntensity,
ScaleIntensityRange,
ScaleIntensityRangePercentiles,
ShiftIntensity,
StdShiftIntensity,
ThresholdIntensity,
)
from .intensity.dictionary import (
AdjustContrastd,
AdjustContrastD,
AdjustContrastDict,
GaussianSharpend,
GaussianSharpenD,
GaussianSharpenDict,
GaussianSmoothd,
GaussianSmoothD,
GaussianSmoothDict,
MaskIntensityd,
MaskIntensityD,
MaskIntensityDict,
NormalizeIntensityd,
NormalizeIntensityD,
NormalizeIntensityDict,
RandAdjustContrastd,
RandAdjustContrastD,
RandAdjustContrastDict,
RandBiasFieldd,
RandBiasFieldD,
RandBiasFieldDict,
RandGaussianNoised,
RandGaussianNoiseD,
RandGaussianNoiseDict,
RandGaussianSharpend,
RandGaussianSharpenD,
RandGaussianSharpenDict,
RandGaussianSmoothd,
RandGaussianSmoothD,
RandGaussianSmoothDict,
RandHistogramShiftd,
RandHistogramShiftD,
RandHistogramShiftDict,
RandRicianNoised,
RandRicianNoiseD,
RandRicianNoiseDict,
RandScaleIntensityd,
RandScaleIntensityD,
RandScaleIntensityDict,
RandShiftIntensityd,
RandShiftIntensityD,
RandShiftIntensityDict,
RandStdShiftIntensityd,
RandStdShiftIntensityD,
RandStdShiftIntensityDict,
ScaleIntensityd,
ScaleIntensityD,
ScaleIntensityDict,
ScaleIntensityRanged,
ScaleIntensityRangeD,
ScaleIntensityRangeDict,
ScaleIntensityRangePercentilesd,
ScaleIntensityRangePercentilesD,
ScaleIntensityRangePercentilesDict,
ShiftIntensityd,
ShiftIntensityD,
ShiftIntensityDict,
StdShiftIntensityd,
StdShiftIntensityD,
StdShiftIntensityDict,
ThresholdIntensityd,
ThresholdIntensityD,
ThresholdIntensityDict,
)
from .inverse import InvertibleTransform
from .io.array import LoadImage, SaveImage
from .io.dictionary import LoadImaged, LoadImageD, LoadImageDict, SaveImaged, SaveImageD, SaveImageDict
from .post.array import (
Activations,
AsDiscrete,
KeepLargestConnectedComponent,
LabelToContour,
MeanEnsemble,
ProbNMS,
VoteEnsemble,
)
from .post.dictionary import (
Activationsd,
ActivationsD,
ActivationsDict,
AsDiscreted,
AsDiscreteD,
AsDiscreteDict,
Decollated,
DecollateD,
DecollateDict,
Ensembled,
KeepLargestConnectedComponentd,
KeepLargestConnectedComponentD,
KeepLargestConnectedComponentDict,
LabelToContourd,
LabelToContourD,
LabelToContourDict,
MeanEnsembled,
MeanEnsembleD,
MeanEnsembleDict,
ProbNMSd,
ProbNMSD,
ProbNMSDict,
VoteEnsembled,
VoteEnsembleD,
VoteEnsembleDict,
)
from .spatial.array import (
Affine,
AffineGrid,
Flip,
Orientation,
Rand2DElastic,
Rand3DElastic,
RandAffine,
RandAffineGrid,
RandAxisFlip,
RandDeformGrid,
RandFlip,
RandRotate,
RandRotate90,
RandZoom,
Resample,
Resize,
Rotate,
Rotate90,
Spacing,
Zoom,
)
from .spatial.dictionary import (
Affined,
AffineD,
AffineDict,
Flipd,
FlipD,
FlipDict,
Orientationd,
OrientationD,
OrientationDict,
Rand2DElasticd,
Rand2DElasticD,
Rand2DElasticDict,
Rand3DElasticd,
Rand3DElasticD,
Rand3DElasticDict,
RandAffined,
RandAffineD,
RandAffineDict,
RandAxisFlipd,
RandAxisFlipD,
RandAxisFlipDict,
RandFlipd,
RandFlipD,
RandFlipDict,
RandRotate90d,
RandRotate90D,
RandRotate90Dict,
RandRotated,
RandRotateD,
RandRotateDict,
RandZoomd,
RandZoomD,
RandZoomDict,
Resized,
ResizeD,
ResizeDict,
Rotate90d,
Rotate90D,
Rotate90Dict,
Rotated,
RotateD,
RotateDict,
Spacingd,
SpacingD,
SpacingDict,
Zoomd,
ZoomD,
ZoomDict,
)
from .transform import MapTransform, Randomizable, RandomizableTransform, Transform, apply_transform
from .utility.array import (
AddChannel,
AddExtremePointsChannel,
AsChannelFirst,
AsChannelLast,
CastToType,
ConvertToMultiChannelBasedOnBratsClasses,
DataStats,
EnsureChannelFirst,
FgBgToIndices,
Identity,
LabelToMask,
Lambda,
MapLabelValue,
RemoveRepeatedChannel,
RepeatChannel,
SimulateDelay,
SplitChannel,
SqueezeDim,
ToCupy,
ToNumpy,
ToPIL,
TorchVision,
ToTensor,
Transpose,
)
from .utility.dictionary import (
AddChanneld,
AddChannelD,
AddChannelDict,
AddExtremePointsChanneld,
AddExtremePointsChannelD,
AddExtremePointsChannelDict,
AsChannelFirstd,
AsChannelFirstD,
AsChannelFirstDict,
AsChannelLastd,
AsChannelLastD,
AsChannelLastDict,
CastToTyped,
CastToTypeD,
CastToTypeDict,
ConcatItemsd,
ConcatItemsD,
ConcatItemsDict,
ConvertToMultiChannelBasedOnBratsClassesd,
ConvertToMultiChannelBasedOnBratsClassesD,
ConvertToMultiChannelBasedOnBratsClassesDict,
CopyItemsd,
CopyItemsD,
CopyItemsDict,
DataStatsd,
DataStatsD,
DataStatsDict,
DeleteItemsd,
DeleteItemsD,
DeleteItemsDict,
EnsureChannelFirstd,
EnsureChannelFirstD,
EnsureChannelFirstDict,
FgBgToIndicesd,
FgBgToIndicesD,
FgBgToIndicesDict,
Identityd,
IdentityD,
IdentityDict,
LabelToMaskd,
LabelToMaskD,
LabelToMaskDict,
Lambdad,
LambdaD,
LambdaDict,
MapLabelValued,
MapLabelValueD,
MapLabelValueDict,
RandLambdad,
RandLambdaD,
RandLambdaDict,
RandTorchVisiond,
RandTorchVisionD,
RandTorchVisionDict,
RemoveRepeatedChanneld,
RemoveRepeatedChannelD,
RemoveRepeatedChannelDict,
RepeatChanneld,
RepeatChannelD,
RepeatChannelDict,
SelectItemsd,
SelectItemsD,
SelectItemsDict,
SimulateDelayd,
SimulateDelayD,
SimulateDelayDict,
SplitChanneld,
SplitChannelD,
SplitChannelDict,
SqueezeDimd,
SqueezeDimD,
SqueezeDimDict,
ToCupyd,
ToCupyD,
ToCupyDict,
ToNumpyd,
ToNumpyD,
ToNumpyDict,
ToPILd,
ToPILD,
ToPILDict,
TorchVisiond,
TorchVisionD,
TorchVisionDict,
ToTensord,
ToTensorD,
ToTensorDict,
Transposed,
TransposeD,
TransposeDict,
)
from .utils import (
allow_missing_keys_mode,
compute_divisible_spatial_size,
convert_inverse_interp_mode,
copypaste_arrays,
create_control_grid,
create_grid,
create_rotate,
create_scale,
create_shear,
create_translate,
extreme_points_to_image,
generate_pos_neg_label_crop_centers,
generate_spatial_bounding_box,
get_extreme_points,
get_largest_connected_component_mask,
img_bounds,
in_bounds,
is_empty,
is_positive,
map_binary_to_indices,
map_spatial_axes,
rand_choice,
rescale_array,
rescale_array_int_max,
rescale_instance_array,
resize_center,
weighted_patch_samples,
zero_margins,
)
| true | true |
f7247b2dbd9cf7eb773ad8e4856771996587a897 | 48 | py | Python | samcli/__init__.py | kylelaker/aws-sam-cli | d2917102ef56ac05b9973f96c716612f9638bb62 | [
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null | samcli/__init__.py | kylelaker/aws-sam-cli | d2917102ef56ac05b9973f96c716612f9638bb62 | [
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null | samcli/__init__.py | kylelaker/aws-sam-cli | d2917102ef56ac05b9973f96c716612f9638bb62 | [
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null | """
SAM CLI version
"""
__version__ = "1.13.1"
| 8 | 22 | 0.583333 |
__version__ = "1.13.1"
| true | true |
f7247b7c5f90f8592a8c662974c56a475935ed18 | 1,394 | py | Python | 505 The Maze II.py | krishna13052001/LeetCode | cd6ec626bea61f0bd9e8493622074f9e69a7a1c3 | [
"MIT"
] | 872 | 2015-06-15T12:02:41.000Z | 2022-03-30T08:44:35.000Z | 505 The Maze II.py | nadeemshaikh-github/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | [
"MIT"
] | 8 | 2015-06-21T15:11:59.000Z | 2022-02-01T11:22:34.000Z | 505 The Maze II.py | nadeemshaikh-github/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | [
"MIT"
] | 328 | 2015-06-28T03:10:35.000Z | 2022-03-29T11:05:28.000Z | #!/usr/bin/python3
"""
premium question
"""
from typing import List
import heapq
dirs = [(0, -1), (0, 1), (-1, 0), (1, 0)]
class Solution:
def shortestDistance(self, maze: List[List[int]], start: List[int], destination: List[int]) -> int:
"""
        No-friction rolling ball
        F[i][j][dir] = min distance given direction
        S[i][j] = whether stoppable
        Dijkstra's algorithm, reduced to a graph problem
"""
m, n = len(maze), len(maze[0])
D = [[float("inf") for _ in range(n)] for _ in range(m)] # distance matrix
i, j = start
D[i][j] = 0
q = [(0, i, j)]
while q:
dist, i, j = heapq.heappop(q)
for di, dj in dirs:
cur_dist = 0
I = i
J = j
# look ahead
while 0 <= I + di < m and 0 <= J + dj < n and maze[I + di][J + dj] == 0:
I += di
J += dj
cur_dist += 1
if dist + cur_dist < D[I][J]:
D[I][J] = dist + cur_dist
heapq.heappush(q, (D[I][J], I, J))
i, j = destination
return D[i][j] if D[i][j] != float("inf") else -1
if __name__ == "__main__":
assert Solution().shortestDistance([[0,0,1,0,0],[0,0,0,0,0],[0,0,0,1,0],[1,1,0,1,1],[0,0,0,0,0]], [0,4], [4,4]) == 12
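    # Added sanity check (not from the original file): with no friction the
    # ball cannot stop mid-corridor, so in an open 1x3 maze it rolls from
    # [0,0] straight to the boundary at [0,2], giving distance 2.
    assert Solution().shortestDistance([[0, 0, 0]], [0, 0], [0, 2]) == 2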
| 28.44898 | 121 | 0.444763 |
from typing import List
import heapq
dirs = [(0, -1), (0, 1), (-1, 0), (1, 0)]
class Solution:
def shortestDistance(self, maze: List[List[int]], start: List[int], destination: List[int]) -> int:
m, n = len(maze), len(maze[0])
D = [[float("inf") for _ in range(n)] for _ in range(m)]
i, j = start
D[i][j] = 0
q = [(0, i, j)]
while q:
dist, i, j = heapq.heappop(q)
for di, dj in dirs:
cur_dist = 0
I = i
J = j
while 0 <= I + di < m and 0 <= J + dj < n and maze[I + di][J + dj] == 0:
I += di
J += dj
cur_dist += 1
if dist + cur_dist < D[I][J]:
D[I][J] = dist + cur_dist
heapq.heappush(q, (D[I][J], I, J))
i, j = destination
return D[i][j] if D[i][j] != float("inf") else -1
if __name__ == "__main__":
assert Solution().shortestDistance([[0,0,1,0,0],[0,0,0,0,0],[0,0,0,1,0],[1,1,0,1,1],[0,0,0,0,0]], [0,4], [4,4]) == 12
| true | true |
f7247bcaf5d604756097f5b9a64dc5e8efcee241 | 1,801 | py | Python | src/augmented_pickle.py | opskrift/expman | 637bbef34d79ce03311889ce310797e78a9f7710 | [
"MIT"
] | null | null | null | src/augmented_pickle.py | opskrift/expman | 637bbef34d79ce03311889ce310797e78a9f7710 | [
"MIT"
] | 16 | 2021-08-04T12:08:00.000Z | 2021-09-12T13:01:27.000Z | src/augmented_pickle.py | opskrift/opskrift | 637bbef34d79ce03311889ce310797e78a9f7710 | [
"MIT"
] | null | null | null | """
Suppose you have some input data sources `data_in` on which you apply some process `F` parameterized by `args`:
data_out = F(data_in, args)
You want to serialize `data_out`, but also don't want to lose `args`,
to preserve the exact setup that generated the output data.
Now suppose you want to inspect `args` for a particular `data_out`:
- Saving both `{"data": data_out, "args": args}` may not be a viable solution,
as `data_out` needs to be fully loaded into memory without actually needing it.
- Saving `data_out` and `args` separately necessitates extra care to keep them tied together.
Solution: define a simple data format -- *augmented pickle*
<metadata>
<body (actual data)>
Pickle both objects, but read body on-demand:
res = read_augmented_pickle("./data.apkl", get_body=True)
# get metadata (body is not loaded)
meta = next(res)
# query the generator again to get body (data)
data = next(res)
"""
import pickle
from os import PathLike
from typing import Any, Iterable, Union
def write_augmented_pickle(
metadata: Any,
body: Any,
path: Union[str, PathLike],
) -> None:
"""Write an augmented pickle file containing `metadata` and `body`."""
with open(path, "wb") as fp:
pickle.dump(metadata, fp)
pickle.dump(body, fp)
def read_augmented_pickle(
path: Union[str, PathLike],
get_body: bool,
) -> Iterable[Any]:
"""Read an augmented pickle file containing `metadata` and `body`.
Returns a generator that can be queried on-demand using `next`.
If `get_body` is False, only `metadata` is yielded.
"""
with open(path, "rb") as fp:
metadata = pickle.load(fp)
yield metadata
if not get_body:
return
body = pickle.load(fp)
yield body
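
# Minimal round-trip sketch (added; not part of the original module), using
# only the two functions above: metadata comes out first, the body only on a
# second next() call.
if __name__ == "__main__":
    write_augmented_pickle({"args": {"alpha": 1.5}}, list(range(5)), "./data.apkl")
    res = read_augmented_pickle("./data.apkl", get_body=True)
    assert next(res) == {"args": {"alpha": 1.5}}  # metadata only, body not loaded
    assert next(res) == [0, 1, 2, 3, 4]  # body, loaded on demand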
| 27.287879 | 111 | 0.675736 |
import pickle
from os import PathLike
from typing import Any, Iterable, Union
def write_augmented_pickle(
metadata: Any,
body: Any,
path: Union[str, PathLike],
) -> None:
with open(path, "wb") as fp:
pickle.dump(metadata, fp)
pickle.dump(body, fp)
def read_augmented_pickle(
path: Union[str, PathLike],
get_body: bool,
) -> Iterable[Any]:
with open(path, "rb") as fp:
metadata = pickle.load(fp)
yield metadata
if not get_body:
return
body = pickle.load(fp)
yield body
| true | true |
f7247d937c3d515dd43275659017e44fc03cb44c | 1,247 | py | Python | crawling/crawler.py | LukasTinnes/sPyRat | 42e012e426befa3876e590be2ea83874d5351d12 | [
"Unlicense"
] | null | null | null | crawling/crawler.py | LukasTinnes/sPyRat | 42e012e426befa3876e590be2ea83874d5351d12 | [
"Unlicense"
] | 3 | 2022-02-07T19:53:47.000Z | 2022-02-13T19:51:33.000Z | crawling/crawler.py | LukasTinnes/sPyRat | 42e012e426befa3876e590be2ea83874d5351d12 | [
"Unlicense"
] | null | null | null | from abc import abstractmethod
from crawling.crawler_data_structures.crawl_data import CrawlData
class Crawler:
"""
An abstract class for other Crawlers to inherit from.
A Crawler should open a given file and attempt to find an associated file pattern at every byte in the given file.
"""
@abstractmethod
def crawl(self, file: str) -> CrawlData:
"""
Crawls a file to find a certain file pattern at every byte.
:param file: The file path
:return:
"""
...
@abstractmethod
def crawl_in_range(self, file: str, start_byte: int, end_byte: int) -> CrawlData:
"""
        Crawls the file for a file pattern between start_byte (inclusive) and end_byte (exclusive).
        :param file: The file path
:param start_byte: The byte to start crawling at.
:param end_byte: The byte to end crawling at.
:return:
"""
...
@abstractmethod
    def crawl_at_byte(self, file: str, start_byte: int = 0) -> CrawlData:
"""
Crawls for a file pattern at the specific byte given.
:param file: The filepath.
:param start_byte: The byte to start crawling at.
        :return: The resulting CrawlData
"""
...
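
import os


# Hypothetical sketch (added; not from the original repo): a concrete
# crawler would typically implement crawl() by delegating to
# crawl_in_range() over the whole file. The remaining two methods still
# need real implementations, and CrawlData construction is left out since
# its signature is not shown here.
class WholeFileCrawler(Crawler):
    def crawl(self, file: str) -> CrawlData:
        return self.crawl_in_range(file, 0, os.path.getsize(file))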
| 31.175 | 118 | 0.623095 | from abc import abstractmethod
from crawling.crawler_data_structures.crawl_data import CrawlData
class Crawler:
@abstractmethod
def crawl(self, file: str) -> CrawlData:
...
@abstractmethod
def crawl_in_range(self, file: str, start_byte: int, end_byte: int) -> CrawlData:
...
@abstractmethod
    def crawl_at_byte(self, file: str, start_byte: int = 0) -> CrawlData:
...
| true | true |
f7247d9b14431ca407254cad9d929acd151162dc | 9,493 | py | Python | docs/conf.py | benedikt-mangold/obfuscate | 4e51b3c3c6d3d869a742f036234632c77cebcb54 | [
"MIT"
] | null | null | null | docs/conf.py | benedikt-mangold/obfuscate | 4e51b3c3c6d3d869a742f036234632c77cebcb54 | [
"MIT"
] | 1 | 2021-06-01T14:48:09.000Z | 2021-06-01T14:48:09.000Z | docs/conf.py | benedikt-mangold/obfuscate | 4e51b3c3c6d3d869a742f036234632c77cebcb54 | [
"MIT"
] | null | null | null | # This file is execfile()d with the current directory set to its containing dir.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import inspect
import shutil
# -- Path setup --------------------------------------------------------------
__location__ = os.path.join(
os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe()))
)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(__location__, "../src"))
# -- Run sphinx-apidoc -------------------------------------------------------
# This hack is necessary since RTD does not issue `sphinx-apidoc` before running
# `sphinx-build -b html . _build/html`. See Issue:
# https://github.com/rtfd/readthedocs.org/issues/1139
# DON'T FORGET: Check the box "Install your project inside a virtualenv using
# setup.py install" in the RTD Advanced Settings.
# Additionally it helps us to avoid running apidoc manually
try: # for Sphinx >= 1.7
from sphinx.ext import apidoc
except ImportError:
from sphinx import apidoc
output_dir = os.path.join(__location__, "api")
module_dir = os.path.join(__location__, "../src/obfuscator")
try:
shutil.rmtree(output_dir)
except FileNotFoundError:
pass
try:
import sphinx
cmd_line_template = (
"sphinx-apidoc --implicit-namespaces -f -o {outputdir} {moduledir}"
)
cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir)
args = cmd_line.split(" ")
if tuple(sphinx.__version__.split(".")) >= ("1", "7"):
# This is a rudimentary parse_version to avoid external dependencies
args = args[1:]
apidoc.main(args)
except Exception as e:
print("Running `sphinx-apidoc` failed!\n{}".format(e))
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.autosummary",
"sphinx.ext.viewcode",
"sphinx.ext.coverage",
"sphinx.ext.doctest",
"sphinx.ext.ifconfig",
"sphinx.ext.mathjax",
"sphinx.ext.napoleon",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "obfuscator"
copyright = "2021, Benedikt Mangold"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "" # Is set by calling `setup.py docs`
# The full version, including alpha/beta/rc tags.
release = "" # Is set by calling `setup.py docs`
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"]
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"sidebar_width": "300px",
"page_width": "1200px"
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
try:
from obfuscator import __version__ as version
except ImportError:
pass
else:
release = version
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = ""
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "obfuscator-doc"
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ("letterpaper" or "a4paper").
# "papersize": "letterpaper",
# The font size ("10pt", "11pt" or "12pt").
# "pointsize": "10pt",
# Additional stuff for the LaTeX preamble.
# "preamble": "",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
("index", "user_guide.tex", "obfuscator Documentation", "Benedikt Mangold", "manual")
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = ""
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- External mapping --------------------------------------------------------
python_version = ".".join(map(str, sys.version_info[0:2]))
intersphinx_mapping = {
"sphinx": ("http://www.sphinx-doc.org/en/stable", None),
"python": ("https://docs.python.org/" + python_version, None),
"matplotlib": ("https://matplotlib.org", None),
"numpy": ("https://docs.scipy.org/doc/numpy", None),
"sklearn": ("https://scikit-learn.org/stable", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
"scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
"pyscaffold": ("https://pyscaffold.org/en/stable", None),
}
| 33.54417 | 89 | 0.696092 |
import os
import sys
import inspect
import shutil
__location__ = os.path.join(
os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe()))
)
sys.path.insert(0, os.path.join(__location__, "../src"))
# setup.py install" in the RTD Advanced Settings.
# Additionally it helps us to avoid running apidoc manually
try: # for Sphinx >= 1.7
from sphinx.ext import apidoc
except ImportError:
from sphinx import apidoc
output_dir = os.path.join(__location__, "api")
module_dir = os.path.join(__location__, "../src/obfuscator")
try:
shutil.rmtree(output_dir)
except FileNotFoundError:
pass
try:
import sphinx
cmd_line_template = (
"sphinx-apidoc --implicit-namespaces -f -o {outputdir} {moduledir}"
)
cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir)
args = cmd_line.split(" ")
if tuple(sphinx.__version__.split(".")) >= ("1", "7"):
# This is a rudimentary parse_version to avoid external dependencies
args = args[1:]
apidoc.main(args)
except Exception as e:
print("Running `sphinx-apidoc` failed!\n{}".format(e))
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.autosummary",
"sphinx.ext.viewcode",
"sphinx.ext.coverage",
"sphinx.ext.doctest",
"sphinx.ext.ifconfig",
"sphinx.ext.mathjax",
"sphinx.ext.napoleon",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "obfuscator"
copyright = "2021, Benedikt Mangold"
# The version info for the project you're documenting, acts as replacement for
version = ""
release = ""
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"]
pygments_style = "sphinx"
html_theme = "alabaster"
html_theme_options = {
"sidebar_width": "300px",
"page_width": "1200px"
}
try:
from obfuscator import __version__ as version
except ImportError:
pass
else:
release = version
html_static_path = ["_static"]
htmlhelp_basename = "obfuscator-doc"
latex_elements = {
}
latex_documents = [
("index", "user_guide.tex", "obfuscator Documentation", "Benedikt Mangold", "manual")
]
python_version = ".".join(map(str, sys.version_info[0:2]))
intersphinx_mapping = {
"sphinx": ("http://www.sphinx-doc.org/en/stable", None),
"python": ("https://docs.python.org/" + python_version, None),
"matplotlib": ("https://matplotlib.org", None),
"numpy": ("https://docs.scipy.org/doc/numpy", None),
"sklearn": ("https://scikit-learn.org/stable", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
"scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
"pyscaffold": ("https://pyscaffold.org/en/stable", None),
}
| true | true |
f7247db16c62de200bc2c5deee5c7ca83220e585 | 4,738 | py | Python | test/streams.py | gonsp/LotterySampling | 92ff14f602c05d747708b522cf05b9f9066c43e0 | [
"MIT"
] | 4 | 2020-06-03T15:17:28.000Z | 2020-09-29T20:52:15.000Z | test/streams.py | gonsp/LotterySampling | 92ff14f602c05d747708b522cf05b9f9066c43e0 | [
"MIT"
] | null | null | null | test/streams.py | gonsp/LotterySampling | 92ff14f602c05d747708b522cf05b9f9066c43e0 | [
"MIT"
] | null | null | null | import itertools
import math
import numpy as np
from abc import abstractmethod
from io import TextIOWrapper
from sorted_list import SortedList
class Stream():
def __init__(self, length, save=True):
self.length = length
self.N = 0
self.n = 0
self.save = save
self.elements = SortedList()
def __iter__(self):
return self
def __next__(self):
self.N += 1
if self.N > self.length:
raise StopIteration
element = self.next_element()
        if self.save:  # saving can be disabled to speed up tests that do not need to check accuracy
self.elements.process_element(element)
self.n = self.elements.size()
return element
@abstractmethod
def next_element(self):
pass
def top_k_query(self, k):
return [(str(id), count/self.N) for id, count in itertools.islice(iter(self.elements), k)]
def frequent_query(self, freq):
return [(str(id), count/self.N) for id, count in itertools.takewhile(lambda element: element[1] >= math.ceil(freq * self.N), iter(self.elements))]
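# Usage sketch (added): consume a stream, then ask it for exact top-k and
# heavy-hitter answers. Zipf is defined further below in this module; the
# forward reference is safe because names resolve at call time.
def _demo_queries():
    stream = Zipf(length=1000, seed=0)
    for _ in stream:
        pass
    return stream.top_k_query(5), stream.frequent_query(0.01)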
def chunk_stream(stream, chunk_size):
it = iter(stream)
while True:
chunk = list(itertools.islice(it, chunk_size))
if len(chunk) > 0:
yield chunk
else:
return None
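# Usage sketch (added): batch a stream into fixed-size chunks. Uniform is
# also defined further below.
def _demo_chunk_stream(chunk_size=4):
    stream = Uniform(length=10, n_max=100, seed=0, save=False)
    return [len(c) for c in chunk_stream(stream, chunk_size)]  # -> [4, 4, 2]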
class MultiZipf(Stream):
def __init__(self, length, alpha=1.5, segments=2, offset=10000, seed=None, save=True):
super().__init__(length, save)
self.alpha = alpha
self.segments = segments
self.offset = offset
np.random.seed(seed)
    def next_element(self):
        element = np.random.zipf(self.alpha)
        # shift ids by `offset` for each successive segment so the segments
        # draw from disjoint id ranges
        element += self.offset * (self.N // (self.length / self.segments))
        return int(element)
class Zipf(MultiZipf):
def __init__(self, length, alpha=1.5, seed=None, save=True):
super().__init__(length, alpha=alpha, segments=1, seed=seed, save=save)
class Uniform(Stream):
def __init__(self, length, n_max, seed=None, save=True):
super().__init__(length, save)
self.n_max = n_max
np.random.seed(seed)
def next_element(self):
return np.random.randint(0, self.n_max)
class Unequal(Stream):
def __init__(self, length, alpha, beta, seed=None, save=True):
super().__init__(length, save)
        data = np.zeros(length, dtype=int)
        # the first alpha*beta slots hold ids 0..alpha-1, each repeated beta
        # times; the remaining slots hold ids that occur exactly once
        for i in range(alpha):
            for j in range(beta):
                data[i*beta + j] = i
        for i in range(alpha * beta, length):
            data[i] = i - alpha * (beta - 1)
np.random.seed(seed)
self.data = iter(np.random.permutation(data))
def next_element(self):
return next(self.data)
class File(Stream):
def __init__(self, file_path, length=math.inf, shuffle=False, repetitions=1, seed=None, save=True):
if shuffle or repetitions > 1:
self.data = []
with open(file_path, 'r') as file:
for line in file:
element = line[:-1]
self.data.append(element)
self.data *= repetitions
length = min(len(self.data), length)
if shuffle:
np.random.seed(seed)
self.data = np.random.permutation(self.data)
else:
with open(file_path, 'r') as file:
length = min(sum(1 for _ in file), length)
self.data = open(file_path, 'r')
super().__init__(length, save)
def next_element(self):
if isinstance(self.data, TextIOWrapper):
element = self.data.readline()[:-1]
if element == '':
raise StopIteration
return element
else:
            if self.N > len(self.data):
                raise StopIteration
            # self.N was already incremented in __next__, so it is the
            # 1-based position of the current element
            return self.data[self.N - 1]
class ZipfNoiseZipf(Stream):
def __init__(self, length, alpha=1.5, noise=0.3, offset=10000, seed=None, save=True):
super().__init__(length, save)
self.alpha = alpha
self.noise = noise
self.offset = offset
np.random.seed(seed)
    def next_element(self):
        # a Zipf head, then a stretch of unique "noise" ids, then a second
        # Zipf tail shifted by `offset`
        if self.N < self.length * (1 - self.noise) // 2:
return int(np.random.zipf(self.alpha))
elif self.N < self.length - self.length * (1 - self.noise) // 2:
return self.N
else:
return int(np.random.zipf(self.alpha) + self.offset)
class ESA(Stream):
def __init__(self, length, seed=None, save=True):
super().__init__(length, save)
np.random.seed(seed)
def next_element(self):
        # first half: ids N // 2, so each id is drawn twice; second half:
        # the single id `self.length`, repeated
        if self.N < self.length // 2:
return self.N // 2
else:
return self.length | 27.387283 | 154 | 0.580625 | import itertools
import math
import numpy as np
from abc import abstractmethod
from io import TextIOWrapper
from sorted_list import SortedList
class Stream():
def __init__(self, length, save=True):
self.length = length
self.N = 0
self.n = 0
self.save = save
self.elements = SortedList()
def __iter__(self):
return self
def __next__(self):
self.N += 1
if self.N > self.length:
raise StopIteration
element = self.next_element()
if self.save:
self.elements.process_element(element)
self.n = self.elements.size()
return element
@abstractmethod
def next_element(self):
pass
def top_k_query(self, k):
return [(str(id), count/self.N) for id, count in itertools.islice(iter(self.elements), k)]
def frequent_query(self, freq):
return [(str(id), count/self.N) for id, count in itertools.takewhile(lambda element: element[1] >= math.ceil(freq * self.N), iter(self.elements))]
def chunk_stream(stream, chunk_size):
it = iter(stream)
while True:
chunk = list(itertools.islice(it, chunk_size))
if len(chunk) > 0:
yield chunk
else:
return None
class MultiZipf(Stream):
def __init__(self, length, alpha=1.5, segments=2, offset=10000, seed=None, save=True):
super().__init__(length, save)
self.alpha = alpha
self.segments = segments
self.offset = offset
np.random.seed(seed)
def next_element(self):
element = np.random.zipf(self.alpha)
element += self.offset * (self.N // (self.length / self.segments))
return int(element)
class Zipf(MultiZipf):
def __init__(self, length, alpha=1.5, seed=None, save=True):
super().__init__(length, alpha=alpha, segments=1, seed=seed, save=save)
class Uniform(Stream):
def __init__(self, length, n_max, seed=None, save=True):
super().__init__(length, save)
self.n_max = n_max
np.random.seed(seed)
def next_element(self):
return np.random.randint(0, self.n_max)
class Unequal(Stream):
def __init__(self, length, alpha, beta, seed=None, save=True):
super().__init__(length, save)
data = np.zeros(length, dtype=int)
for i in range(alpha):
for j in range(beta):
data[i*beta + j] = i
for i in range(alpha * beta, length):
data[i] = i - alpha * (beta - 1)
np.random.seed(seed)
self.data = iter(np.random.permutation(data))
def next_element(self):
return next(self.data)
class File(Stream):
def __init__(self, file_path, length=math.inf, shuffle=False, repetitions=1, seed=None, save=True):
if shuffle or repetitions > 1:
self.data = []
with open(file_path, 'r') as file:
for line in file:
element = line[:-1]
self.data.append(element)
self.data *= repetitions
length = min(len(self.data), length)
if shuffle:
np.random.seed(seed)
self.data = np.random.permutation(self.data)
else:
with open(file_path, 'r') as file:
length = min(sum(1 for _ in file), length)
self.data = open(file_path, 'r')
super().__init__(length, save)
def next_element(self):
if isinstance(self.data, TextIOWrapper):
element = self.data.readline()[:-1]
if element == '':
raise StopIteration
return element
else:
            if self.N > len(self.data):
                raise StopIteration
            return self.data[self.N - 1]
class ZipfNoiseZipf(Stream):
def __init__(self, length, alpha=1.5, noise=0.3, offset=10000, seed=None, save=True):
super().__init__(length, save)
self.alpha = alpha
self.noise = noise
self.offset = offset
np.random.seed(seed)
def next_element(self):
if self.N < self.length * (1 - self.noise) // 2:
return int(np.random.zipf(self.alpha))
elif self.N < self.length - self.length * (1 - self.noise) // 2:
return self.N
else:
return int(np.random.zipf(self.alpha) + self.offset)
class ESA(Stream):
def __init__(self, length, seed=None, save=True):
super().__init__(length, save)
np.random.seed(seed)
def next_element(self):
if self.N < self.length // 2:
return self.N // 2
else:
return self.length | true | true |
f7247e8a17813fa402740c3c525d9fef0664b3d9 | 116 | py | Python | web-app/backend/apps/common/routes/__init__.py | titoeb/kfserving | b072a76842b57e904dbdf46a136474a22051500d | [
"Apache-2.0"
] | 47 | 2022-01-02T09:59:15.000Z | 2022-01-25T11:11:17.000Z | web-app/backend/apps/common/routes/__init__.py | titoeb/kfserving | b072a76842b57e904dbdf46a136474a22051500d | [
"Apache-2.0"
] | 7 | 2021-08-31T23:55:06.000Z | 2022-03-02T11:34:58.000Z | web-app/backend/apps/common/routes/__init__.py | titoeb/kfserving | b072a76842b57e904dbdf46a136474a22051500d | [
"Apache-2.0"
] | 4 | 2022-01-27T08:59:15.000Z | 2022-02-27T14:42:19.000Z | from flask import Blueprint
bp = Blueprint("base_routes", __name__)
from . import delete, get # noqa: F401, E402
| 19.333333 | 45 | 0.732759 | from flask import Blueprint
bp = Blueprint("base_routes", __name__)
from . import delete, get
| true | true |
f7247ef6bd54462db7b045b5ea78324bc3752082 | 256 | py | Python | sessioncontroller/settings.py | synteny/AuroraBot | 179919e1e6cc5f24d9cf3e9295d0f043174a6169 | [
"MIT"
] | 2 | 2015-11-27T11:08:24.000Z | 2018-03-21T15:35:08.000Z | sessioncontroller/settings.py | synteny/AuroraBot | 179919e1e6cc5f24d9cf3e9295d0f043174a6169 | [
"MIT"
] | 2 | 2015-10-27T15:06:09.000Z | 2015-10-28T12:53:12.000Z | sessioncontroller/settings.py | synteny/AuroraBot | 179919e1e6cc5f24d9cf3e9295d0f043174a6169 | [
"MIT"
] | null | null | null | import os
TELEGRAM_TOKEN = os.environ['TELEGRAM_TOKEN']
DATABASE = {
'HOST': os.getenv('DB_PORT_3306_TCP_ADDR', 'localhost'),
'USER': os.getenv('DB_MYSQL_USER', 'root'),
'PASSWORD': os.getenv('DB_MYSQL_PASSWORD', ''),
'NAME': 'aurora',
}
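
# Added note: every value above comes from the environment, e.g.
#   export TELEGRAM_TOKEN=123456:ABC-DEF DB_PORT_3306_TCP_ADDR=db.internal
# TELEGRAM_TOKEN has no default, so importing this module raises KeyError
# when it is unset.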
| 23.272727 | 60 | 0.652344 | import os
TELEGRAM_TOKEN = os.environ['TELEGRAM_TOKEN']
DATABASE = {
'HOST': os.getenv('DB_PORT_3306_TCP_ADDR', 'localhost'),
'USER': os.getenv('DB_MYSQL_USER', 'root'),
'PASSWORD': os.getenv('DB_MYSQL_PASSWORD', ''),
'NAME': 'aurora',
}
| true | true |
f7247f14b21d4ad2ba934ab6dab587b66188f368 | 8,305 | py | Python | fpn/operator_py/fpn_roi_pooling.py | CosmosHua/Deformable-ConvNets | 6aeda878a95bcb55eadffbe125804e730574de8d | [
"MIT"
] | 3,976 | 2017-05-05T13:48:27.000Z | 2022-03-30T13:37:48.000Z | fpn/operator_py/fpn_roi_pooling.py | CosmosHua/Deformable-ConvNets | 6aeda878a95bcb55eadffbe125804e730574de8d | [
"MIT"
] | 259 | 2017-05-06T13:30:11.000Z | 2022-03-16T14:11:16.000Z | fpn/operator_py/fpn_roi_pooling.py | CosmosHua/Deformable-ConvNets | 6aeda878a95bcb55eadffbe125804e730574de8d | [
"MIT"
] | 1,051 | 2017-05-05T14:55:57.000Z | 2022-03-23T01:02:47.000Z | # --------------------------------------------------------
# Deformable Convolutional Networks
# Copyright (c) 2017 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Modified by Haozhi Qi, Yuwen Xiong
# --------------------------------------------------------
import mxnet as mx
import numpy as np
from mxnet.contrib import autograd
import gc
class FPNROIPoolingOperator(mx.operator.CustomOp):
def __init__(self, feat_strides, pooled_height, pooled_width, output_dim, with_deformable):
self.pooled_height = pooled_height
self.pooled_width = pooled_width
self.feat_strides = feat_strides
self.with_deformable = with_deformable
self.output_dim = output_dim
self.in_grad_hist_list = []
self.num_strides = len(self.feat_strides)
self.roi_pool = [None for _ in range(self.num_strides)]
self.feat_idx = [None for _ in range(self.num_strides)]
def forward(self, is_train, req, in_data, out_data, aux):
        rois = in_data[-1].asnumpy()
        # roi rows are (batch_idx, x1, y1, x2, y2)
        w = rois[:, 3] - rois[:, 1] + 1
h = rois[:, 4] - rois[:, 2] + 1
feat_id = np.clip(np.floor(2 + np.log2(np.sqrt(w * h) / 224)), 0, len(self.feat_strides) - 1)
pyramid_idx = []
rois_p = [None for _ in range(self.num_strides)]
for i in range(self.num_strides):
self.feat_idx[i] = np.where(feat_id == i)[0]
if len(self.feat_idx[i]) == 0:
# padding dummy roi
rois_p[i] = np.zeros((1, 5))
pyramid_idx.append(-1)
else:
rois_p[i] = rois[self.feat_idx[i]]
pyramid_idx.append(self.feat_idx[i])
rois_idx = np.argsort(np.hstack(pyramid_idx))[-rois.shape[0]:]
if is_train:
for i in range(self.num_strides):
self.in_grad_hist_list.append(mx.nd.zeros_like(in_data[i]))
if self.with_deformable:
for i in range(self.num_strides, self.num_strides * 3):
self.in_grad_hist_list.append(mx.nd.zeros_like(in_data[i]))
autograd.mark_variables([in_data[i] for i in range(self.num_strides * 3)], self.in_grad_hist_list)
with autograd.train_section():
for i in range(self.num_strides):
roi_offset_t = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), group_size=1, pooled_size=7,
sample_per_part=4, no_trans=True, part_size=7, output_dim=256, spatial_scale=1.0 / self.feat_strides[i])
roi_offset = mx.nd.FullyConnected(data=roi_offset_t, num_hidden=7 * 7 * 2, weight=in_data[i * 2 + self.num_strides], bias=in_data[i * 2 + 1 + self.num_strides])
roi_offset_reshape = mx.nd.reshape(data=roi_offset, shape=(-1, 2, 7, 7))
self.roi_pool[i] = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), trans=roi_offset_reshape,
group_size=1, pooled_size=7, sample_per_part=4, no_trans=False, part_size=7,
output_dim=self.output_dim, spatial_scale=1.0 / self.feat_strides[i], trans_std=0.1)
else:
autograd.mark_variables([in_data[i] for i in range(self.num_strides)], self.in_grad_hist_list)
with autograd.train_section():
for i in range(self.num_strides):
self.roi_pool[i] = mx.nd.ROIPooling(in_data[i], mx.nd.array(rois_p[i], in_data[i].context), (7, 7), spatial_scale=1.0 / self.feat_strides[i])
roi_pool = mx.nd.concatenate(self.roi_pool, axis=0)
else:
            # during testing, there is no need to record variables, thus saving memory
roi_pool = [None for _ in range(self.num_strides)]
if self.with_deformable:
for i in range(self.num_strides):
roi_offset_t = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), group_size=1, pooled_size=7,
sample_per_part=4, no_trans=True, part_size=7, output_dim=256, spatial_scale=1.0 / self.feat_strides[i])
roi_offset = mx.nd.FullyConnected(data=roi_offset_t, num_hidden=7 * 7 * 2, weight=in_data[i * 2 + self.num_strides], bias=in_data[i * 2 + 1 + self.num_strides])
roi_offset_reshape = mx.nd.reshape(data=roi_offset, shape=(-1, 2, 7, 7))
roi_pool[i] = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), trans=roi_offset_reshape,
group_size=1, pooled_size=7, sample_per_part=4, no_trans=False, part_size=7,
output_dim=self.output_dim, spatial_scale=1.0 / self.feat_strides[i], trans_std=0.1)
else:
for i in range(self.num_strides):
roi_pool[i] = mx.nd.ROIPooling(in_data[i], mx.nd.array(rois_p[i], in_data[i].context), (7, 7), spatial_scale=1.0 / self.feat_strides[i])
roi_pool = mx.nd.concatenate(roi_pool, axis=0)
roi_pool = mx.nd.take(roi_pool, mx.nd.array(rois_idx, roi_pool.context))
self.assign(out_data[0], req[0], roi_pool)
def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
for i in range(len(in_grad)):
self.assign(in_grad[i], req[i], 0)
with autograd.train_section():
for i in range(self.num_strides):
                if len(self.feat_idx[i]) > 0:
autograd.compute_gradient([mx.nd.take(out_grad[0], mx.nd.array(self.feat_idx[i], out_grad[0].context)) * self.roi_pool[i]])
if self.with_deformable:
for i in range(0, self.num_strides * 3):
self.assign(in_grad[i], req[i], self.in_grad_hist_list[i])
else:
for i in range(0, self.num_strides):
self.assign(in_grad[i], req[i], self.in_grad_hist_list[i])
gc.collect()
@mx.operator.register('fpn_roi_pooling')
class FPNROIPoolingProp(mx.operator.CustomOpProp):
def __init__(self, feat_strides='(4,8,16,32)', pooled_height='7', pooled_width='7', with_deformable='False', output_dim='256'):
super(FPNROIPoolingProp, self).__init__(need_top_grad=True)
self.pooled_height = int(pooled_height)
self.pooled_width = int(pooled_width)
self.feat_strides = np.fromstring(feat_strides[1:-1], dtype=int, sep=',')
self.with_deformable = with_deformable == 'True'
self.output_dim = int(output_dim)
self.num_strides = len(self.feat_strides)
def list_arguments(self):
args_list = []
for i in range(self.num_strides):
args_list.append('data_p{}'.format(2 + i))
if self.with_deformable:
for i in range(self.num_strides):
args_list.extend(['offset_weight_p{}'.format(2 + i), 'offset_bias_p{}'.format(2 + i)])
args_list.append('rois')
return args_list
def list_outputs(self):
return ['output']
def infer_shape(self, in_shape):
output_feat_shape = [in_shape[-1][0], in_shape[0][1], self.pooled_height, self.pooled_width]
if self.with_deformable:
offset_dim = self.pooled_height * self.pooled_width * 2
input_dim = self.pooled_height * self.pooled_width * self.output_dim
for i in range(self.num_strides):
in_shape[i * 2 + self.num_strides], in_shape[i * 2 + 1 + self.num_strides] = [offset_dim, input_dim], [offset_dim, ]
return in_shape, [output_feat_shape]
def create_operator(self, ctx, shapes, dtypes):
return FPNROIPoolingOperator(self.feat_strides, self.pooled_height, self.pooled_width, self.output_dim, self.with_deformable)
def declare_backward_dependency(self, out_grad, in_data, out_data):
return [out_grad[0]]
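
# Standalone check (added; not part of the original operator): forward()
# assigns each RoI to a pyramid level with the FPN heuristic
# floor(2 + log2(sqrt(w * h) / 224)), so a 224x224 RoI lands on index 2,
# i.e. stride 16 when feat_strides=(4, 8, 16, 32).
if __name__ == '__main__':
    w, h = 224.0, 224.0
    assert np.clip(np.floor(2 + np.log2(np.sqrt(w * h) / 224)), 0, 3) == 2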
| 56.114865 | 184 | 0.593618 |
import mxnet as mx
import numpy as np
from mxnet.contrib import autograd
import gc
class FPNROIPoolingOperator(mx.operator.CustomOp):
def __init__(self, feat_strides, pooled_height, pooled_width, output_dim, with_deformable):
self.pooled_height = pooled_height
self.pooled_width = pooled_width
self.feat_strides = feat_strides
self.with_deformable = with_deformable
self.output_dim = output_dim
self.in_grad_hist_list = []
self.num_strides = len(self.feat_strides)
self.roi_pool = [None for _ in range(self.num_strides)]
self.feat_idx = [None for _ in range(self.num_strides)]
def forward(self, is_train, req, in_data, out_data, aux):
rois = in_data[-1].asnumpy()
w = rois[:, 3] - rois[:, 1] + 1
h = rois[:, 4] - rois[:, 2] + 1
feat_id = np.clip(np.floor(2 + np.log2(np.sqrt(w * h) / 224)), 0, len(self.feat_strides) - 1)
pyramid_idx = []
rois_p = [None for _ in range(self.num_strides)]
for i in range(self.num_strides):
self.feat_idx[i] = np.where(feat_id == i)[0]
if len(self.feat_idx[i]) == 0:
rois_p[i] = np.zeros((1, 5))
pyramid_idx.append(-1)
else:
rois_p[i] = rois[self.feat_idx[i]]
pyramid_idx.append(self.feat_idx[i])
rois_idx = np.argsort(np.hstack(pyramid_idx))[-rois.shape[0]:]
if is_train:
for i in range(self.num_strides):
self.in_grad_hist_list.append(mx.nd.zeros_like(in_data[i]))
if self.with_deformable:
for i in range(self.num_strides, self.num_strides * 3):
self.in_grad_hist_list.append(mx.nd.zeros_like(in_data[i]))
autograd.mark_variables([in_data[i] for i in range(self.num_strides * 3)], self.in_grad_hist_list)
with autograd.train_section():
for i in range(self.num_strides):
roi_offset_t = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), group_size=1, pooled_size=7,
sample_per_part=4, no_trans=True, part_size=7, output_dim=256, spatial_scale=1.0 / self.feat_strides[i])
roi_offset = mx.nd.FullyConnected(data=roi_offset_t, num_hidden=7 * 7 * 2, weight=in_data[i * 2 + self.num_strides], bias=in_data[i * 2 + 1 + self.num_strides])
roi_offset_reshape = mx.nd.reshape(data=roi_offset, shape=(-1, 2, 7, 7))
self.roi_pool[i] = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), trans=roi_offset_reshape,
group_size=1, pooled_size=7, sample_per_part=4, no_trans=False, part_size=7,
output_dim=self.output_dim, spatial_scale=1.0 / self.feat_strides[i], trans_std=0.1)
else:
autograd.mark_variables([in_data[i] for i in range(self.num_strides)], self.in_grad_hist_list)
with autograd.train_section():
for i in range(self.num_strides):
self.roi_pool[i] = mx.nd.ROIPooling(in_data[i], mx.nd.array(rois_p[i], in_data[i].context), (7, 7), spatial_scale=1.0 / self.feat_strides[i])
roi_pool = mx.nd.concatenate(self.roi_pool, axis=0)
else:
roi_pool = [None for _ in range(self.num_strides)]
if self.with_deformable:
for i in range(self.num_strides):
roi_offset_t = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), group_size=1, pooled_size=7,
sample_per_part=4, no_trans=True, part_size=7, output_dim=256, spatial_scale=1.0 / self.feat_strides[i])
roi_offset = mx.nd.FullyConnected(data=roi_offset_t, num_hidden=7 * 7 * 2, weight=in_data[i * 2 + self.num_strides], bias=in_data[i * 2 + 1 + self.num_strides])
roi_offset_reshape = mx.nd.reshape(data=roi_offset, shape=(-1, 2, 7, 7))
roi_pool[i] = mx.contrib.nd.DeformablePSROIPooling(data=in_data[i], rois=mx.nd.array(rois_p[i], in_data[i].context), trans=roi_offset_reshape,
group_size=1, pooled_size=7, sample_per_part=4, no_trans=False, part_size=7,
output_dim=self.output_dim, spatial_scale=1.0 / self.feat_strides[i], trans_std=0.1)
else:
for i in range(self.num_strides):
roi_pool[i] = mx.nd.ROIPooling(in_data[i], mx.nd.array(rois_p[i], in_data[i].context), (7, 7), spatial_scale=1.0 / self.feat_strides[i])
roi_pool = mx.nd.concatenate(roi_pool, axis=0)
roi_pool = mx.nd.take(roi_pool, mx.nd.array(rois_idx, roi_pool.context))
self.assign(out_data[0], req[0], roi_pool)
def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
for i in range(len(in_grad)):
self.assign(in_grad[i], req[i], 0)
with autograd.train_section():
for i in range(self.num_strides):
                if len(self.feat_idx[i]) > 0:
autograd.compute_gradient([mx.nd.take(out_grad[0], mx.nd.array(self.feat_idx[i], out_grad[0].context)) * self.roi_pool[i]])
if self.with_deformable:
for i in range(0, self.num_strides * 3):
self.assign(in_grad[i], req[i], self.in_grad_hist_list[i])
else:
for i in range(0, self.num_strides):
self.assign(in_grad[i], req[i], self.in_grad_hist_list[i])
gc.collect()
@mx.operator.register('fpn_roi_pooling')
class FPNROIPoolingProp(mx.operator.CustomOpProp):
def __init__(self, feat_strides='(4,8,16,32)', pooled_height='7', pooled_width='7', with_deformable='False', output_dim='256'):
super(FPNROIPoolingProp, self).__init__(need_top_grad=True)
self.pooled_height = int(pooled_height)
self.pooled_width = int(pooled_width)
self.feat_strides = np.fromstring(feat_strides[1:-1], dtype=int, sep=',')
self.with_deformable = with_deformable == 'True'
self.output_dim = int(output_dim)
self.num_strides = len(self.feat_strides)
def list_arguments(self):
args_list = []
for i in range(self.num_strides):
args_list.append('data_p{}'.format(2 + i))
if self.with_deformable:
for i in range(self.num_strides):
args_list.extend(['offset_weight_p{}'.format(2 + i), 'offset_bias_p{}'.format(2 + i)])
args_list.append('rois')
return args_list
def list_outputs(self):
return ['output']
def infer_shape(self, in_shape):
output_feat_shape = [in_shape[-1][0], in_shape[0][1], self.pooled_height, self.pooled_width]
if self.with_deformable:
offset_dim = self.pooled_height * self.pooled_width * 2
input_dim = self.pooled_height * self.pooled_width * self.output_dim
for i in range(self.num_strides):
in_shape[i * 2 + self.num_strides], in_shape[i * 2 + 1 + self.num_strides] = [offset_dim, input_dim], [offset_dim, ]
return in_shape, [output_feat_shape]
def create_operator(self, ctx, shapes, dtypes):
return FPNROIPoolingOperator(self.feat_strides, self.pooled_height, self.pooled_width, self.output_dim, self.with_deformable)
def declare_backward_dependency(self, out_grad, in_data, out_data):
return [out_grad[0]]
| true | true |
f7247f42ef9c871c8ebd07fc747da69ad689d3a3 | 405 | py | Python | 05/iterator_example.py | alissonit/pythontrap | b7780913d49af2142be4a9674ac435e2a67da201 | [
"MIT"
] | null | null | null | 05/iterator_example.py | alissonit/pythontrap | b7780913d49af2142be4a9674ac435e2a67da201 | [
"MIT"
] | null | null | null | 05/iterator_example.py | alissonit/pythontrap | b7780913d49af2142be4a9674ac435e2a67da201 | [
"MIT"
] | 1 | 2021-03-15T18:26:14.000Z | 2021-03-15T18:26:14.000Z | #CONSTRUINDO UMA CLASSE ITERATOR
class GenItem(object):
def __init__(self, first, last):
self.first = first
self.last = last
def __iter__(self):
return self
def __next__(self):
if self.first > self.last:
raise StopIteration
else:
self.first += 1
return self.first - 1
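
# Added illustration: because __iter__ returns self, GenItem is a one-shot
# iterator -- once exhausted, it stays exhausted.
once = GenItem(1, 3)
assert list(once) == [1, 2, 3]
assert list(once) == []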
n_list = GenItem(1,10)
print(list(n_list)) | 22.5 | 36 | 0.577778 |
class GenItem(object):
def __init__(self, first, last):
self.first = first
self.last = last
def __iter__(self):
return self
def __next__(self):
if self.first > self.last:
raise StopIteration
else:
self.first += 1
return self.first - 1
n_list = GenItem(1,10)
print(list(n_list)) | true | true |
f72480521f1fad6394a1656241b51fbd1c7d3230 | 14,939 | py | Python | Lib/site-packages/wx-3.0-msw/wx/lib/gridmovers.py | jickieduan/python27 | c752b552396bbed68d8555080d475718cea2edd0 | ["bzip2-1.0.6"] | 1 | 2021-02-13T22:40:50.000Z | 2021-02-13T22:40:50.000Z | Lib/site-packages/wx-3.0-msw/wx/lib/gridmovers.py | jickieduan/python27 | c752b552396bbed68d8555080d475718cea2edd0 | ["bzip2-1.0.6"] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | Lib/site-packages/wx-3.0-msw/wx/lib/gridmovers.py | jickieduan/python27 | c752b552396bbed68d8555080d475718cea2edd0 | ["bzip2-1.0.6"] | 2 | 2019-12-02T01:39:10.000Z | 2021-02-13T22:41:00.000Z |
#----------------------------------------------------------------------------
# Name: GridColMover.py
# Purpose: Grid Column Mover Extension
#
# Author: Gerrit van Dyk (email: gerritvd@decillion.net)
#
# Version 0.1
# Date: Nov 19, 2002
# RCS-ID: $Id$
# Licence: wxWindows license
#----------------------------------------------------------------------------
# 12/07/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o 2.5 Compatibility changes
#
# 12/18/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o wxGridColMoveEvent -> GridColMoveEvent
# o wxGridRowMoveEvent -> GridRowMoveEvent
# o wxGridColMover -> GridColMover
# o wxGridRowMover -> GridRowMover
#
import wx
import wx.grid
#----------------------------------------------------------------------------
# event class and macros
#
# New style 12/7/03
#
wxEVT_COMMAND_GRID_COL_MOVE = wx.NewEventType()
wxEVT_COMMAND_GRID_ROW_MOVE = wx.NewEventType()
EVT_GRID_COL_MOVE = wx.PyEventBinder(wxEVT_COMMAND_GRID_COL_MOVE, 1)
EVT_GRID_ROW_MOVE = wx.PyEventBinder(wxEVT_COMMAND_GRID_ROW_MOVE, 1)
#----------------------------------------------------------------------------
class GridColMoveEvent(wx.PyCommandEvent):
def __init__(self, id, dCol, bCol):
wx.PyCommandEvent.__init__(self, id = id)
self.SetEventType(wxEVT_COMMAND_GRID_COL_MOVE)
self.moveColumn = dCol
self.beforeColumn = bCol
def GetMoveColumn(self):
return self.moveColumn
def GetBeforeColumn(self):
return self.beforeColumn
class GridRowMoveEvent(wx.PyCommandEvent):
def __init__(self, id, dRow, bRow):
wx.PyCommandEvent.__init__(self,id = id)
self.SetEventType(wxEVT_COMMAND_GRID_ROW_MOVE)
self.moveRow = dRow
self.beforeRow = bRow
def GetMoveRow(self):
return self.moveRow
def GetBeforeRow(self):
return self.beforeRow
#----------------------------------------------------------------------------
# graft new methods into the wxGrid class
def _ColToRect(self,col):
if self.GetNumberRows() > 0:
rect = self.CellToRect(0,col)
else:
rect = wx.Rect()
rect.height = self.GetColLabelSize()
rect.width = self.GetColSize(col)
for cCol in range(0,col):
rect.x += self.GetColSize(cCol)
rect.y = self.GetGridColLabelWindow().GetPosition()[1]
return rect
wx.grid.Grid.ColToRect = _ColToRect
def _RowToRect(self,row):
if self.GetNumberCols() > 0:
rect = self.CellToRect(row,0)
else:
rect = wx.Rect()
rect.width = self.GetRowLabelSize()
rect.height = self.GetRowSize(row)
for cRow in range(0,row):
rect.y += self.GetRowSize(cRow)
rect.x = self.GetGridRowLabelWindow().GetPosition()[0]
return rect
wx.grid.Grid.RowToRect = _RowToRect
#----------------------------------------------------------------------------
class ColDragWindow(wx.Window):
def __init__(self,parent,image,dragCol):
wx.Window.__init__(self,parent,-1, style=wx.SIMPLE_BORDER)
self.image = image
self.SetSize((self.image.GetWidth(),self.image.GetHeight()))
self.ux = parent.GetScrollPixelsPerUnit()[0]
self.moveColumn = dragCol
self.Bind(wx.EVT_PAINT, self.OnPaint)
def DisplayAt(self,pos,y):
        x = self.GetPosition()[0]
if x == pos:
self.Refresh() # Need to display insertion point
else:
self.MoveXY(pos,y)
def GetMoveColumn(self):
return self.moveColumn
def _GetInsertionInfo(self):
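        # Find the column boundary nearest this drag window's horizontal
        # center, scanning the columns the window currently overlaps. Returns
        # (pixel offset of that boundary within the window, column index).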
parent = self.GetParent()
sx = parent.GetViewStart()[0] * self.ux
sx -= parent.GetRowLabelSize()
x = self.GetPosition()[0]
w = self.GetSize()[0]
sCol = parent.XToCol(x + sx)
eCol = parent.XToCol(x + w + sx)
iPos = xPos = xCol = 99999
centerPos = x + sx + (w / 2)
for col in range(sCol,eCol + 1):
cx = parent.ColToRect(col)[0]
if abs(cx - centerPos) < iPos:
iPos = abs(cx - centerPos)
xCol = col
xPos = cx
if xCol < 0 or xCol > parent.GetNumberCols():
xCol = parent.GetNumberCols()
return (xPos - sx - x,xCol)
def GetInsertionColumn(self):
return self._GetInsertionInfo()[1]
def GetInsertionPos(self):
return self._GetInsertionInfo()[0]
def OnPaint(self,evt):
dc = wx.PaintDC(self)
w,h = self.GetSize()
dc.DrawBitmap(self.image, 0,0)
dc.SetPen(wx.Pen(wx.BLACK,1,wx.SOLID))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(0,0, w,h)
iPos = self.GetInsertionPos()
dc.DrawLine(iPos,h - 10, iPos,h)
class RowDragWindow(wx.Window):
def __init__(self,parent,image,dragRow):
wx.Window.__init__(self,parent,-1, style=wx.SIMPLE_BORDER)
self.image = image
self.SetSize((self.image.GetWidth(),self.image.GetHeight()))
self.uy = parent.GetScrollPixelsPerUnit()[1]
self.moveRow = dragRow
self.Bind(wx.EVT_PAINT, self.OnPaint)
def DisplayAt(self,x,pos):
y = self.GetPosition()[1]
if y == pos:
self.Refresh() # Need to display insertion point
else:
self.MoveXY(x,pos)
def GetMoveRow(self):
return self.moveRow
def _GetInsertionInfo(self):
parent = self.GetParent()
sy = parent.GetViewStart()[1] * self.uy
sy -= parent.GetColLabelSize()
y = self.GetPosition()[1]
h = self.GetSize()[1]
sRow = parent.YToRow(y + sy)
eRow = parent.YToRow(y + h + sy)
iPos = yPos = yRow = 99999
centerPos = y + sy + (h / 2)
for row in range(sRow,eRow + 1):
cy = parent.RowToRect(row)[1]
if abs(cy - centerPos) < iPos:
iPos = abs(cy - centerPos)
yRow = row
yPos = cy
if yRow < 0 or yRow > parent.GetNumberRows():
yRow = parent.GetNumberRows()
return (yPos - sy - y,yRow)
def GetInsertionRow(self):
return self._GetInsertionInfo()[1]
def GetInsertionPos(self):
return self._GetInsertionInfo()[0]
def OnPaint(self,evt):
dc = wx.PaintDC(self)
w,h = self.GetSize()
dc.DrawBitmap(self.image, 0,0)
dc.SetPen(wx.Pen(wx.BLACK,1,wx.SOLID))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(0,0, w,h)
iPos = self.GetInsertionPos()
dc.DrawLine(w - 10,iPos, w,iPos)
#----------------------------------------------------------------------------
class GridColMover(wx.EvtHandler):
def __init__(self,grid):
wx.EvtHandler.__init__(self)
self.grid = grid
self.lwin = grid.GetGridColLabelWindow()
self.lwin.PushEventHandler(self)
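        # Push ourselves onto the column label window's handler chain so we
        # see its mouse events before the grid's default handling does.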
self.colWin = None
self.ux = self.grid.GetScrollPixelsPerUnit()[0]
self.startX = -10
self.cellX = 0
self.didMove = False
self.isDragging = False
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_LEFT_DOWN, self.OnPress)
self.Bind(wx.EVT_LEFT_UP, self.OnRelease)
def OnMouseMove(self,evt):
if not self.isDragging:
evt.Skip()
else:
_rlSize = self.grid.GetRowLabelSize()
if abs(self.startX - evt.X) >= 3 \
and abs(evt.X - self.lastX) >= 3:
self.lastX = evt.X
self.didMove = True
sx,y = self.grid.GetViewStart()
w,h = self.lwin.GetClientSize()
x = sx * self.ux
if (evt.X + x) < x:
x = evt.X + x
elif evt.X > w:
x += evt.X - w
if x < 1: x = 0
else: x /= self.ux
if x != sx:
if wx.Platform == '__WXMSW__':
self.colWin.Show(False)
self.grid.Scroll(x,y)
x,y = self.lwin.ClientToScreenXY(evt.X,0)
x,y = self.grid.ScreenToClientXY(x,y)
if not self.colWin.IsShown():
self.colWin.Show(True)
px = x - self.cellX
if px < 0 + _rlSize: px = 0 + _rlSize
if px > w - self.colWin.GetSize()[0] + _rlSize:
px = w - self.colWin.GetSize()[0] + _rlSize
self.colWin.DisplayAt(px,y)
return
def OnPress(self,evt):
self.startX = self.lastX = evt.X
_rlSize = self.grid.GetRowLabelSize()
sx = self.grid.GetViewStart()[0] * self.ux
sx -= _rlSize
px,py = self.lwin.ClientToScreenXY(evt.X,evt.Y)
px,py = self.grid.ScreenToClientXY(px,py)
if self.grid.XToEdgeOfCol(px + sx) != wx.NOT_FOUND:
evt.Skip()
return
self.isDragging = True
self.didMove = False
col = self.grid.XToCol(px + sx)
rect = self.grid.ColToRect(col)
self.cellX = px + sx - rect.x
size = self.lwin.GetSize()
rect.y = 0
rect.x -= sx + _rlSize
rect.height = size[1]
colImg = self._CaptureImage(rect)
self.colWin = ColDragWindow(self.grid,colImg,col)
self.colWin.Show(False)
self.lwin.CaptureMouse()
evt.Skip()
def OnRelease(self,evt):
if self.isDragging:
self.lwin.ReleaseMouse()
self.colWin.Show(False)
self.isDragging = False
if not self.didMove:
px = self.lwin.ClientToScreenXY(self.startX,0)[0]
px = self.grid.ScreenToClientXY(px,0)[0]
sx = self.grid.GetViewStart()[0] * self.ux
sx -= self.grid.GetRowLabelSize()
col = self.grid.XToCol(px+sx)
if col != wx.NOT_FOUND:
self.grid.SelectCol(col,evt.ControlDown())
return
else:
bCol = self.colWin.GetInsertionColumn()
dCol = self.colWin.GetMoveColumn()
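                # The mover only reports the drag; the application must
                # handle EVT_GRID_COL_MOVE and reorder its own data.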
wx.PostEvent(self,
GridColMoveEvent(self.grid.GetId(), dCol, bCol))
self.colWin.Destroy()
evt.Skip()
def _CaptureImage(self,rect):
bmp = wx.EmptyBitmap(rect.width,rect.height)
memdc = wx.MemoryDC()
memdc.SelectObject(bmp)
dc = wx.WindowDC(self.lwin)
memdc.Blit(0,0, rect.width, rect.height, dc, rect.x, rect.y)
memdc.SelectObject(wx.NullBitmap)
return bmp
class GridRowMover(wx.EvtHandler):
def __init__(self,grid):
wx.EvtHandler.__init__(self)
self.grid = grid
self.lwin = grid.GetGridRowLabelWindow()
self.lwin.PushEventHandler(self)
self.rowWin = None
self.uy = self.grid.GetScrollPixelsPerUnit()[1]
self.startY = -10
self.cellY = 0
self.didMove = False
self.isDragging = False
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_LEFT_DOWN, self.OnPress)
self.Bind(wx.EVT_LEFT_UP, self.OnRelease)
def OnMouseMove(self,evt):
if not self.isDragging:
evt.Skip()
else:
_clSize = self.grid.GetColLabelSize()
if abs(self.startY - evt.Y) >= 3 \
and abs(evt.Y - self.lastY) >= 3:
self.lastY = evt.Y
self.didMove = True
x,sy = self.grid.GetViewStart()
                w,h = self.lwin.GetClientSize()
y = sy * self.uy
if (evt.Y + y) < y:
y = evt.Y + y
elif evt.Y > h:
y += evt.Y - h
if y < 1:
y = 0
else:
y /= self.uy
if y != sy:
if wx.Platform == '__WXMSW__':
self.rowWin.Show(False)
self.grid.Scroll(x,y)
x,y = self.lwin.ClientToScreenXY(0,evt.Y)
x,y = self.grid.ScreenToClientXY(x,y)
if not self.rowWin.IsShown():
self.rowWin.Show(True)
py = y - self.cellY
if py < 0 + _clSize:
py = 0 + _clSize
if py > h - self.rowWin.GetSize()[1] + _clSize:
py = h - self.rowWin.GetSize()[1] + _clSize
self.rowWin.DisplayAt(x,py)
return
def OnPress(self,evt):
self.startY = self.lastY = evt.Y
_clSize = self.grid.GetColLabelSize()
sy = self.grid.GetViewStart()[1] * self.uy
sy -= _clSize
px,py = self.lwin.ClientToScreenXY(evt.X,evt.Y)
px,py = self.grid.ScreenToClientXY(px,py)
if self.grid.YToEdgeOfRow(py + sy) != wx.NOT_FOUND:
evt.Skip()
return
row = self.grid.YToRow(py + sy)
if row == wx.NOT_FOUND:
evt.Skip()
return
self.isDragging = True
self.didMove = False
rect = self.grid.RowToRect(row)
self.cellY = py + sy - rect.y
size = self.lwin.GetSize()
rect.x = 0
rect.y -= sy + _clSize
rect.width = size[0]
rowImg = self._CaptureImage(rect)
self.rowWin = RowDragWindow(self.grid,rowImg,row)
self.rowWin.Show(False)
self.lwin.CaptureMouse()
evt.Skip()
def OnRelease(self,evt):
if self.isDragging:
self.lwin.ReleaseMouse()
self.rowWin.Show(False)
self.isDragging = False
if not self.didMove:
py = self.lwin.ClientToScreenXY(0,self.startY)[1]
py = self.grid.ScreenToClientXY(0,py)[1]
sy = self.grid.GetViewStart()[1] * self.uy
sy -= self.grid.GetColLabelSize()
row = self.grid.YToRow(py + sy)
if row != wx.NOT_FOUND:
self.grid.SelectRow(row,evt.ControlDown())
return
else:
bRow = self.rowWin.GetInsertionRow()
dRow = self.rowWin.GetMoveRow()
wx.PostEvent(self,
GridRowMoveEvent(self.grid.GetId(), dRow, bRow))
self.rowWin.Destroy()
evt.Skip()
def _CaptureImage(self,rect):
bmp = wx.EmptyBitmap(rect.width,rect.height)
memdc = wx.MemoryDC()
memdc.SelectObject(bmp)
dc = wx.WindowDC(self.lwin)
memdc.Blit(0,0, rect.width, rect.height, dc, rect.x, rect.y)
memdc.SelectObject(wx.NullBitmap)
return bmp
#----------------------------------------------------------------------------
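
A minimal usage sketch (editor's addition): attach a mover to a grid and handle
the posted event by reordering your own data; the movers themselves never modify
the grid. `MoverFrame` and the handler body are illustrative assumptions.

import wx
import wx.grid
from wx.lib.gridmovers import GridColMover, EVT_GRID_COL_MOVE

class MoverFrame(wx.Frame):
    def __init__(self):
        wx.Frame.__init__(self, None, title='movable columns')
        self.grid = wx.grid.Grid(self)
        self.grid.CreateGrid(5, 5)
        GridColMover(self.grid)  # enables dragging on the column labels
        self.grid.Bind(EVT_GRID_COL_MOVE, self.OnColMove, self.grid)

    def OnColMove(self, evt):
        # Reorder the underlying data here.
        print('move column %d before column %d' %
              (evt.GetMoveColumn(), evt.GetBeforeColumn()))

app = wx.App(False)
MoverFrame().Show()
app.MainLoop()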
f7248062a2abc3c1b4355687c2e16191aa358970 | 28,942 | py | Python | front-end/testsuite-python-lib/Python-2.3/Lib/test/test_re.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | ["MIT"] | 1 | 2020-11-26T18:53:46.000Z | 2020-11-26T18:53:46.000Z | front-end/testsuite-python-lib/Python-2.3/Lib/test/test_re.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | ["MIT"] | null | null | null | front-end/testsuite-python-lib/Python-2.3/Lib/test/test_re.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | ["MIT"] | 1 | 2019-04-11T11:27:01.000Z | 2019-04-11T11:27:01.000Z |
import sys
sys.path = ['.'] + sys.path
from test.test_support import verbose, run_unittest
import re
from sre import Scanner
import sys, os, traceback
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefuly modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:*)", ":a:b::c"),
['', ':', 'a', ':', 'b', '::', 'c'])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def test_qualified_re_split(self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
(":", ":"),
(":", "::")])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
self.assertEqual(re.match('a', 'a').groups(), ())
self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
# A single group
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_groupref_exists(self):
return # not yet
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
def test_repeat_minmax(self):
self.assertEqual(re.match("^(\w){1}$", "abc"), None)
self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)
self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^x{1}$", "xxx"), None)
self.assertEqual(re.match("^x{1}?$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
def test_getattr(self):
self.assertEqual(re.match("(a)", "a").pos, 0)
self.assertEqual(re.match("(a)", "a").endpos, 1)
self.assertEqual(re.match("(a)", "a").string, "a")
self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
self.assertNotEqual(re.match("(a)", "a").re, None)
def test_special_escapes(self):
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\b(b.)\b",
u"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
u"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"^abc$", u"\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"\nabc\n", re.M), None)
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a").group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.LOCALE).group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.UNICODE).group(0), "1aa! a")
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_bigcharset(self):
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222").group(1), u"\u2222")
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222", re.UNICODE).group(1), u"\u2222")
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
    def test_ignore_case_backrefs(self):
self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
def test_category(self):
self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
def test_getlower(self):
import _sre
self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def test_re_escape(self):
p=""
for i in range(0, 256):
p = p + chr(i)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)) is not None,
True)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)).span(), (0,1))
pat=re.compile(re.escape(p))
self.assertEqual(pat.match(p) is not None, True)
self.assertEqual(pat.match(p).span(), (0,256))
def test_pickling(self):
import pickle
self.pickle_test(pickle)
import cPickle
self.pickle_test(cPickle)
def pickle_test(self, pickle):
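        # Round-trip a compiled pattern through the given pickle module.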
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
s = pickle.dumps(oldpat)
newpat = pickle.loads(s)
self.assertEqual(oldpat, newpat)
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
def test_flags(self):
for flag in [re.I, re.M, re.X, re.S, re.L]:
self.assertNotEqual(re.compile('^pattern$', flag), None)
def test_sre_character_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None)
self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None)
self.assertRaises(re.error, re.match, "\911", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
def test_bug_527371(self):
# bug described in patches 527371/672491
self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
self.assertEqual(re.match("((a))", "a").lastindex, 1)
def test_bug_545855(self):
# bug 545855 -- This pattern failed to cause a compile error as it
# should, instead provoking a TypeError.
self.assertRaises(re.error, re.compile, 'foo[a-')
def test_bug_418626(self):
# bugs 418626 at al. -- Testing Greg Chapman's addition of op code
# SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
# pattern '*?' on a long string.
self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
20003)
self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
# non-simple '*?' still recurses and hits the recursion limit
self.assertRaises(RuntimeError, re.search, '(a|b)*?c', 10000*'ab'+'cd')
def test_bug_612074(self):
pat=u"["+re.escape(u"\u2039")+u"]"
self.assertEqual(re.compile(pat) and 1, 1)
def test_stack_overflow(self):
# nasty case that overflows the straightforward recursive
# implementation of repeated groups.
self.assertRaises(RuntimeError, re.match, '(x)*', 50000*'x')
self.assertRaises(RuntimeError, re.match, '(x)*y', 50000*'x'+'y')
self.assertRaises(RuntimeError, re.match, '(x)*?y', 50000*'x'+'y')
def test_scanner(self):
def s_ident(scanner, token): return token
def s_operator(scanner, token): return "op%s" % token
def s_float(scanner, token): return float(token)
def s_int(scanner, token): return int(token)
scanner = Scanner([
(r"[a-zA-Z_]\w*", s_ident),
(r"\d+\.\d*", s_float),
(r"\d+", s_int),
(r"=|\+|-|\*|/", s_operator),
(r"\s+", None),
])
self.assertNotEqual(scanner.scanner.scanner("").pattern, None)
self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
(['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
'op+', 'bar'], ''))
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
try:
unicode
except NameError:
return # no problem if we have no unicode
class my_unicode(unicode): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
def run_re_tests():
from test.re_tests import benchmarks, tests, SUCCEED, FAIL, SYNTAX_ERROR
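    # Each test tuple is (pattern, string, outcome) or, with five fields,
    # (pattern, string, outcome, repl, expected): `repl` is eval'ed against
    # the match's groups and the result compared with `expected`.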
if verbose:
print 'Running re_tests test suite'
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError, ('Test tuples should have 3 or 5 fields', t)
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print '=== Syntax error:', t
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print '*** Unexpected error ***', t
if verbose:
traceback.print_exc(file=sys.stdout)
else:
try:
result = obj.search(s)
except re.error, msg:
print '=== Unexpected exception', t, repr(msg)
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print '=== Succeeded incorrectly', t
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
for i in range(1, 100):
try:
gi = result.group(i)
                            # Special hack: otherwise the string concatenation fails:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print '=== grouping error', t,
print repr(repl) + ' should be ' + repr(expected)
else:
print '=== Failed incorrectly', t
# Try the match on a unicode string, and check that it
# still succeeds.
try:
result = obj.search(unicode(s, "latin-1"))
if result is None:
print '=== Fails on unicode match', t
except NameError:
continue # 1.5.2
except TypeError:
continue # unicode test case
# Try the match on a unicode pattern, and check that it
# still succeeds.
obj=re.compile(unicode(pattern, "latin-1"))
result = obj.search(s)
if result is None:
print '=== Fails on unicode pattern match', t
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string), so we'll ignore patterns that feature it.
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print '=== Failed on range-limited match', t
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print '=== Fails on case-insensitive match', t
# Try the match with LOCALE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print '=== Fails on locale-sensitive match', t
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print '=== Fails on unicode-sensitive match', t
def test_main():
run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
| 46.530547 | 85 | 0.478854 | import sys
sys.path = ['.'] + sys.path
from test.test_support import verbose, run_unittest
import re
from sre import Scanner
import sys, os, traceback
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefuly modeled to
import unittest
class ReTests(unittest.TestCase):
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:*)", ":a:b::c"),
['', ':', 'a', ':', 'b', '::', 'c'])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def test_qualified_re_split(self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
(":", ":"),
(":", "::")])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
self.assertEqual(re.match('a', 'a').groups(), ())
self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_groupref_exists(self):
return
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
def test_repeat_minmax(self):
self.assertEqual(re.match("^(\w){1}$", "abc"), None)
self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)
self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^x{1}$", "xxx"), None)
self.assertEqual(re.match("^x{1}?$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
def test_getattr(self):
self.assertEqual(re.match("(a)", "a").pos, 0)
self.assertEqual(re.match("(a)", "a").endpos, 1)
self.assertEqual(re.match("(a)", "a").string, "a")
self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
self.assertNotEqual(re.match("(a)", "a").re, None)
def test_special_escapes(self):
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\b(b.)\b",
u"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
u"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"^abc$", u"\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", u"\nabc\n", re.M), None)
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a").group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.LOCALE).group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.UNICODE).group(0), "1aa! a")
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_bigcharset(self):
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222").group(1), u"\u2222")
self.assertEqual(re.match(u"([\u2222\u2223])",
u"\u2222", re.UNICODE).group(1), u"\u2222")
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
def test_ignore_case(self):
self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
def test_category(self):
self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
def test_getlower(self):
import _sre
self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", u"ABC", re.I).group(0), "ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def test_re_escape(self):
p=""
for i in range(0, 256):
p = p + chr(i)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)) is not None,
True)
self.assertEqual(re.match(re.escape(chr(i)), chr(i)).span(), (0,1))
pat=re.compile(re.escape(p))
self.assertEqual(pat.match(p) is not None, True)
self.assertEqual(pat.match(p).span(), (0,256))
def test_pickling(self):
import pickle
self.pickle_test(pickle)
import cPickle
self.pickle_test(cPickle)
def pickle_test(self, pickle):
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
s = pickle.dumps(oldpat)
newpat = pickle.loads(s)
self.assertEqual(oldpat, newpat)
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
def test_flags(self):
for flag in [re.I, re.M, re.X, re.S, re.L]:
self.assertNotEqual(re.compile('^pattern$', flag), None)
def test_sre_character_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None)
self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None)
self.assertRaises(re.error, re.match, "\911", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
def test_bug_527371(self):
self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
self.assertEqual(re.match("((a))", "a").lastindex, 1)
def test_bug_545855(self):
self.assertRaises(re.error, re.compile, 'foo[a-')
def test_bug_418626(self):
# bug 418626: the SRE_OP_MIN_REPEAT_ONE opcode eliminates recursion on
# simple uses of pattern '*?' on a long string.
self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
20003)
self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
# non-simple '*?' still recurses and hits the recursion limit
self.assertRaises(RuntimeError, re.search, '(a|b)*?c', 10000*'ab'+'cd')
def test_bug_612074(self):
pat=u"["+re.escape(u"\u2039")+u"]"
self.assertEqual(re.compile(pat) and 1, 1)
def test_stack_overflow(self):
# nasty case that overflows the straightforward recursive
# implementation of repeated groups.
self.assertRaises(RuntimeError, re.match, '(x)*', 50000*'x')
self.assertRaises(RuntimeError, re.match, '(x)*y', 50000*'x'+'y')
self.assertRaises(RuntimeError, re.match, '(x)*?y', 50000*'x'+'y')
def test_scanner(self):
def s_ident(scanner, token): return token
def s_operator(scanner, token): return "op%s" % token
def s_float(scanner, token): return float(token)
def s_int(scanner, token): return int(token)
scanner = Scanner([
(r"[a-zA-Z_]\w*", s_ident),
(r"\d+\.\d*", s_float),
(r"\d+", s_int),
(r"=|\+|-|\*|/", s_operator),
(r"\s+", None),
])
self.assertNotEqual(scanner.scanner.scanner("").pattern, None)
self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
(['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
'op+', 'bar'], ''))
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
try:
unicode
except NameError:
return # no problem if we have no unicode
class my_unicode(unicode): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
def run_re_tests():
from test.re_tests import benchmarks, tests, SUCCEED, FAIL, SYNTAX_ERROR
if verbose:
print 'Running re_tests test suite'
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError, ('Test tuples should have 3 or 5 fields', t)
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print '=== Syntax error:', t
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print '*** Unexpected error ***', t
if verbose:
traceback.print_exc(file=sys.stdout)
else:
try:
result = obj.search(s)
except re.error, msg:
print '=== Unexpected exception', t, repr(msg)
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print '=== Succeeded incorrectly', t
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
for i in range(1, 100):
try:
gi = result.group(i)
# Special hack: otherwise the string concatenation below fails:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print '=== grouping error', t,
print repr(repl) + ' should be ' + repr(expected)
else:
print '=== Failed incorrectly', t
# Try the match on a unicode string, and check that it
# still succeeds.
try:
result = obj.search(unicode(s, "latin-1"))
if result is None:
print '=== Fails on unicode match', t
except NameError:
continue # 1.5.2
except TypeError:
continue # unicode test case
# Try the match on a unicode pattern, and check that it
# still succeeds.
obj=re.compile(unicode(pattern, "latin-1"))
result = obj.search(s)
if result is None:
print '=== Fails on unicode pattern match', t
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string).
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print '=== Failed on range-limited match', t
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print '=== Fails on case-insensitive match', t
# Try the match with LOCALE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print '=== Fails on locale-sensitive match', t
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print '=== Fails on unicode-sensitive match', t
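# An illustrative 5-field test tuple (hypothetical, but following the format
# consumed above: pattern, string, outcome, replacement template, expected
# value of the eval'd template):
#   ('(a)(b)', 'ab', SUCCEED, 'found+"-"+g1+g2', 'ab-ab')
# since vardict maps 'found' to group(0) and g1/g2 to groups 1 and 2.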
def test_main():
run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
| false | true |
f72480fcb8551a59e4a32a3f79265c45343d673d | 26,964 | py | Python | fair/forward.py | shaheen19/FAIR | 345c23b3d35918729e7aa49ecb39047494c48a6e | [
"Apache-2.0"
] | 1 | 2019-09-15T02:35:47.000Z | 2019-09-15T02:35:47.000Z | fair/forward.py | shaheen19/FAIR | 345c23b3d35918729e7aa49ecb39047494c48a6e | [
"Apache-2.0"
] | null | null | null | fair/forward.py | shaheen19/FAIR | 345c23b3d35918729e7aa49ecb39047494c48a6e | [
"Apache-2.0"
] | null | null | null | from __future__ import division
import inspect
import numpy as np
import warnings
from scipy.optimize import root
from .ancil import natural, cmip6_volcanic, cmip6_solar, historical_scaling
from .constants import molwt, lifetime, radeff
from .constants.general import M_ATMOS, ppm_gtc
from .defaults import carbon, thermal
from .forcing import ozone_tr, ozone_st, h2o_st, contrails, aerosols, bc_snow,\
landuse
from .forcing.ghg import co2_log
def iirf_interp(alp_b,a,tau,iirf_h,targ_iirf):
"""Interpolation function for finding alpha, the CO2 decay time constant
scaling factor, in the iirf_h equation. See Eq. (7) of Millar et al., ACP (2017).
Inputs:
alp_b : Guess for alpha, the scale factor, for tau
a : partition fractions for CO2 boxes
tau : time constants for CO2 boxes
iirf_h : time horizon for time-integrated airborne fraction
targ_iirf: iirf_h calculated using simple parameterisation (Eq. (8),
Millar et al (2017)).
"""
iirf_arr = alp_b*(np.sum(a*tau*(1.0 - np.exp(-iirf_h/(tau*alp_b)))))
return iirf_arr - targ_iirf
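# A sketch of how iirf_interp is typically used (this mirrors the call in
# carbon_cycle below; 0.16 is the customary first guess for the scale factor):
#   from scipy.optimize import root
#   alpha = root(iirf_interp, 0.16, args=(a, tau, iirf_h, target_iirf))['x']
# root() finds the alpha at which the residual returned above is zero, i.e.
# where the scaled time constants reproduce the target iIRF.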
def iirf_simple(c_acc, temp, r0, rc, rt, iirf_max):
"""Simple linear iIRF relationship. Eq. (8) of Millar et al ACP (2017).
Inputs:
c_acc : cumulative airborne carbon anomaly (GtC) since
pre-industrial
temp : temperature anomaly since pre-industrial
r0 : pre-industrial time-integrated airborne fraction (yr)
rc : sensitivity of time-integrated airborne fraction to airborne
carbon (yr/GtC)
rt : sensitivity of time-integrated airborne fraction to
temperature (yr/K)
iirf_max : maximum value of time-integrated airborne fraction (yr)
Outputs:
iirf : time-integrated airborne fraction of carbon (yr)
"""
return np.min([r0 + rc * c_acc + rt * temp, iirf_max])
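# Worked example (illustrative values close to FaIR's defaults): with r0=35 yr,
# rc=0.019 yr/GtC, rt=4.165 yr/K, c_acc=400 GtC, temp=1.0 K and iirf_max=97 yr,
#   iirf = min(35 + 0.019*400 + 4.165*1.0, 97.0) = min(46.765, 97.0) = 46.765 yr.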
def calculate_q(tcrecs, d, f2x, tcr_dbl, nt):
"""If TCR and ECS are supplied, calculate the q model coefficients.
See Eqs. (4) and (5) of Millar et al ACP (2017).
Inputs:
tcrecs : 2-element array of transient climate response (TCR) and
equilibrium climate sensitivity (ECS).
d : The slow and fast thermal response time constants
f2x : Effective radiative forcing from a doubling of CO2
tcr_dbl : time to a doubling of CO2 under 1% per year CO2 increase, yr
nt : number of timesteps
Outputs:
q : coefficients of slow and fast temperature change in each
timestep ((nt, 2) array).
"""
# TODO:
# error checking before call
# benchmark one call per timestep and if not slower do not convert to 2D
# - will make code cleaner
k = 1.0 - (d/tcr_dbl)*(1.0 - np.exp(-tcr_dbl/d))
# if ECS and TCR are not time-varying, expand them to 2D array anyway
if tcrecs.ndim==1:
if len(tcrecs)!=2:
raise ValueError(
"Constant TCR and ECS should be a 2-element array")
tcrecs = np.ones((nt, 2)) * tcrecs
elif tcrecs.ndim==2:
if tcrecs.shape!=(nt, 2):
raise ValueError(
"Transient TCR and ECS should be a nt x 2 array")
q = (1.0 / f2x) * (1.0/(k[0]-k[1])) * np.array([
tcrecs[:,0]-tcrecs[:,1]*k[1],tcrecs[:,1]*k[0]-tcrecs[:,0]]).T
return q
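# Example (a sketch; the exact defaults live in fair.defaults.thermal): with
# AR5-like inputs tcrecs=np.array([1.6, 2.75]), d=np.array([239., 4.1]),
# f2x=3.71 and tcr_dbl=np.log(2)/np.log(1.01) (roughly 69.7 yr), this returns
# a constant (nt, 2) array q whose columns weight the slow (d[0]) and fast
# (d[1]) thermal responses.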
def carbon_cycle(e0, c_acc0, temp, r0, rc, rt, iirf_max, time_scale_sf0, a, tau,
iirf_h, carbon_boxes0, c_pi, c0, e1):
"""Calculates CO2 concentrations from emissions.
Inputs:
e0 : emissions of CO2 (GtC) in timestep t-1
c_acc0 : cumulative airborne carbon anomaly (GtC) since
pre-industrial, timestep t-1
temp : temperature anomaly above pre-industrial (K)
r0 : pre-industrial time-integrated airborne fraction (yr)
rc : sensitivity of time-integrated airborne fraction to
airborne carbon (yr/GtC)
rt : sensitivity of time-integrated airborne fraction to
temperature (yr/K)
iirf_max : maximum value of time-integrated airborne fraction (yr)
time_scale_sf0: initial guess of alpha scaling factor
a : partition coefficient of carbon boxes
tau : present-day decay time constants of CO2 (yr)
iirf_h : time horizon for time-integrated airborne fraction (yr)
carbon_boxes0 : carbon stored in each atmospheric reservoir at timestep
t-1 (GtC)
c_pi : pre-industrial concentration of CO2, ppmv
c0 : concentration of CO2 in timestep t-1, ppmv
e1 : emissions of CO2 in timestep t, GtC
Outputs:
c1 : concentrations of CO2 in timestep t, ppmv
c_acc1 : cumulative airborne carbon anomaly (GtC) since
pre-industrial, timestep t
carbon_boxes1 : carbon stored in each atmospheric reservoir at timestep
t (GtC)
time_scale_sf : scale factor for CO2 decay constants
"""
iirf = iirf_simple(c_acc0, temp, r0, rc, rt, iirf_max)
time_scale_sf = root(iirf_interp, time_scale_sf0,
args=(a, tau, iirf_h, iirf))['x']
tau_new = tau * time_scale_sf
carbon_boxes1 = carbon_boxes0*np.exp(-1.0/tau_new) + a*e1 / ppm_gtc
c1 = np.sum(carbon_boxes1) + c_pi
c_acc1 = c_acc0 + 0.5*(e1 + e0) - (c1 - c0)*ppm_gtc
return c1, c_acc1, carbon_boxes1, time_scale_sf
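# Single-timestep sketch (hypothetical state values; fair_scm below makes this
# call once per timestep, carrying the carbon-cycle state forward):
#   c1, c_acc1, boxes1, sf = carbon_cycle(
#       10., 400., 1.0, r0, rc, rt, iirf_max, 0.16, a, tau, iirf_h,
#       boxes0, 278., 400., 10.)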
def emis_to_conc(c0, e0, e1, ts, lt, vm):
"""Calculate concentrations of well mixed GHGs from emissions for simple
one-box model.
Inputs (all can be scalar or 1D arrays for multiple species):
c0: concentrations in timestep t-1
e0: emissions in timestep t-1
e1: emissions in timestep t
ts: length of timestep. Use 1 for sensible results in FaIR 1.3.
lt: atmospheric (e-folding) lifetime of GHG
vm: conversion from emissions units (e.g. Mt) to concentrations units
(e.g. ppb)
Outputs:
c1: concentrations in timestep t
"""
c1 = c0 - c0 * (1.0 - np.exp(-ts/lt)) + 0.5 * ts * (e1 + e0) * vm
return c1
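# Note the update is algebraically c1 = c0*exp(-ts/lt) + 0.5*ts*(e0 + e1)*vm:
# exponential decay of the existing burden plus a trapezium-rule average of
# last and current timestep emissions, converted to concentration units.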
def forc_to_temp(t0, q, d, f, e=1.0):
"""Calculate temperature from a given radiative forcing.
Inputs:
t0: Temperature in timestep t-1
q: The matrix contributions to slow and fast temperature change
calculated from ECS and TCR (2 element array)
d: The slow and fast thermal response time constants (2 element array)
f: radiative forcing (can be scalar or 1D array representing multiple
species)
Keywords:
e: efficacy factor (default 1); if f is an array, e should be an array
of the same length.
Outputs:
t1: slow and fast contributions to total temperature (2 element array)
in timestep t
"""
t1 = t0*np.exp(-1.0/d) + q*(1.0-np.exp((-1.0)/d))*np.sum(f*e)
return t1
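# Illustrative step: with d=[239., 4.1] (slow/fast response times in years)
# and constant forcing f, each box relaxes towards its equilibrium
# q_j*np.sum(f*e) with e-folding time d_j; the caller sums the two boxes to
# obtain the total temperature anomaly.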
def fair_scm(
emissions=False,
emissions_driven=True,
C=None,
other_rf=0.0,
q = thermal.q,
tcrecs = thermal.tcrecs,
d = thermal.d,
F2x = thermal.f2x,
tcr_dbl = thermal.tcr_dbl,
a = carbon.a,
tau = carbon.tau,
r0 = carbon.r0,
rc = carbon.rc,
rt = carbon.rt,
iirf_max = carbon.iirf_max,
iirf_h = carbon.iirf_h,
C_pi=np.array([278., 722., 273., 34.497] + [0.]*25 + [13.0975, 547.996]),
restart_in=False,
restart_out=False,
F_tropO3 = 0.,
F_aerosol = 0.,
F_volcanic=cmip6_volcanic.Forcing.volcanic,
F_solar=cmip6_solar.Forcing.solar,
F_contrails=0.,
F_bcsnow=0.,
F_landuse=0.,
aviNOx_frac=0.,
fossilCH4_frac=0.,
natural=natural.Emissions.emissions,
efficacy=np.array([1.]*9 + [3.] + [1.]*3),
scale=None,
oxCH4_frac=0.61,
ghg_forcing="Etminan",
stwv_from_ch4=None,
b_aero = np.array([-6.2227e-3, 0.0, -3.8392e-4, -1.16551e-3, 1.601537e-2,
-1.45339e-3, -1.55605e-3]),
b_tro3 = np.array([2.8249e-4, 1.0695e-4, -9.3604e-4, 99.7831e-4]),
ghan_params = np.array([-1.95011431, 0.01107147, 0.01387492]),
stevens_params = np.array([0.001875, 0.634, 60.]),
useMultigas=True,
useStevenson=True,
lifetimes=False,
aerosol_forcing="aerocom+ghan",
scaleAerosolAR5=True,
fixPre1850RCP=True,
useTropO3TFeedback=True,
scaleHistoricalAR5=False,
contrail_forcing='NOx',
kerosene_supply=0.,
landuse_forcing='co2',
):
# Warn (but do not stop) if iirf_h is less than iirf_max
if iirf_h < iirf_max:
warnings.warn('iirf_h=%f, which is less than iirf_max (%f)'
% (iirf_h, iirf_max), RuntimeWarning)
# Conversion between ppb/ppt concentrations and Mt/kt emissions
# in the RCP databases ppb = Mt and ppt = kt so factor always 1e18
emis2conc = M_ATMOS/1e18*np.asarray(molwt.aslist)/molwt.AIR
# Funny units for nitrogen emissions - N2O is expressed in N2 equivalent
n2o_sf = molwt.N2O/molwt.N2
emis2conc[2] = emis2conc[2] / n2o_sf
# Convert any list to a numpy array for (a) speed and (b) consistency.
# Goes through all variables in scope and converts them.
frame = inspect.currentframe()
args, _, _, values = inspect.getargvalues(frame)
for arg_to_check in args:
if type(values[arg_to_check]) is list:
exec(arg_to_check + '= np.array(' + arg_to_check + ')')
# Set up the output timeseries variables depending on options and perform
# basic sense checks
if useMultigas:
ngas = 31
nF = 13
if emissions_driven:
if type(emissions) is not np.ndarray or emissions.shape[1] != 40:
raise ValueError(
"emissions timeseries should be a nt x 40 numpy array")
carbon_boxes_shape = (emissions.shape[0], a.shape[0])
thermal_boxes_shape = (emissions.shape[0], d.shape[0])
nt = emissions.shape[0]
else:
if type(C) is not np.ndarray or C.shape[1] != ngas:
raise ValueError(
"C timeseries should be a nt x %d numpy array" % ngas)
thermal_boxes_shape = (C.shape[0], d.shape[0])
nt = C.shape[0]
if np.isscalar(fossilCH4_frac):
fossilCH4_frac = np.ones(nt) * fossilCH4_frac
# If custom gas lifetimes are supplied, use them, else import defaults
if type(lifetimes) is np.ndarray:
if len(lifetimes)!=ngas:
raise ValueError(
"custom GHG lifetime array must have " + str(ngas) +
" elements")
else:
lifetimes = lifetime.aslist
# Select the desired GHG forcing relationship and populate
# stratospheric water vapour from methane scale factor if not specified
# by user
if ghg_forcing.lower()=="etminan":
from .forcing.ghg import etminan as ghg
if stwv_from_ch4 is None: stwv_from_ch4=0.12
elif ghg_forcing.lower()=="myhre":
from .forcing.ghg import myhre as ghg
if stwv_from_ch4 is None: stwv_from_ch4=0.15
else:
raise ValueError(
"ghg_forcing should be 'etminan' (default) or 'myhre'")
# Check natural emissions and convert to 2D array if necessary
if type(natural) in [float,int]:
natural = natural * np.ones((nt,2))
elif type(natural) is np.ndarray:
if natural.ndim==1:
if natural.shape[0]!=2:
raise ValueError(
"natural emissions should be a 2-element or nt x 2 " +
"array")
natural = np.tile(natural, nt).reshape((nt,2))
elif natural.ndim==2:
if natural.shape[1]!=2 or natural.shape[0]!=nt:
raise ValueError(
"natural emissions should be a 2-element or nt x 2 " +
"array")
else:
raise ValueError(
"natural emissions should be a scalar, 2-element, or nt x 2 " +
"array")
# check scale factor is correct shape. If 1D inflate to 2D
if scale is None:
scale = np.ones((nt,nF))
elif scale.shape[-1]==nF:
if scale.ndim==2 and scale.shape[0]==nt:
pass
elif scale.ndim==1:
scale = np.tile(scale, nt).reshape((nt,nF))
else:
raise ValueError("in multi-gas mode, scale should be None, or a "+
"(13,) or (nt, 13) array")
# if scaling the historical time series to match AR5, apply these
# factors to whatever the user specifies
if scaleHistoricalAR5:
scale=scale*historical_scaling.all[:nt,:]
else:
ngas = 1
nF = 1
if emissions_driven:
if type(emissions) is np.ndarray:
if emissions.ndim != 1:
raise ValueError(
"In CO2-only mode, emissions should be a 1D array")
nt = emissions.shape[0]
carbon_boxes_shape = (nt, a.shape[0])
thermal_boxes_shape = (nt, d.shape[0])
elif type(other_rf) is np.ndarray:
if other_rf.ndim != 1:
raise ValueError(
"In CO2-only mode, other_rf should be a 1D array")
nt = other_rf.shape[0]
carbon_boxes_shape = (nt, a.shape[0])
thermal_boxes_shape = (nt, d.shape[0])
emissions = np.zeros(nt)
else:
raise ValueError(
"Neither emissions or other_rf is defined as a timeseries")
else:
if type(C) is not np.ndarray or C.ndim != 1:
raise ValueError(
"In CO2-only mode, concentrations should be a 1D array")
nt = C.shape[0]
thermal_boxes_shape = (nt, d.shape[0])
# expand C to 2D array for consistency with other calcs
C = C.reshape((nt, 1))
# check scale factor is correct shape - either scalar or 1D
# needs try/except really
if scale is None:
scale = np.ones(nt)
elif np.isscalar(scale):
scale = np.ones(nt) * scale
elif scale.ndim==1 and scale.shape[0]==nt:
pass
else:
raise ValueError("in CO2-only mode, scale should be None, a "+
"scalar or a (nt,) array")
# if scaling the historical time series to match AR5, apply these
# factors to whatever the user specifies
if scaleHistoricalAR5:
scale=scale*historical_scaling.co2[:nt]
# If TCR and ECS are supplied, calculate q coefficients
if type(tcrecs) is np.ndarray:
q = calculate_q(tcrecs, d, F2x, tcr_dbl, nt)
# Check a and tau are same size
if a.ndim != 1:
raise ValueError("a should be a 1D array")
if tau.ndim != 1:
raise ValueError("tau should be a 1D array")
if len(a) != len(tau):
raise ValueError("a and tau should be the same size")
if not np.isclose(np.sum(a), 1.0):
raise ValueError("a should sum to one")
# Allocate intermediate and output arrays
F = np.zeros((nt, nF))
C_acc = np.zeros(nt)
T_j = np.zeros(thermal_boxes_shape)
T = np.zeros(nt)
C_0 = np.copy(C_pi)
if emissions_driven:
C = np.zeros((nt, ngas))
R_i = np.zeros(carbon_boxes_shape)
if restart_in:
R_minus1 = restart_in[0]
T_j_minus1 = restart_in[1]
C_acc_minus1 = restart_in[2]
E_minus1 = restart_in[3]
C_minus1 = np.sum(R_minus1,axis=-1) + C_0[0]
C[0,0], C_acc[0], R_i[0,:], time_scale_sf = carbon_cycle(
E_minus1,
C_acc_minus1,
np.sum(T_j_minus1),
r0,
rc,
rt,
iirf_max,
0.16,
a,
tau,
iirf_h,
R_minus1,
C_pi[0],
C_minus1,
emissions[0]
)
if np.isscalar(other_rf):
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf
else:
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf[0]
F[0,0] = F[0,0] * scale[0]
T_j[0,:] = forc_to_temp(T_j_minus1, q[0,:], d, F[0,:])
T[0]=np.sum(T_j[0,:],axis=-1)
else:
# Initialise the carbon pools to be correct for first timestep in
# numerical method
if emissions_driven:
if useMultigas:
R_i[0,:] = a * (np.sum(emissions[0,1:3])) / ppm_gtc
C[0,1:] = C_0[1:]
else:
R_i[0,:] = a * emissions[0,np.newaxis] / ppm_gtc
C[0,0] = np.sum(R_i[0,:],axis=-1) + C_0[0]
if useMultigas:
# CO2, CH4 and N2O are co-dependent
F[0,0:3] = ghg(C[0,0:3], C_pi[0:3], F2x=F2x)
# Minor (F- and H-gases) are linear in concentration
# the factor of 0.001 here is because radiative efficiencies are given
# in W/m2/ppb and concentrations of minor gases are in ppt.
F[0,3] = np.sum((C[0,3:] - C_pi[3:]) * radeff.aslist[3:] * 0.001)
# Tropospheric ozone:
if emissions_driven:
if useStevenson:
F[0,4] = ozone_tr.stevenson(emissions[0,:], C[0,1],
T=np.sum(T_j[0,:]),
feedback=useTropO3TFeedback,
fix_pre1850_RCP=fixPre1850RCP)
else:
F[0,4] = ozone_tr.regress(emissions[0,:], beta=b_tro3)
else:
F[:,4] = F_tropO3
# Stratospheric ozone depends on concentrations of ODSs (index 15-30)
F[0,5] = ozone_st.magicc(C[0,15:], C_pi[15:])
# Stratospheric water vapour is a function of the methane ERF
F[0,6] = h2o_st.linear(F[0,1], ratio=stwv_from_ch4)
# Forcing from contrails. No climate feedback so can live outside
# of forward model in this version
if emissions_driven:
if contrail_forcing.lower()[0]=='n': # from NOx emissions
F[:,7] = contrails.from_aviNOx(emissions, aviNOx_frac)
elif contrail_forcing.lower()[0]=='f': # from kerosene production
F[:,7] = contrails.from_fuel(kerosene_supply)
elif contrail_forcing.lower()[0]=='e': # external forcing timeseries
F[:,7] = F_contrails
else:
raise ValueError("contrails must be one of 'NOx' (estimated "+
"from NOx emissions), 'fuel' (estimated from annual jet fuel "+
"supplied) or 'external' (an external forcing time series).")
else:
F[:,7] = F_contrails
# Forcing from aerosols - again no feedback dependence
if emissions_driven:
if aerosol_forcing.lower()=='stevens':
F[:,8] = aerosols.Stevens(emissions, stevens_params=stevens_params)
elif 'aerocom' in aerosol_forcing.lower():
F[:,8] = aerosols.aerocom_direct(emissions, beta=b_aero)
if 'ghan' in aerosol_forcing.lower():
F[:,8] = F[:,8] + aerosols.ghan_indirect(emissions,
scale_AR5=scaleAerosolAR5,
fix_pre1850_RCP=fixPre1850RCP,
ghan_params=ghan_params)
elif aerosol_forcing.lower()[0] == 'e':
F[:,8] = F_aerosol
else:
raise ValueError("aerosol_forcing should be one of 'stevens', " +
"aerocom, aerocom+ghan or external")
else:
F[:,8] = F_aerosol
# Black carbon on snow - no feedback dependence
if emissions_driven:
F[:,9] = bc_snow.linear(emissions)
else:
F[:,9] = F_bcsnow
# Land use change - either use a scaling with cumulative CO2 emissions
# or an external time series
if emissions_driven:
if landuse_forcing.lower()[0]=='c':
F[:,10] = landuse.cumulative(emissions)
elif landuse_forcing.lower()[0]=='e':
F[:,10] = F_landuse
else:
raise ValueError(
"landuse_forcing should be one of 'co2' or 'external'")
else:
F[:,10] = F_landuse
# Volcanic and solar copied straight to the output arrays
F[:,11] = F_volcanic
F[:,12] = F_solar
# multiply by scale factors
F[0,:] = F[0,:] * scale[0,:]
else:
if np.isscalar(other_rf):
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf
else:
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf[0]
F[0,0] = F[0,0] * scale[0]
if restart_in == False:
# Update the thermal response boxes
T_j[0,:] = (q[0,:]/d)*(np.sum(F[0,:]))
# Sum the thermal response boxes to get the total temperature anomaly
T[0]=np.sum(T_j[0,:],axis=-1)
for t in range(1,nt):
if emissions_driven:
if useMultigas:
if t == 1:
time_scale_sf = 0.16
# Calculate concentrations
# a. CARBON DIOXIDE
# Firstly add any oxidised methane from last year to the CO2
# pool
oxidised_CH4 = ((C[t-1,1]-C_pi[1]) *
(1.0 - np.exp(-1.0/lifetimes[1])) *
(molwt.C/molwt.CH4 * 0.001 * oxCH4_frac * fossilCH4_frac[t]))
oxidised_CH4 = np.max((oxidised_CH4, 0))
C[t,0], C_acc[t], R_i[t,:], time_scale_sf = carbon_cycle(
np.sum(emissions[t-1,1:3]),
C_acc[t-1],
T[t-1],
r0,
rc,
rt,
iirf_max,
time_scale_sf,
a,
tau,
iirf_h,
R_i[t-1,:] + oxidised_CH4,
C_pi[0],
C[t-1,0],
np.sum(emissions[t,1:3])
)
# b. METHANE
C[t,1] = emis_to_conc(
C[t-1,1],
emissions[t-1,3]+natural[t,0],
emissions[t,3]+natural[t,0],
1.0,
lifetimes[1],
1.0/emis2conc[1]
)
# c. NITROUS OXIDE
C[t,2] = emis_to_conc(
C[t-1,2],
emissions[t-1,4]+natural[t,1],
emissions[t,4]+natural[t,1],
1.0,
lifetimes[2],
1.0/emis2conc[2]
)
# d. OTHER WMGHGs
C[t,3:] = emis_to_conc(
C[t-1,3:],
emissions[t-1,12:],
emissions[t,12:],
1.0,
np.array(lifetimes[3:]),
1.0/emis2conc[3:]
)
# 2. Radiative forcing
F[t,0:3] = ghg(C[t,0:3], C_pi[0:3], F2x=F2x)
F[t,3] = np.sum((C[t,3:] - C_pi[3:]) * radeff.aslist[3:]
* 0.001)
if useStevenson:
F[t,4] = ozone_tr.stevenson(emissions[t,:],
C[t,1],
T=T[t-1],
feedback=useTropO3TFeedback,
fix_pre1850_RCP=fixPre1850RCP)
else:
F[t,4] = ozone_tr.regress(emissions[t,:], beta=b_tro3)
F[t,5] = ozone_st.magicc(C[t,15:], C_pi[15:])
F[t,6] = h2o_st.linear(F[t,1], ratio=stwv_from_ch4)
# multiply by scale factors
F[t,:] = F[t,:] * scale[t,:]
# 3. Temperature
# Update the thermal response boxes
T_j[t,:] = forc_to_temp(
T_j[t-1,:], q[t,:], d, F[t,:], e=efficacy)
# Sum the thermal response boxes to get the total temperature
T[t]=np.sum(T_j[t,:],axis=-1)
else:
if t == 1:
time_scale_sf = 0.16
C[t,0], C_acc[t], R_i[t,:], time_scale_sf = carbon_cycle(
emissions[t-1],
C_acc[t-1],
T[t-1],
r0,
rc,
rt,
iirf_max,
time_scale_sf,
a,
tau,
iirf_h,
R_i[t-1,:],
C_pi[0],
C[t-1,0],
emissions[t]
)
if np.isscalar(other_rf):
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf
else:
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf[t]
F[t,0] = F[t,0] * scale[t]
T_j[t,:] = forc_to_temp(T_j[t-1,:], q[t,:], d, F[t,:])
T[t]=np.sum(T_j[t,:],axis=-1)
else:
if useMultigas:
F[t,0:3] = ghg(C[t,0:3], C_pi[0:3], F2x=F2x)
F[t,3] = np.sum((C[t,3:] - C_pi[3:]) * radeff.aslist[3:]
* 0.001)
F[t,5] = ozone_st.magicc(C[t,15:], C_pi[15:])
F[t,6] = h2o_st.linear(F[t,1], ratio=stwv_from_ch4)
# multiply by scale factors
F[t,:] = F[t,:] * scale[t,:]
# 3. Temperature
# Update the thermal response boxes
T_j[t,:] = forc_to_temp(
T_j[t-1,:], q[t,:], d, F[t,:], e=efficacy)
# Sum the thermal response boxes to get the total temperature
T[t]=np.sum(T_j[t,:],axis=-1)
else:
if np.isscalar(other_rf):
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf
else:
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf[t]
F[t,0] = F[t,0] * scale[t]
T_j[t,:] = forc_to_temp(T_j[t-1,:], q[t,:], d, F[t,:])
T[t]=np.sum(T_j[t,:],axis=-1)
if not useMultigas:
C = np.squeeze(C)
F = np.squeeze(F)
if restart_out:
if useMultigas:
E_minus1 = np.sum(emissions[-1,1:3])
else:
E_minus1 = emissions[-1]
restart_out_val=(R_i[-1],T_j[-1],C_acc[-1],E_minus1)
return C, F, T, restart_out_val
else:
return C, F, T
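# Minimal usage sketch (CO2-only mode; the emissions series is hypothetical):
#   import numpy as np
#   from fair.forward import fair_scm
#   C, F, T = fair_scm(emissions=np.ones(250) * 10., useMultigas=False)
# C, F and T are then (nt,) arrays of CO2 concentration (ppmv), total
# radiative forcing (W m-2) and temperature anomaly (K).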
| 37.870787 | 83 | 0.528371 | from __future__ import division
import inspect
import numpy as np
import warnings
from scipy.optimize import root
from .ancil import natural, cmip6_volcanic, cmip6_solar, historical_scaling
from .constants import molwt, lifetime, radeff
from .constants.general import M_ATMOS, ppm_gtc
from .defaults import carbon, thermal
from .forcing import ozone_tr, ozone_st, h2o_st, contrails, aerosols, bc_snow,\
landuse
from .forcing.ghg import co2_log
def iirf_interp(alp_b,a,tau,iirf_h,targ_iirf):
iirf_arr = alp_b*(np.sum(a*tau*(1.0 - np.exp(-iirf_h/(tau*alp_b)))))
return iirf_arr - targ_iirf
def iirf_simple(c_acc, temp, r0, rc, rt, iirf_max):
return np.min([r0 + rc * c_acc + rt * temp, iirf_max])
def calculate_q(tcrecs, d, f2x, tcr_dbl, nt):
k = 1.0 - (d/tcr_dbl)*(1.0 - np.exp(-tcr_dbl/d))
if tcrecs.ndim==1:
if len(tcrecs)!=2:
raise ValueError(
"Constant TCR and ECS should be a 2-element array")
tcrecs = np.ones((nt, 2)) * tcrecs
elif tcrecs.ndim==2:
if tcrecs.shape!=(nt, 2):
raise ValueError(
"Transient TCR and ECS should be a nt x 2 array")
q = (1.0 / f2x) * (1.0/(k[0]-k[1])) * np.array([
tcrecs[:,0]-tcrecs[:,1]*k[1],tcrecs[:,1]*k[0]-tcrecs[:,0]]).T
return q
def carbon_cycle(e0, c_acc0, temp, r0, rc, rt, iirf_max, time_scale_sf0, a, tau,
iirf_h, carbon_boxes0, c_pi, c0, e1):
iirf = iirf_simple(c_acc0, temp, r0, rc, rt, iirf_max)
time_scale_sf = root(iirf_interp, time_scale_sf0,
args=(a, tau, iirf_h, iirf))['x']
tau_new = tau * time_scale_sf
carbon_boxes1 = carbon_boxes0*np.exp(-1.0/tau_new) + a*e1 / ppm_gtc
c1 = np.sum(carbon_boxes1) + c_pi
c_acc1 = c_acc0 + 0.5*(e1 + e0) - (c1 - c0)*ppm_gtc
return c1, c_acc1, carbon_boxes1, time_scale_sf
def emis_to_conc(c0, e0, e1, ts, lt, vm):
c1 = c0 - c0 * (1.0 - np.exp(-ts/lt)) + 0.5 * ts * (e1 + e0) * vm
return c1
def forc_to_temp(t0, q, d, f, e=1.0):
t1 = t0*np.exp(-1.0/d) + q*(1.0-np.exp((-1.0)/d))*np.sum(f*e)
return t1
def fair_scm(
emissions=False,
emissions_driven=True,
C=None,
other_rf=0.0,
q = thermal.q,
tcrecs = thermal.tcrecs,
d = thermal.d,
F2x = thermal.f2x,
tcr_dbl = thermal.tcr_dbl,
a = carbon.a,
tau = carbon.tau,
r0 = carbon.r0,
rc = carbon.rc,
rt = carbon.rt,
iirf_max = carbon.iirf_max,
iirf_h = carbon.iirf_h,
C_pi=np.array([278., 722., 273., 34.497] + [0.]*25 + [13.0975, 547.996]),
restart_in=False,
restart_out=False,
F_tropO3 = 0.,
F_aerosol = 0.,
F_volcanic=cmip6_volcanic.Forcing.volcanic,
F_solar=cmip6_solar.Forcing.solar,
F_contrails=0.,
F_bcsnow=0.,
F_landuse=0.,
aviNOx_frac=0.,
fossilCH4_frac=0.,
natural=natural.Emissions.emissions,
efficacy=np.array([1.]*9 + [3.] + [1.]*3),
scale=None,
oxCH4_frac=0.61,
ghg_forcing="Etminan",
stwv_from_ch4=None,
b_aero = np.array([-6.2227e-3, 0.0, -3.8392e-4, -1.16551e-3, 1.601537e-2,
-1.45339e-3, -1.55605e-3]),
b_tro3 = np.array([2.8249e-4, 1.0695e-4, -9.3604e-4, 99.7831e-4]),
ghan_params = np.array([-1.95011431, 0.01107147, 0.01387492]),
stevens_params = np.array([0.001875, 0.634, 60.]),
useMultigas=True,
useStevenson=True,
lifetimes=False,
aerosol_forcing="aerocom+ghan",
scaleAerosolAR5=True,
fixPre1850RCP=True,
useTropO3TFeedback=True,
scaleHistoricalAR5=False,
contrail_forcing='NOx',
kerosene_supply=0.,
landuse_forcing='co2',
):
if iirf_h < iirf_max:
warnings.warn('iirf_h=%f, which is less than iirf_max (%f)'
% (iirf_h, iirf_max), RuntimeWarning)
# Conversion between ppb/ppt concentrations and Mt/kt emissions
# in the RCP databases ppb = Mt and ppt = kt so factor always 1e18
emis2conc = M_ATMOS/1e18*np.asarray(molwt.aslist)/molwt.AIR
# Funny units for nitrogen emissions - N2O is expressed in N2 equivalent
n2o_sf = molwt.N2O/molwt.N2
emis2conc[2] = emis2conc[2] / n2o_sf
# Convert any list to a numpy array for (a) speed and (b) consistency.
# Goes through all variables in scope and converts them.
frame = inspect.currentframe()
args, _, _, values = inspect.getargvalues(frame)
for arg_to_check in args:
if type(values[arg_to_check]) is list:
exec(arg_to_check + '= np.array(' + arg_to_check + ')')
# Set up the output timeseries variables depending on options and perform
# basic sense checks
if useMultigas:
ngas = 31
nF = 13
if emissions_driven:
if type(emissions) is not np.ndarray or emissions.shape[1] != 40:
raise ValueError(
"emissions timeseries should be a nt x 40 numpy array")
carbon_boxes_shape = (emissions.shape[0], a.shape[0])
thermal_boxes_shape = (emissions.shape[0], d.shape[0])
nt = emissions.shape[0]
else:
if type(C) is not np.ndarray or C.shape[1] != ngas:
raise ValueError(
"C timeseries should be a nt x %d numpy array" % ngas)
thermal_boxes_shape = (C.shape[0], d.shape[0])
nt = C.shape[0]
if np.isscalar(fossilCH4_frac):
fossilCH4_frac = np.ones(nt) * fossilCH4_frac
# If custom gas lifetimes are supplied, use them, else import defaults
if type(lifetimes) is np.ndarray:
if len(lifetimes)!=ngas:
raise ValueError(
"custom GHG lifetime array must have " + str(ngas) +
" elements")
else:
lifetimes = lifetime.aslist
# Select the desired GHG forcing relationship and populate
# stratospheric water vapour from methane scale factor if not specified
# by user
if ghg_forcing.lower()=="etminan":
from .forcing.ghg import etminan as ghg
if stwv_from_ch4 is None: stwv_from_ch4=0.12
elif ghg_forcing.lower()=="myhre":
from .forcing.ghg import myhre as ghg
if stwv_from_ch4 is None: stwv_from_ch4=0.15
else:
raise ValueError(
"ghg_forcing should be 'etminan' (default) or 'myhre'")
# Check natural emissions and convert to 2D array if necessary
if type(natural) in [float,int]:
natural = natural * np.ones((nt,2))
elif type(natural) is np.ndarray:
if natural.ndim==1:
if natural.shape[0]!=2:
raise ValueError(
"natural emissions should be a 2-element or nt x 2 " +
"array")
natural = np.tile(natural, nt).reshape((nt,2))
elif natural.ndim==2:
if natural.shape[1]!=2 or natural.shape[0]!=nt:
raise ValueError(
"natural emissions should be a 2-element or nt x 2 " +
"array")
else:
raise ValueError(
"natural emissions should be a scalar, 2-element, or nt x 2 " +
"array")
# check scale factor is correct shape. If 1D inflate to 2D
if scale is None:
scale = np.ones((nt,nF))
elif scale.shape[-1]==nF:
if scale.ndim==2 and scale.shape[0]==nt:
pass
elif scale.ndim==1:
scale = np.tile(scale, nt).reshape((nt,nF))
else:
raise ValueError("in multi-gas mode, scale should be None, or a "+
"(13,) or (nt, 13) array")
# if scaling the historical time series to match AR5, apply these
# factors to whatever the user specifies
if scaleHistoricalAR5:
scale=scale*historical_scaling.all[:nt,:]
else:
ngas = 1
nF = 1
if emissions_driven:
if type(emissions) is np.ndarray:
if emissions.ndim != 1:
raise ValueError(
"In CO2-only mode, emissions should be a 1D array")
nt = emissions.shape[0]
carbon_boxes_shape = (nt, a.shape[0])
thermal_boxes_shape = (nt, d.shape[0])
elif type(other_rf) is np.ndarray:
if other_rf.ndim != 1:
raise ValueError(
"In CO2-only mode, other_rf should be a 1D array")
nt = other_rf.shape[0]
carbon_boxes_shape = (nt, a.shape[0])
thermal_boxes_shape = (nt, d.shape[0])
emissions = np.zeros(nt)
else:
raise ValueError(
"Neither emissions or other_rf is defined as a timeseries")
else:
if type(C) is not np.ndarray or C.ndim != 1:
raise ValueError(
"In CO2-only mode, concentrations should be a 1D array")
nt = C.shape[0]
thermal_boxes_shape = (nt, d.shape[0])
# expand C to 2D array for consistency with other calcs
C = C.reshape((nt, 1))
# check scale factor is correct shape - either scalar or 1D
# needs try/except really
if scale is None:
scale = np.ones(nt)
elif np.isscalar(scale):
scale = np.ones(nt) * scale
elif scale.ndim==1 and scale.shape[0]==nt:
pass
else:
raise ValueError("in CO2-only mode, scale should be None, a "+
"scalar or a (nt,) array")
# if scaling the historical time series to match AR5, apply these
# factors to whatever the user specifies
if scaleHistoricalAR5:
scale=scale*historical_scaling.co2[:nt]
# If TCR and ECS are supplied, calculate q coefficients
if type(tcrecs) is np.ndarray:
q = calculate_q(tcrecs, d, F2x, tcr_dbl, nt)
# Check a and tau are same size
if a.ndim != 1:
raise ValueError("a should be a 1D array")
if tau.ndim != 1:
raise ValueError("tau should be a 1D array")
if len(a) != len(tau):
raise ValueError("a and tau should be the same size")
if not np.isclose(np.sum(a), 1.0):
raise ValueError("a should sum to one")
# Allocate intermediate and output arrays
F = np.zeros((nt, nF))
C_acc = np.zeros(nt)
T_j = np.zeros(thermal_boxes_shape)
T = np.zeros(nt)
C_0 = np.copy(C_pi)
if emissions_driven:
C = np.zeros((nt, ngas))
R_i = np.zeros(carbon_boxes_shape)
if restart_in:
R_minus1 = restart_in[0]
T_j_minus1 = restart_in[1]
C_acc_minus1 = restart_in[2]
E_minus1 = restart_in[3]
C_minus1 = np.sum(R_minus1,axis=-1) + C_0[0]
C[0,0], C_acc[0], R_i[0,:], time_scale_sf = carbon_cycle(
E_minus1,
C_acc_minus1,
np.sum(T_j_minus1),
r0,
rc,
rt,
iirf_max,
0.16,
a,
tau,
iirf_h,
R_minus1,
C_pi[0],
C_minus1,
emissions[0]
)
if np.isscalar(other_rf):
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf
else:
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf[0]
F[0,0] = F[0,0] * scale[0]
T_j[0,:] = forc_to_temp(T_j_minus1, q[0,:], d, F[0,:])
T[0]=np.sum(T_j[0,:],axis=-1)
else:
# Initialise the carbon pools to be correct for first timestep in
# numerical method
if emissions_driven:
if useMultigas:
R_i[0,:] = a * (np.sum(emissions[0,1:3])) / ppm_gtc
C[0,1:] = C_0[1:]
else:
R_i[0,:] = a * emissions[0,np.newaxis] / ppm_gtc
C[0,0] = np.sum(R_i[0,:],axis=-1) + C_0[0]
if useMultigas:
# CO2, CH4 and N2O are co-dependent
F[0,0:3] = ghg(C[0,0:3], C_pi[0:3], F2x=F2x)
# Minor (F- and H-gases) are linear in concentration
# the factor of 0.001 here is because radiative efficiencies are given
# in W/m2/ppb and concentrations of minor gases are in ppt.
F[0,3] = np.sum((C[0,3:] - C_pi[3:]) * radeff.aslist[3:] * 0.001)
# Tropospheric ozone:
if emissions_driven:
if useStevenson:
F[0,4] = ozone_tr.stevenson(emissions[0,:], C[0,1],
T=np.sum(T_j[0,:]),
feedback=useTropO3TFeedback,
fix_pre1850_RCP=fixPre1850RCP)
else:
F[0,4] = ozone_tr.regress(emissions[0,:], beta=b_tro3)
else:
F[:,4] = F_tropO3
# Stratospheric ozone depends on concentrations of ODSs (index 15-30)
F[0,5] = ozone_st.magicc(C[0,15:], C_pi[15:])
# Stratospheric water vapour is a function of the methane ERF
F[0,6] = h2o_st.linear(F[0,1], ratio=stwv_from_ch4)
# Forcing from contrails. No climate feedback so can live outside
# of forward model in this version
if emissions_driven:
if contrail_forcing.lower()[0]=='n': # from NOx emissions
F[:,7] = contrails.from_aviNOx(emissions, aviNOx_frac)
elif contrail_forcing.lower()[0]=='f': # from kerosene production
F[:,7] = contrails.from_fuel(kerosene_supply)
elif contrail_forcing.lower()[0]=='e': # external forcing timeseries
F[:,7] = F_contrails
else:
raise ValueError("contrails must be one of 'NOx' (estimated "+
"from NOx emissions), 'fuel' (estimated from annual jet fuel "+
"supplied) or 'external' (an external forcing time series).")
else:
F[:,7] = F_contrails
# Forcing from aerosols - again no feedback dependence
if emissions_driven:
if aerosol_forcing.lower()=='stevens':
F[:,8] = aerosols.Stevens(emissions, stevens_params=stevens_params)
elif 'aerocom' in aerosol_forcing.lower():
F[:,8] = aerosols.aerocom_direct(emissions, beta=b_aero)
if 'ghan' in aerosol_forcing.lower():
F[:,8] = F[:,8] + aerosols.ghan_indirect(emissions,
scale_AR5=scaleAerosolAR5,
fix_pre1850_RCP=fixPre1850RCP,
ghan_params=ghan_params)
elif aerosol_forcing.lower()[0] == 'e':
F[:,8] = F_aerosol
else:
raise ValueError("aerosol_forcing should be one of 'stevens', " +
"aerocom, aerocom+ghan or external")
else:
F[:,8] = F_aerosol
# Black carbon on snow - no feedback dependence
if emissions_driven:
F[:,9] = bc_snow.linear(emissions)
else:
F[:,9] = F_bcsnow
# Land use change - either use a scaling with cumulative CO2 emissions
# or an external time series
if emissions_driven:
if landuse_forcing.lower()[0]=='c':
F[:,10] = landuse.cumulative(emissions)
elif landuse_forcing.lower()[0]=='e':
F[:,10] = F_landuse
else:
raise ValueError(
"landuse_forcing should be one of 'co2' or 'external'")
else:
F[:,10] = F_landuse
# Volcanic and solar copied straight to the output arrays
F[:,11] = F_volcanic
F[:,12] = F_solar
# multiply by scale factors
F[0,:] = F[0,:] * scale[0,:]
else:
if np.isscalar(other_rf):
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf
else:
F[0,0] = co2_log(C[0,0], C_pi[0], F2x) + other_rf[0]
F[0,0] = F[0,0] * scale[0]
if restart_in == False:
# Update the thermal response boxes
T_j[0,:] = (q[0,:]/d)*(np.sum(F[0,:]))
# Sum the thermal response boxes to get the total temperature anomaly
T[0]=np.sum(T_j[0,:],axis=-1)
for t in range(1,nt):
if emissions_driven:
if useMultigas:
if t == 1:
time_scale_sf = 0.16
# Calculate concentrations
# a. CARBON DIOXIDE
# Firstly add any oxidised methane from last year to the CO2
# pool
oxidised_CH4 = ((C[t-1,1]-C_pi[1]) *
(1.0 - np.exp(-1.0/lifetimes[1])) *
(molwt.C/molwt.CH4 * 0.001 * oxCH4_frac * fossilCH4_frac[t]))
oxidised_CH4 = np.max((oxidised_CH4, 0))
C[t,0], C_acc[t], R_i[t,:], time_scale_sf = carbon_cycle(
np.sum(emissions[t-1,1:3]),
C_acc[t-1],
T[t-1],
r0,
rc,
rt,
iirf_max,
time_scale_sf,
a,
tau,
iirf_h,
R_i[t-1,:] + oxidised_CH4,
C_pi[0],
C[t-1,0],
np.sum(emissions[t,1:3])
)
# b. METHANE
C[t,1] = emis_to_conc(
C[t-1,1],
emissions[t-1,3]+natural[t,0],
emissions[t,3]+natural[t,0],
1.0,
lifetimes[1],
1.0/emis2conc[1]
)
# c. NITROUS OXIDE
C[t,2] = emis_to_conc(
C[t-1,2],
emissions[t-1,4]+natural[t,1],
emissions[t,4]+natural[t,1],
1.0,
lifetimes[2],
1.0/emis2conc[2]
)
# d. OTHER WMGHGs
C[t,3:] = emis_to_conc(
C[t-1,3:],
emissions[t-1,12:],
emissions[t,12:],
1.0,
np.array(lifetimes[3:]),
1.0/emis2conc[3:]
)
# 2. Radiative forcing
F[t,0:3] = ghg(C[t,0:3], C_pi[0:3], F2x=F2x)
F[t,3] = np.sum((C[t,3:] - C_pi[3:]) * radeff.aslist[3:]
* 0.001)
if useStevenson:
F[t,4] = ozone_tr.stevenson(emissions[t,:],
C[t,1],
T=T[t-1],
feedback=useTropO3TFeedback,
fix_pre1850_RCP=fixPre1850RCP)
else:
F[t,4] = ozone_tr.regress(emissions[t,:], beta=b_tro3)
F[t,5] = ozone_st.magicc(C[t,15:], C_pi[15:])
F[t,6] = h2o_st.linear(F[t,1], ratio=stwv_from_ch4)
# multiply by scale factors
F[t,:] = F[t,:] * scale[t,:]
# 3. Temperature
# Update the thermal response boxes
T_j[t,:] = forc_to_temp(
T_j[t-1,:], q[t,:], d, F[t,:], e=efficacy)
# Sum the thermal response boxes to get the total temperature
T[t]=np.sum(T_j[t,:],axis=-1)
else:
if t == 1:
time_scale_sf = 0.16
C[t,0], C_acc[t], R_i[t,:], time_scale_sf = carbon_cycle(
emissions[t-1],
C_acc[t-1],
T[t-1],
r0,
rc,
rt,
iirf_max,
time_scale_sf,
a,
tau,
iirf_h,
R_i[t-1,:],
C_pi[0],
C[t-1,0],
emissions[t]
)
if np.isscalar(other_rf):
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf
else:
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf[t]
F[t,0] = F[t,0] * scale[t]
T_j[t,:] = forc_to_temp(T_j[t-1,:], q[t,:], d, F[t,:])
T[t]=np.sum(T_j[t,:],axis=-1)
else:
if useMultigas:
F[t,0:3] = ghg(C[t,0:3], C_pi[0:3], F2x=F2x)
F[t,3] = np.sum((C[t,3:] - C_pi[3:]) * radeff.aslist[3:]
* 0.001)
F[t,5] = ozone_st.magicc(C[t,15:], C_pi[15:])
F[t,6] = h2o_st.linear(F[t,1], ratio=stwv_from_ch4)
# multiply by scale factors
F[t,:] = F[t,:] * scale[t,:]
# 3. Temperature
# Update the thermal response boxes
T_j[t,:] = forc_to_temp(
T_j[t-1,:], q[t,:], d, F[t,:], e=efficacy)
# Sum the thermal response boxes to get the total temperature
T[t]=np.sum(T_j[t,:],axis=-1)
else:
if np.isscalar(other_rf):
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf
else:
F[t,0] = co2_log(C[t,0], C_pi[0], F2x) + other_rf[t]
F[t,0] = F[t,0] * scale[t]
T_j[t,:] = forc_to_temp(T_j[t-1,:], q[t,:], d, F[t,:])
T[t]=np.sum(T_j[t,:],axis=-1)
if not useMultigas:
C = np.squeeze(C)
F = np.squeeze(F)
if restart_out:
if useMultigas:
E_minus1 = np.sum(emissions[-1,1:3])
else:
E_minus1 = emissions[-1]
restart_out_val=(R_i[-1],T_j[-1],C_acc[-1],E_minus1)
return C, F, T, restart_out_val
else:
return C, F, T
| true | true |
f7248131aa87bcae1465dd9f6fc9da5036cc235d | 29,226 | py | Python | absl/flags/tests/_validators_test.py | mcx/abseil-py | 58ead8c22230a2493006fa0ab9f76776b6e7280f | [
"Apache-2.0"
] | 1,969 | 2017-04-24T22:21:29.000Z | 2022-03-30T13:27:09.000Z | absl/flags/tests/_validators_test.py | mcx/abseil-py | 58ead8c22230a2493006fa0ab9f76776b6e7280f | [
"Apache-2.0"
] | 111 | 2017-09-27T05:45:53.000Z | 2022-03-29T16:48:49.000Z | absl/flags/tests/_validators_test.py | mcx/abseil-py | 58ead8c22230a2493006fa0ab9f76776b6e7280f | [
"Apache-2.0"
] | 240 | 2017-09-26T01:18:10.000Z | 2022-03-31T06:24:40.000Z | # Copyright 2017 The Abseil Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing that flags validators framework does work.
This file tests that each flag validator called when it should be, and that
failed validator will throw an exception, etc.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
from absl.flags import _defines
from absl.flags import _exceptions
from absl.flags import _flagvalues
from absl.flags import _validators
from absl.testing import absltest
class SingleFlagValidatorTest(absltest.TestCase):
"""Testing _validators.register_validator() method."""
def setUp(self):
super(SingleFlagValidatorTest, self).setUp()
self.flag_values = _flagvalues.FlagValues()
self.call_args = []
def test_success(self):
def checker(x):
self.call_args.append(x)
return True
_defines.DEFINE_integer(
'test_flag', None, 'Usual integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program',)
self.flag_values(argv)
self.assertIsNone(self.flag_values.test_flag)
self.flag_values.test_flag = 2
self.assertEqual(2, self.flag_values.test_flag)
self.assertEqual([None, 2], self.call_args)
def test_default_value_not_used_success(self):
def checker(x):
self.call_args.append(x)
return True
_defines.DEFINE_integer(
'test_flag', None, 'Usual integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program', '--test_flag=1')
self.flag_values(argv)
self.assertEqual(1, self.flag_values.test_flag)
self.assertEqual([1], self.call_args)
def test_validator_not_called_when_other_flag_is_changed(self):
def checker(x):
self.call_args.append(x)
return True
_defines.DEFINE_integer(
'test_flag', 1, 'Usual integer flag', flag_values=self.flag_values)
_defines.DEFINE_integer(
'other_flag', 2, 'Other integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program',)
self.flag_values(argv)
self.assertEqual(1, self.flag_values.test_flag)
self.flag_values.other_flag = 3
self.assertEqual([1], self.call_args)
def test_exception_raised_if_checker_fails(self):
def checker(x):
self.call_args.append(x)
return x == 1
_defines.DEFINE_integer(
'test_flag', None, 'Usual integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program', '--test_flag=1')
self.flag_values(argv)
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.test_flag = 2
self.assertEqual('flag --test_flag=2: Errors happen', str(cm.exception))
self.assertEqual([1, 2], self.call_args)
def test_exception_raised_if_checker_raises_exception(self):
def checker(x):
self.call_args.append(x)
if x == 1:
return True
raise _exceptions.ValidationError('Specific message')
_defines.DEFINE_integer(
'test_flag', None, 'Usual integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program', '--test_flag=1')
self.flag_values(argv)
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.test_flag = 2
self.assertEqual('flag --test_flag=2: Specific message', str(cm.exception))
self.assertEqual([1, 2], self.call_args)
def test_error_message_when_checker_returns_false_on_start(self):
def checker(x):
self.call_args.append(x)
return False
_defines.DEFINE_integer(
'test_flag', None, 'Usual integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program', '--test_flag=1')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values(argv)
self.assertEqual('flag --test_flag=1: Errors happen', str(cm.exception))
self.assertEqual([1], self.call_args)
def test_error_message_when_checker_raises_exception_on_start(self):
def checker(x):
self.call_args.append(x)
raise _exceptions.ValidationError('Specific message')
_defines.DEFINE_integer(
'test_flag', None, 'Usual integer flag', flag_values=self.flag_values)
_validators.register_validator(
'test_flag',
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program', '--test_flag=1')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values(argv)
self.assertEqual('flag --test_flag=1: Specific message', str(cm.exception))
self.assertEqual([1], self.call_args)
def test_validators_checked_in_order(self):
def required(x):
self.calls.append('required')
return x is not None
def even(x):
self.calls.append('even')
return x % 2 == 0
self.calls = []
self._define_flag_and_validators(required, even)
self.assertEqual(['required', 'even'], self.calls)
self.calls = []
self._define_flag_and_validators(even, required)
self.assertEqual(['even', 'required'], self.calls)
def _define_flag_and_validators(self, first_validator, second_validator):
local_flags = _flagvalues.FlagValues()
_defines.DEFINE_integer(
'test_flag', 2, 'test flag', flag_values=local_flags)
_validators.register_validator(
'test_flag', first_validator, message='', flag_values=local_flags)
_validators.register_validator(
'test_flag', second_validator, message='', flag_values=local_flags)
argv = ('./program',)
local_flags(argv)
def test_validator_as_decorator(self):
_defines.DEFINE_integer(
'test_flag', None, 'Simple integer flag', flag_values=self.flag_values)
@_validators.validator('test_flag', flag_values=self.flag_values)
def checker(x):
self.call_args.append(x)
return True
argv = ('./program',)
self.flag_values(argv)
self.assertIsNone(self.flag_values.test_flag)
self.flag_values.test_flag = 2
self.assertEqual(2, self.flag_values.test_flag)
self.assertEqual([None, 2], self.call_args)
# Check that 'Checker' is still a function and has not been replaced.
self.assertTrue(checker(3))
self.assertEqual([None, 2, 3], self.call_args)
class MultiFlagsValidatorTest(absltest.TestCase):
"""Test flags multi-flag validators."""
def setUp(self):
super(MultiFlagsValidatorTest, self).setUp()
self.flag_values = _flagvalues.FlagValues()
self.call_args = []
_defines.DEFINE_integer(
'foo', 1, 'Usual integer flag', flag_values=self.flag_values)
_defines.DEFINE_integer(
'bar', 2, 'Usual integer flag', flag_values=self.flag_values)
def test_success(self):
def checker(flags_dict):
self.call_args.append(flags_dict)
return True
_validators.register_multi_flags_validator(
['foo', 'bar'], checker, flag_values=self.flag_values)
argv = ('./program', '--bar=2')
self.flag_values(argv)
self.assertEqual(1, self.flag_values.foo)
self.assertEqual(2, self.flag_values.bar)
self.assertEqual([{'foo': 1, 'bar': 2}], self.call_args)
self.flag_values.foo = 3
self.assertEqual(3, self.flag_values.foo)
self.assertEqual([{'foo': 1, 'bar': 2}, {'foo': 3, 'bar': 2}],
self.call_args)
def test_validator_not_called_when_other_flag_is_changed(self):
def checker(flags_dict):
self.call_args.append(flags_dict)
return True
_defines.DEFINE_integer(
'other_flag', 3, 'Other integer flag', flag_values=self.flag_values)
_validators.register_multi_flags_validator(
['foo', 'bar'], checker, flag_values=self.flag_values)
argv = ('./program',)
self.flag_values(argv)
self.flag_values.other_flag = 3
self.assertEqual([{'foo': 1, 'bar': 2}], self.call_args)
def test_exception_raised_if_checker_fails(self):
def checker(flags_dict):
self.call_args.append(flags_dict)
values = flags_dict.values()
# Make sure all the flags have different values.
return len(set(values)) == len(values)
_validators.register_multi_flags_validator(
['foo', 'bar'],
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program',)
self.flag_values(argv)
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.bar = 1
self.assertEqual('flags foo=1, bar=1: Errors happen', str(cm.exception))
self.assertEqual([{'foo': 1, 'bar': 2}, {'foo': 1, 'bar': 1}],
self.call_args)
def test_exception_raised_if_checker_raises_exception(self):
def checker(flags_dict):
self.call_args.append(flags_dict)
values = flags_dict.values()
# Make sure all the flags have different values.
if len(set(values)) != len(values):
raise _exceptions.ValidationError('Specific message')
return True
_validators.register_multi_flags_validator(
['foo', 'bar'],
checker,
message='Errors happen',
flag_values=self.flag_values)
argv = ('./program',)
self.flag_values(argv)
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.bar = 1
self.assertEqual('flags foo=1, bar=1: Specific message', str(cm.exception))
self.assertEqual([{'foo': 1, 'bar': 2}, {'foo': 1, 'bar': 1}],
self.call_args)
def test_decorator(self):
@_validators.multi_flags_validator(
['foo', 'bar'], message='Errors happen', flag_values=self.flag_values)
def checker(flags_dict): # pylint: disable=unused-variable
self.call_args.append(flags_dict)
values = flags_dict.values()
# Make sure all the flags have different values.
return len(set(values)) == len(values)
argv = ('./program',)
self.flag_values(argv)
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.bar = 1
self.assertEqual('flags foo=1, bar=1: Errors happen', str(cm.exception))
self.assertEqual([{'foo': 1, 'bar': 2}, {'foo': 1, 'bar': 1}],
self.call_args)
class MarkFlagsAsMutualExclusiveTest(absltest.TestCase):
def setUp(self):
super(MarkFlagsAsMutualExclusiveTest, self).setUp()
self.flag_values = _flagvalues.FlagValues()
_defines.DEFINE_string(
'flag_one', None, 'flag one', flag_values=self.flag_values)
_defines.DEFINE_string(
'flag_two', None, 'flag two', flag_values=self.flag_values)
_defines.DEFINE_string(
'flag_three', None, 'flag three', flag_values=self.flag_values)
_defines.DEFINE_integer(
'int_flag_one', None, 'int flag one', flag_values=self.flag_values)
_defines.DEFINE_integer(
'int_flag_two', None, 'int flag two', flag_values=self.flag_values)
_defines.DEFINE_multi_string(
'multi_flag_one', None, 'multi flag one', flag_values=self.flag_values)
_defines.DEFINE_multi_string(
'multi_flag_two', None, 'multi flag two', flag_values=self.flag_values)
_defines.DEFINE_boolean(
'flag_not_none', False, 'false default', flag_values=self.flag_values)
def _mark_flags_as_mutually_exclusive(self, flag_names, required):
_validators.mark_flags_as_mutual_exclusive(
flag_names, required=required, flag_values=self.flag_values)
def test_no_flags_present(self):
self._mark_flags_as_mutually_exclusive(['flag_one', 'flag_two'], False)
argv = ('./program',)
self.flag_values(argv)
self.assertIsNone(self.flag_values.flag_one)
self.assertIsNone(self.flag_values.flag_two)
def test_no_flags_present_required(self):
self._mark_flags_as_mutually_exclusive(['flag_one', 'flag_two'], True)
argv = ('./program',)
expected = (
'flags flag_one=None, flag_two=None: '
'Exactly one of (flag_one, flag_two) must have a value other than '
'None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_one_flag_present(self):
self._mark_flags_as_mutually_exclusive(['flag_one', 'flag_two'], False)
self.flag_values(('./program', '--flag_one=1'))
self.assertEqual('1', self.flag_values.flag_one)
def test_one_flag_present_required(self):
self._mark_flags_as_mutually_exclusive(['flag_one', 'flag_two'], True)
self.flag_values(('./program', '--flag_two=2'))
self.assertEqual('2', self.flag_values.flag_two)
def test_one_flag_zero_required(self):
self._mark_flags_as_mutually_exclusive(
['int_flag_one', 'int_flag_two'], True)
self.flag_values(('./program', '--int_flag_one=0'))
self.assertEqual(0, self.flag_values.int_flag_one)
def test_mutual_exclusion_with_extra_flags(self):
self._mark_flags_as_mutually_exclusive(['flag_one', 'flag_two'], True)
argv = ('./program', '--flag_two=2', '--flag_three=3')
self.flag_values(argv)
self.assertEqual('2', self.flag_values.flag_two)
self.assertEqual('3', self.flag_values.flag_three)
def test_mutual_exclusion_with_zero(self):
self._mark_flags_as_mutually_exclusive(
['int_flag_one', 'int_flag_two'], False)
argv = ('./program', '--int_flag_one=0', '--int_flag_two=0')
expected = (
'flags int_flag_one=0, int_flag_two=0: '
'At most one of (int_flag_one, int_flag_two) must have a value other '
'than None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_multiple_flags_present(self):
self._mark_flags_as_mutually_exclusive(
['flag_one', 'flag_two', 'flag_three'], False)
argv = ('./program', '--flag_one=1', '--flag_two=2', '--flag_three=3')
expected = (
'flags flag_one=1, flag_two=2, flag_three=3: '
'At most one of (flag_one, flag_two, flag_three) must have a value '
'other than None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_multiple_flags_present_required(self):
self._mark_flags_as_mutually_exclusive(
['flag_one', 'flag_two', 'flag_three'], True)
argv = ('./program', '--flag_one=1', '--flag_two=2', '--flag_three=3')
expected = (
'flags flag_one=1, flag_two=2, flag_three=3: '
'Exactly one of (flag_one, flag_two, flag_three) must have a value '
'other than None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_no_multiflags_present(self):
self._mark_flags_as_mutually_exclusive(
['multi_flag_one', 'multi_flag_two'], False)
argv = ('./program',)
self.flag_values(argv)
self.assertIsNone(self.flag_values.multi_flag_one)
self.assertIsNone(self.flag_values.multi_flag_two)
def test_no_multistring_flags_present_required(self):
self._mark_flags_as_mutually_exclusive(
['multi_flag_one', 'multi_flag_two'], True)
argv = ('./program',)
expected = (
'flags multi_flag_one=None, multi_flag_two=None: '
'Exactly one of (multi_flag_one, multi_flag_two) must have a value '
'other than None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_one_multiflag_present(self):
self._mark_flags_as_mutually_exclusive(
['multi_flag_one', 'multi_flag_two'], True)
self.flag_values(('./program', '--multi_flag_one=1'))
self.assertEqual(['1'], self.flag_values.multi_flag_one)
def test_one_multiflag_present_repeated(self):
self._mark_flags_as_mutually_exclusive(
['multi_flag_one', 'multi_flag_two'], True)
self.flag_values(('./program', '--multi_flag_one=1', '--multi_flag_one=1b'))
self.assertEqual(['1', '1b'], self.flag_values.multi_flag_one)
def test_multiple_multiflags_present(self):
self._mark_flags_as_mutually_exclusive(
['multi_flag_one', 'multi_flag_two'], False)
argv = ('./program', '--multi_flag_one=1', '--multi_flag_two=2')
expected = (
"flags multi_flag_one=['1'], multi_flag_two=['2']: "
'At most one of (multi_flag_one, multi_flag_two) must have a value '
'other than None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_multiple_multiflags_present_required(self):
self._mark_flags_as_mutually_exclusive(
['multi_flag_one', 'multi_flag_two'], True)
argv = ('./program', '--multi_flag_one=1', '--multi_flag_two=2')
expected = (
"flags multi_flag_one=['1'], multi_flag_two=['2']: "
'Exactly one of (multi_flag_one, multi_flag_two) must have a value '
'other than None.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_flag_default_not_none_warning(self):
with warnings.catch_warnings(record=True) as caught_warnings:
warnings.simplefilter('always')
self._mark_flags_as_mutually_exclusive(['flag_one', 'flag_not_none'],
False)
self.assertLen(caught_warnings, 1)
self.assertIn('--flag_not_none has a non-None default value',
str(caught_warnings[0].message))
class MarkBoolFlagsAsMutualExclusiveTest(absltest.TestCase):
def setUp(self):
super(MarkBoolFlagsAsMutualExclusiveTest, self).setUp()
self.flag_values = _flagvalues.FlagValues()
_defines.DEFINE_boolean(
'false_1', False, 'default false 1', flag_values=self.flag_values)
_defines.DEFINE_boolean(
'false_2', False, 'default false 2', flag_values=self.flag_values)
_defines.DEFINE_boolean(
'true_1', True, 'default true 1', flag_values=self.flag_values)
_defines.DEFINE_integer(
'non_bool', None, 'non bool', flag_values=self.flag_values)
def _mark_bool_flags_as_mutually_exclusive(self, flag_names, required):
_validators.mark_bool_flags_as_mutual_exclusive(
flag_names, required=required, flag_values=self.flag_values)
def test_no_flags_present(self):
self._mark_bool_flags_as_mutually_exclusive(['false_1', 'false_2'], False)
self.flag_values(('./program',))
self.assertEqual(False, self.flag_values.false_1)
self.assertEqual(False, self.flag_values.false_2)
def test_no_flags_present_required(self):
self._mark_bool_flags_as_mutually_exclusive(['false_1', 'false_2'], True)
argv = ('./program',)
expected = (
'flags false_1=False, false_2=False: '
'Exactly one of (false_1, false_2) must be True.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_no_flags_present_with_default_true_required(self):
self._mark_bool_flags_as_mutually_exclusive(['false_1', 'true_1'], True)
self.flag_values(('./program',))
self.assertEqual(False, self.flag_values.false_1)
self.assertEqual(True, self.flag_values.true_1)
def test_two_flags_true(self):
self._mark_bool_flags_as_mutually_exclusive(['false_1', 'false_2'], False)
argv = ('./program', '--false_1', '--false_2')
expected = (
'flags false_1=True, false_2=True: At most one of (false_1, '
'false_2) must be True.')
self.assertRaisesWithLiteralMatch(_exceptions.IllegalFlagValueError,
expected, self.flag_values, argv)
def test_non_bool_flag(self):
expected = ('Flag --non_bool is not Boolean, which is required for flags '
'used in mark_bool_flags_as_mutual_exclusive.')
with self.assertRaisesWithLiteralMatch(_exceptions.ValidationError,
expected):
self._mark_bool_flags_as_mutually_exclusive(['false_1', 'non_bool'],
False)
class MarkFlagAsRequiredTest(absltest.TestCase):
def setUp(self):
super(MarkFlagAsRequiredTest, self).setUp()
self.flag_values = _flagvalues.FlagValues()
def test_success(self):
_defines.DEFINE_string(
'string_flag', None, 'string flag', flag_values=self.flag_values)
_validators.mark_flag_as_required(
'string_flag', flag_values=self.flag_values)
argv = ('./program', '--string_flag=value')
self.flag_values(argv)
self.assertEqual('value', self.flag_values.string_flag)
def test_catch_none_as_default(self):
_defines.DEFINE_string(
'string_flag', None, 'string flag', flag_values=self.flag_values)
_validators.mark_flag_as_required(
'string_flag', flag_values=self.flag_values)
argv = ('./program',)
expected = (
r'flag --string_flag=None: Flag --string_flag must have a value other '
r'than None\.')
with self.assertRaisesRegex(_exceptions.IllegalFlagValueError, expected):
self.flag_values(argv)
def test_catch_setting_none_after_program_start(self):
_defines.DEFINE_string(
'string_flag', 'value', 'string flag', flag_values=self.flag_values)
_validators.mark_flag_as_required(
'string_flag', flag_values=self.flag_values)
argv = ('./program',)
self.flag_values(argv)
self.assertEqual('value', self.flag_values.string_flag)
expected = ('flag --string_flag=None: Flag --string_flag must have a value '
'other than None.')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.string_flag = None
self.assertEqual(expected, str(cm.exception))
def test_flag_default_not_none_warning(self):
_defines.DEFINE_string(
'flag_not_none', '', 'empty default', flag_values=self.flag_values)
with warnings.catch_warnings(record=True) as caught_warnings:
warnings.simplefilter('always')
_validators.mark_flag_as_required(
'flag_not_none', flag_values=self.flag_values)
self.assertLen(caught_warnings, 1)
self.assertIn('--flag_not_none has a non-None default value',
str(caught_warnings[0].message))
class MarkFlagsAsRequiredTest(absltest.TestCase):
def setUp(self):
super(MarkFlagsAsRequiredTest, self).setUp()
self.flag_values = _flagvalues.FlagValues()
def test_success(self):
_defines.DEFINE_string(
'string_flag_1', None, 'string flag 1', flag_values=self.flag_values)
_defines.DEFINE_string(
'string_flag_2', None, 'string flag 2', flag_values=self.flag_values)
flag_names = ['string_flag_1', 'string_flag_2']
_validators.mark_flags_as_required(flag_names, flag_values=self.flag_values)
argv = ('./program', '--string_flag_1=value_1', '--string_flag_2=value_2')
self.flag_values(argv)
self.assertEqual('value_1', self.flag_values.string_flag_1)
self.assertEqual('value_2', self.flag_values.string_flag_2)
def test_catch_none_as_default(self):
_defines.DEFINE_string(
'string_flag_1', None, 'string flag 1', flag_values=self.flag_values)
_defines.DEFINE_string(
'string_flag_2', None, 'string flag 2', flag_values=self.flag_values)
_validators.mark_flags_as_required(
['string_flag_1', 'string_flag_2'], flag_values=self.flag_values)
argv = ('./program', '--string_flag_1=value_1')
expected = (
r'flag --string_flag_2=None: Flag --string_flag_2 must have a value '
r'other than None\.')
with self.assertRaisesRegex(_exceptions.IllegalFlagValueError, expected):
self.flag_values(argv)
def test_catch_setting_none_after_program_start(self):
_defines.DEFINE_string(
'string_flag_1',
'value_1',
'string flag 1',
flag_values=self.flag_values)
_defines.DEFINE_string(
'string_flag_2',
'value_2',
'string flag 2',
flag_values=self.flag_values)
_validators.mark_flags_as_required(
['string_flag_1', 'string_flag_2'], flag_values=self.flag_values)
argv = ('./program', '--string_flag_1=value_1')
self.flag_values(argv)
self.assertEqual('value_1', self.flag_values.string_flag_1)
expected = (
'flag --string_flag_1=None: Flag --string_flag_1 must have a value '
'other than None.')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values.string_flag_1 = None
self.assertEqual(expected, str(cm.exception))
def test_catch_multiple_flags_as_none_at_program_start(self):
_defines.DEFINE_float(
'float_flag_1',
None,
        'float flag 1',
flag_values=self.flag_values)
_defines.DEFINE_float(
'float_flag_2',
None,
        'float flag 2',
flag_values=self.flag_values)
_validators.mark_flags_as_required(
['float_flag_1', 'float_flag_2'], flag_values=self.flag_values)
argv = ('./program', '')
expected = (
'flag --float_flag_1=None: Flag --float_flag_1 must have a value '
'other than None.\n'
'flag --float_flag_2=None: Flag --float_flag_2 must have a value '
'other than None.')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values(argv)
self.assertEqual(expected, str(cm.exception))
def test_fail_fast_single_flag_and_skip_remaining_validators(self):
def raise_unexpected_error(x):
del x
raise _exceptions.ValidationError('Should not be raised.')
_defines.DEFINE_float(
'flag_1', None, 'flag 1', flag_values=self.flag_values)
_defines.DEFINE_float(
'flag_2', 4.2, 'flag 2', flag_values=self.flag_values)
_validators.mark_flag_as_required('flag_1', flag_values=self.flag_values)
_validators.register_validator(
'flag_1', raise_unexpected_error, flag_values=self.flag_values)
_validators.register_multi_flags_validator(['flag_2', 'flag_1'],
raise_unexpected_error,
flag_values=self.flag_values)
argv = ('./program', '')
expected = (
'flag --flag_1=None: Flag --flag_1 must have a value other than None.')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values(argv)
self.assertEqual(expected, str(cm.exception))
def test_fail_fast_multi_flag_and_skip_remaining_validators(self):
def raise_expected_error(x):
del x
raise _exceptions.ValidationError('Expected error.')
def raise_unexpected_error(x):
del x
raise _exceptions.ValidationError('Got unexpected error.')
_defines.DEFINE_float(
'flag_1', 5.1, 'flag 1', flag_values=self.flag_values)
_defines.DEFINE_float(
'flag_2', 10.0, 'flag 2', flag_values=self.flag_values)
_validators.register_multi_flags_validator(['flag_1', 'flag_2'],
raise_expected_error,
flag_values=self.flag_values)
_validators.register_multi_flags_validator(['flag_2', 'flag_1'],
raise_unexpected_error,
flag_values=self.flag_values)
_validators.register_validator(
'flag_1', raise_unexpected_error, flag_values=self.flag_values)
_validators.register_validator(
'flag_2', raise_unexpected_error, flag_values=self.flag_values)
argv = ('./program', '')
expected = ('flags flag_1=5.1, flag_2=10.0: Expected error.')
with self.assertRaises(_exceptions.IllegalFlagValueError) as cm:
self.flag_values(argv)
self.assertEqual(expected, str(cm.exception))
if __name__ == '__main__':
  absltest.main()
# -*- coding: utf-8 -*-
"""
pybit
------------------------
pybit is a lightweight and high-performance API connector for the
RESTful and WebSocket APIs of the Bybit exchange.
Documentation can be found at
https://github.com/verata-veritatis/pybit
:copyright: (c) 2020-2021 verata-veritatis
:license: MIT License
"""
import time
import hmac
import json
import logging
import threading
import requests
import websocket
from datetime import datetime as dt
from concurrent.futures import ThreadPoolExecutor
from .exceptions import FailedRequestError, InvalidRequestError
# Requests will use simplejson if available.
try:
from simplejson.errors import JSONDecodeError
except ImportError:
from json.decoder import JSONDecodeError
# Versioning.
VERSION = '1.1.18'
class HTTP:
"""
Connector for Bybit's HTTP API.
:param endpoint: The endpoint URL of the HTTP API, e.g.
'https://api-testnet.bybit.com'.
:type endpoint: str
:param api_key: Your API key. Required for authenticated endpoints. Defaults
to None.
:type api_key: str
:param api_secret: Your API secret key. Required for authenticated
endpoints. Defaults to None.
:type api_secret: str
:param logging_level: The logging level of the built-in logger. Defaults to
logging.INFO. Options are CRITICAL (50), ERROR (40), WARNING (30),
INFO (20), DEBUG (10), or NOTSET (0).
:type logging_level: Union[int, logging.level]
:param log_requests: Whether or not pybit should log each HTTP request.
:type log_requests: bool
:param request_timeout: The timeout of each API request in seconds. Defaults
to 10 seconds.
:type request_timeout: int
:param recv_window: How long an HTTP request is valid in ms. Default is
5000.
:type recv_window: int
:param force_retry: Whether or not pybit should retry a timed-out request.
:type force_retry: bool
    :param retry_codes: A set of non-fatal status codes to retry on.
    :type retry_codes: set
    :param ignore_codes: A set of non-fatal status codes to ignore.
    :type ignore_codes: set
:param max_retries: The number of times to re-attempt a request.
:type max_retries: int
:param retry_delay: Seconds between retries for returned error or timed-out
requests. Default is 3 seconds.
:type retry_delay: int
:param referral_id: An optional referer ID can be added to each request for
identification.
:type referral_id: str
:returns: pybit.HTTP session.
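    Usage sketch (the endpoint below is Bybit's public testnet; the key and
    secret are placeholders, not real credentials):

        from pybit import HTTP
        session = HTTP(
            endpoint='https://api-testnet.bybit.com',
            api_key='YOUR_API_KEY',        # placeholder
            api_secret='YOUR_API_SECRET',  # placeholder
            request_timeout=10,
            force_retry=True)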
"""
def __init__(self, endpoint=None, api_key=None, api_secret=None,
logging_level=logging.INFO, log_requests=False,
request_timeout=10, recv_window=5000, force_retry=False,
retry_codes=None, ignore_codes=None, max_retries=3,
retry_delay=3, referral_id=None):
"""Initializes the HTTP class."""
# Set the endpoint.
if endpoint is None:
self.endpoint = 'https://api.bybit.com'
else:
self.endpoint = endpoint
# Setup logger.
self.logger = logging.getLogger(__name__)
if len(logging.root.handlers) == 0:
            # No handlers are configured on the root logger, so attach a
            # handler to this logger only; this avoids interfering with any
            # logging configuration set up by the calling application.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
)
handler.setLevel(logging_level)
self.logger.addHandler(handler)
self.logger.debug('Initializing HTTP session.')
self.log_requests = log_requests
# Set API keys.
self.api_key = api_key
self.api_secret = api_secret
# Set timeout.
self.timeout = request_timeout
self.recv_window = recv_window
self.force_retry = force_retry
self.max_retries = max_retries
self.retry_delay = retry_delay
# Set whitelist of non-fatal Bybit status codes to retry on.
if retry_codes is None:
self.retry_codes = {10002, 10006, 30034, 30035, 130035, 130150}
else:
self.retry_codes = retry_codes
# Set whitelist of non-fatal Bybit status codes to ignore.
if ignore_codes is None:
self.ignore_codes = set()
else:
self.ignore_codes = ignore_codes
# Initialize requests session.
self.client = requests.Session()
self.client.headers.update(
{
'User-Agent': 'pybit-' + VERSION,
'Content-Type': 'application/json',
'Accept': 'application/json',
}
)
# Add referral ID to header.
if referral_id:
self.client.headers.update({'Referer': referral_id})
def _exit(self):
"""Closes the request session."""
self.client.close()
self.logger.debug('HTTP session closed.')
def orderbook(self, **kwargs):
"""
Get the orderbook.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-orderbook.
:returns: Request results as dictionary.
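        Example (sketch; assumes a session created as in the class docstring,
        and 'BTCUSD' is just an illustrative symbol):

            depth = session.orderbook(symbol='BTCUSD')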
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/orderBook/L2',
query=kwargs
)
def query_kline(self, **kwargs):
"""
Get kline.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-querykline.
:returns: Request results as dictionary.
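        Example (sketch; assumes an existing session). The documented 'from'
        parameter is passed as 'from_time' here, since 'from' is a reserved
        word in Python (see the workaround below):

            import time
            bars = session.query_kline(
                symbol='BTCUSD',  # illustrative symbol
                interval='1',     # 1-minute bars
                from_time=int(time.time()) - 3600)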
"""
# Replace query param 'from_time' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/kline'
else:
suffix = '/v2/public/kline/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def latest_information_for_symbol(self, **kwargs):
"""
Get the latest information for symbol.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-latestsymbolinfo.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/tickers',
query=kwargs
)
def public_trading_records(self, **kwargs):
"""
Get recent trades. You can find a complete history of trades on Bybit
at https://public.bybit.com/.
:param kwargs: See
            https://bybit-exchange.github.io/docs/inverse/#t-publictradingrecords.
:returns: Request results as dictionary.
"""
# Replace query param 'from_id' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/recent-trading-records'
else:
suffix = '/v2/public/trading-records'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def query_symbol(self):
"""
Get symbol info.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/symbols'
)
def liquidated_orders(self, **kwargs):
"""
Retrieve the liquidated orders. The query range is the last seven days
of data.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-query_liqrecords.
:returns: Request results as dictionary.
"""
# Replace query param 'from_id' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/liq-records',
query=kwargs
)
def query_mark_price_kline(self, **kwargs):
"""
Query mark price kline (like query_kline but for mark price).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-markpricekline.
:returns: Request results as dictionary.
"""
# Replace query param 'from_time' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/mark-price-kline'
else:
suffix = '/v2/public/mark-price-kline'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def open_interest(self, **kwargs):
"""
Gets the total amount of unsettled contracts. In other words, the total
number of contracts held in open positions.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marketopeninterest.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/open-interest',
query=kwargs
)
def latest_big_deal(self, **kwargs):
"""
Obtain filled orders worth more than 500,000 USD within the last 24h.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marketbigdeal.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/big-deal',
query=kwargs
)
def long_short_ratio(self, **kwargs):
"""
Gets the Bybit long-short ratio.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-marketaccountratio.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/account-ratio',
query=kwargs
)
def place_active_order(self, **kwargs):
"""
Places an active order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:returns: Request results as dictionary.
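        Example (sketch; assumes an existing session, and every value below
        is illustrative only):

            order = session.place_active_order(
                symbol='BTCUSD',
                side='Buy',
                order_type='Limit',
                qty=1,            # illustrative size
                price=20000,      # illustrative price
                time_in_force='GoodTillCancel')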
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/create'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/create'
else:
suffix = '/v2/private/order/create'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_active_order_bulk(self, orders: list, max_in_parallel=10):
"""
Places multiple active orders in bulk using multithreading. For more
information on place_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-activeorders.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
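        Example (sketch; each dict holds the kwargs for a single
        place_active_order call, and all values are illustrative):

            results = session.place_active_order_bulk([
                dict(symbol='BTCUSD', side='Buy', order_type='Market',
                     qty=1, time_in_force='GoodTillCancel'),
                dict(symbol='ETHUSD', side='Sell', order_type='Market',
                     qty=1, time_in_force='GoodTillCancel'),
            ])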
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.place_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def get_active_order(self, **kwargs):
"""
Gets an active order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-getactive.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getactive.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/list'
        elif kwargs.get('symbol', '')[-2:].isdigit():
            suffix = '/futures/private/order/list'
        else:
            suffix = '/v2/private/order/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_active_order(self, **kwargs):
"""
Cancels an active order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-cancelactive.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelactive.
:returns: Request results as dictionary.
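        Example (sketch; the order_id is a placeholder for an ID returned by
        an earlier place_active_order call):

            session.cancel_active_order(
                symbol='BTCUSD',
                order_id='uuid-from-order-placement')  # placeholder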
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/cancel'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/cancel'
else:
suffix = '/v2/private/order/cancel'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_active_order_bulk(self, orders: list, max_in_parallel=10):
"""
Cancels multiple active orders in bulk using multithreading. For more
information on cancel_active_order, see
        https://bybit-exchange.github.io/docs/inverse/#t-cancelactive.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.cancel_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def cancel_all_active_orders(self, **kwargs):
"""
Cancel all active orders that are unfilled or partially filled. Fully
filled orders cannot be cancelled.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelallactive.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/cancel-all'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/cancelAll'
else:
suffix = '/v2/private/order/cancelAll'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_active_order(self, **kwargs):
"""
        Replaces (modifies/amends) an existing active order.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-replaceactive.
:returns: Request results as dictionary.
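        Example (sketch; p_r_qty and p_r_price are the documented names for
        the replacement quantity and price, and the order_id is a
        placeholder):

            session.replace_active_order(
                symbol='BTCUSD',
                order_id='uuid-from-order-placement',  # placeholder
                p_r_qty=2,
                p_r_price=20100)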
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/replace'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/replace'
else:
suffix = '/v2/private/order/replace'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_active_order_bulk(self, orders: list, max_in_parallel=10):
"""
Replaces multiple active orders in bulk using multithreading. For more
information on replace_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-replaceactive.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.replace_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def query_active_order(self, **kwargs):
"""
Query real-time active order information.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-queryactive.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/search'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order'
else:
suffix = '/v2/private/order'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_conditional_order(self, **kwargs):
"""
Places a conditional order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-placecond.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-placecond.
:returns: Request results as dictionary.
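        Example (sketch; all values are illustrative, see the docs link above
        for the full parameter list):

            session.place_conditional_order(
                symbol='BTCUSD',
                side='Buy',
                order_type='Limit',
                qty=1,
                price=20500,       # illustrative limit price
                base_price=20000,  # illustrative reference price
                stop_px=20400,     # illustrative trigger price
                time_in_force='GoodTillCancel')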
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/create'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/create'
else:
suffix = '/v2/private/stop-order/create'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_conditional_order_bulk(self, orders: list, max_in_parallel=10):
"""
Places multiple conditional orders in bulk using multithreading. For
        more information on place_conditional_order, see
https://bybit-exchange.github.io/docs/inverse/#t-placecond.
:param orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.place_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def get_conditional_order(self, **kwargs):
"""
Gets a conditional order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-getcond.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getcond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/list'
else:
suffix = '/v2/private/stop-order/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_conditional_order(self, **kwargs):
"""
Cancels a conditional order. For more information, see
https://bybit-exchange.github.io/docs/inverse/#t-cancelcond.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelcond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/cancel'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/cancel'
else:
suffix = '/v2/private/stop-order/cancel'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_conditional_order_bulk(self, orders: list, max_in_parallel=10):
"""
Cancels multiple conditional orders in bulk using multithreading. For
        more information on cancel_conditional_order, see
https://bybit-exchange.github.io/docs/inverse/#t-cancelcond.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.cancel_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def cancel_all_conditional_orders(self, **kwargs):
"""
Cancel all conditional orders that are unfilled or partially filled.
Fully filled orders cannot be cancelled.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-cancelallcond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/cancel-all'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/cancelAll'
else:
suffix = '/v2/private/stop-order/cancelAll'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_conditional_order(self, **kwargs):
"""
        Replaces (modifies/amends) an existing conditional order.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-replacecond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/replace'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/replace'
else:
suffix = '/v2/private/stop-order/replace'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_conditional_order_bulk(self, orders: list, max_in_parallel=10):
"""
Replaces multiple conditional orders in bulk using multithreading. For
more information on replace_active_order, see
https://bybit-exchange.github.io/docs/inverse/#t-replacecond.
:param list orders: A list of orders and their parameters.
:param max_in_parallel: The number of requests to be sent in parallel.
Note that you are limited to 50 requests per second.
:returns: Future request result dictionaries as a list.
"""
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.replace_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def query_conditional_order(self, **kwargs):
"""
Query real-time conditional order information.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-querycond.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/search'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order'
else:
suffix = '/v2/private/stop-order'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def my_position(self, **kwargs):
"""
Get my position list.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-myposition.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/list'
else:
suffix = '/v2/private/position/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
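    # Illustrative usage sketch (hypothetical symbol; assumes an
    # authenticated `session`): fetch the open position for one market.
    #
    #   position = session.my_position(symbol='BTCUSD')['result']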
def set_auto_add_margin(self, **kwargs):
"""
For linear markets only. Set auto add margin, or Auto-Margin
Replenishment.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-setautoaddmargin.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='POST',
path=self.endpoint + '/private/linear/position/set-auto-add-margin',
query=kwargs,
auth=True
)
def set_leverage(self, **kwargs):
"""
Change user leverage.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-setleverage.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/set-leverage'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/leverage/save'
else:
suffix = '/v2/private/position/leverage/save'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
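    # Illustrative usage sketch (hypothetical values). Per the linked docs,
    # linear (USDT) symbols expect separate buy_leverage/sell_leverage
    # parameters, while inverse symbols take a single leverage value.
    #
    #   session.set_leverage(symbol='BTCUSD', leverage=2)
    #   session.set_leverage(symbol='BTCUSDT', buy_leverage=2,
    #                        sell_leverage=2)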
def cross_isolated_margin_switch(self, **kwargs):
"""
        For linear markets only. Switches between Cross and Isolated margin;
        a leverage value must be set when switching from Cross to Isolated.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-marginswitch.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/switch-isolated'
else:
suffix = '/futures/private/position/switch-mode'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def position_mode_switch(self, **kwargs):
"""
        For futures markets only. Switches the position mode between
        One-Way Mode and Hedge Mode.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse_futures/#t-marginswitch.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='POST',
path=self.endpoint + '/futures/private/position/switch-mode',
query=kwargs,
auth=True
)
def change_margin(self, **kwargs):
"""
Update margin.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-changemargin.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/change-position-margin'
else:
suffix = '/v2/private/position/change-position-margin'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def set_trading_stop(self, **kwargs):
"""
Set take profit, stop loss, and trailing stop for your open position.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-tradingstop.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/trading-stop'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/trading-stop'
else:
suffix = '/v2/private/position/trading-stop'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
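    # Illustrative usage sketch (hypothetical prices): attach a take profit
    # and stop loss to an existing BTCUSD position.
    #
    #   session.set_trading_stop(symbol='BTCUSD', take_profit=25000,
    #                            stop_loss=18000)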
def add_reduce_margin(self, **kwargs):
"""
For linear markets only. Add margin.
:param kwargs: See
https://bybit-exchange.github.io/docs/linear/#t-addmargin.
:returns: Request results as dictionary.
"""
return self._submit_request(
            # Bybit's add/reduce margin endpoint expects a POST request.
            method='POST',
path=self.endpoint + '/private/linear/position/add-margin',
query=kwargs,
auth=True
)
def user_leverage(self, **kwargs):
"""
        Deprecated! Please use my_position instead. Fetches the user's
        leverage from their position data.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-getleverage.
:returns: Request results as dictionary.
"""
self.logger.warning('This endpoint is deprecated and will be removed. Use my_position()')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/position/list',
query=kwargs,
auth=True
)
def change_user_leverage(self, **kwargs):
"""
Change user leverage.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-changeleverage.
:returns: Request results as dictionary.
"""
self.logger.warning('This endpoint is deprecated and will be removed. Use set_leverage()')
return self._submit_request(
method='POST',
path=self.endpoint + '/user/leverage/save',
query=kwargs,
auth=True
)
def user_trade_records(self, **kwargs):
"""
Get user's trading records. The results are ordered in ascending order
(the first item is the oldest).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-usertraderecords.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/trade/execution/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/execution/list'
else:
suffix = '/v2/private/execution/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def closed_profit_and_loss(self, **kwargs):
"""
Get user's closed profit and loss records. The results are ordered in
descending order (the first item is the latest).
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-closedprofitandloss.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/trade/closed-pnl/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/trade/closed-pnl/list'
else:
suffix = '/v2/private/trade/closed-pnl/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def get_risk_limit(self, is_linear=False):
"""
Get risk limit.
:param is_linear: True for linear, False for inverse. Defaults to
False.
:returns: Request results as dictionary.
"""
if is_linear:
suffix = '/public/linear/risk-limit'
else:
suffix = '/open-api/wallet/risk-limit/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
auth=True
)
def set_risk_limit(self, **kwargs):
"""
Set risk limit.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-setrisklimit.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='POST',
path=self.endpoint + '/open-api/wallet/risk-limit',
query=kwargs,
auth=True
)
def get_the_last_funding_rate(self, **kwargs):
"""
The funding rate is generated every 8 hours at 00:00 UTC, 08:00 UTC and
16:00 UTC. For example, if a request is sent at 12:00 UTC, the funding
rate generated earlier that day at 08:00 UTC will be sent.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-fundingrate.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/funding/prev-funding-rate'
else:
suffix = '/v2/private/funding/prev-funding-rate'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
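    # Illustrative usage sketch; the 'funding_rate' result field is assumed
    # from the linked documentation.
    #
    #   r = session.get_the_last_funding_rate(symbol='BTCUSD')
    #   rate = r['result']['funding_rate']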
def my_last_funding_fee(self, **kwargs):
"""
Funding settlement occurs every 8 hours at 00:00 UTC, 08:00 UTC and
16:00 UTC. The current interval's fund fee settlement is based on the
previous interval's fund rate. For example, at 16:00, the settlement is
based on the fund rate generated at 8:00. The fund rate generated at
16:00 will be used at 0:00 the next day.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-mylastfundingfee.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/funding/prev-funding'
else:
suffix = '/v2/private/funding/prev-funding'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def predicted_funding_rate(self, **kwargs):
"""
Get predicted funding rate and my funding fee.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-predictedfunding.
:returns: Request results as dictionary.
"""
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/funding/predicted-funding'
else:
suffix = '/v2/private/funding/predicted-funding'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def api_key_info(self):
"""
Get user's API key info.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/account/api-key',
auth=True
)
def lcp_info(self, **kwargs):
"""
Get user's LCP (data refreshes once an hour). Only supports inverse
perpetual at present. See
https://bybit-exchange.github.io/docs/inverse/#t-liquidity to learn
more.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-lcp.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/account/lcp',
query=kwargs,
auth=True
)
def get_wallet_balance(self, **kwargs):
"""
Get wallet balance info.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-balance.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/balance',
query=kwargs,
auth=True
)
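    # Illustrative usage sketch; the result layout is assumed from the
    # linked documentation.
    #
    #   r = session.get_wallet_balance(coin='BTC')
    #   available = r['result']['BTC']['available_balance']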
def wallet_fund_records(self, **kwargs):
"""
Get wallet fund records. This endpoint also shows exchanges from the
Asset Exchange, where the types for the exchange are
ExchangeOrderWithdraw and ExchangeOrderDeposit.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-walletrecords.
:returns: Request results as dictionary.
"""
# Replace query param 'from_id' since 'from' keyword is reserved.
# Temporary workaround until Bybit updates official request params
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/fund/records',
query=kwargs,
auth=True
)
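    # Illustrative usage sketch: the caller passes 'from_id', which the
    # method body renames to 'from' because 'from' is a reserved word in
    # Python.
    #
    #   records = session.wallet_fund_records(currency='BTC', from_id=1234)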
def withdraw_records(self, **kwargs):
"""
Get withdrawal records.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-withdrawrecords.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/withdraw/list',
query=kwargs,
auth=True
)
def asset_exchange_records(self, **kwargs):
"""
Get asset exchange records.
:param kwargs: See
https://bybit-exchange.github.io/docs/inverse/#t-assetexchangerecords.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/exchange-order/list',
query=kwargs,
auth=True
)
def server_time(self):
"""
Get Bybit server time.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/time'
)
def announcement(self):
"""
        Get Bybit OpenAPI announcements from the last 30 days, in reverse
        chronological order.
:returns: Request results as dictionary.
"""
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/announcement'
)
'''
Additional Methods
These methods use two or more requests to perform a specific
function and are exclusive to pybit.
'''
def close_position(self, symbol):
"""
Closes your open position. Makes two requests (position, order).
Parameters
------------------------
symbol : str
Required parameter. The symbol of the market as a string,
e.g. 'BTCUSD'.
"""
# First we fetch the user's position.
try:
r = self.my_position(symbol=symbol)['result']
# If there is no returned position, we want to handle that.
except KeyError:
return self.logger.error('No position detected.')
# Next we generate a list of market orders
orders = [
{
'symbol': symbol,
'order_type': 'Market',
'side': 'Buy' if p['side'] == 'Sell' else 'Sell',
'qty': p['size'],
'time_in_force': 'ImmediateOrCancel',
'reduce_only': True,
'close_on_trigger': True
} for p in (r if isinstance(r, list) else [r]) if p['size'] > 0
]
if len(orders) == 0:
return self.logger.error('No position detected.')
# Submit a market order against each open position for the same qty.
return self.place_active_order_bulk(orders)
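    # Illustrative usage sketch: close any open BTCUSD position at market.
    #
    #   session.close_position('BTCUSD')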
'''
Internal methods; signature and request submission.
For more information about the request signature, see
https://bybit-exchange.github.io/docs/inverse/#t-authentication.
'''
def _auth(self, method, params, recv_window):
"""
Generates authentication signature per Bybit API specifications.
Notes
-------------------
Since the POST method requires a JSONified dict, we need to ensure
the signature uses lowercase booleans instead of Python's
capitalized booleans. This is done in the bug fix below.
"""
api_key = self.api_key
api_secret = self.api_secret
if api_key is None or api_secret is None:
raise PermissionError('Authenticated endpoints require keys.')
# Append required parameters.
params['api_key'] = api_key
params['recv_window'] = recv_window
params['timestamp'] = int(time.time() * 10 ** 3)
# Sort dictionary alphabetically to create querystring.
_val = '&'.join(
[str(k) + '=' + str(v) for k, v in sorted(params.items()) if
(k != 'sign') and (v is not None)]
)
# Bug fix. Replaces all capitalized booleans with lowercase.
if method == 'POST':
_val = _val.replace('True', 'true').replace('False', 'false')
# Return signature.
return str(hmac.new(
bytes(api_secret, 'utf-8'),
bytes(_val, 'utf-8'), digestmod='sha256'
).hexdigest())
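    # Minimal standalone sketch of the signing scheme implemented above,
    # using hypothetical credentials and parameters:
    #
    #   querystring = ('api_key=abc123&recv_window=5000'
    #                  '&symbol=BTCUSD&timestamp=1600000000000')
    #   sign = hmac.new(b'my_api_secret', querystring.encode('utf-8'),
    #                   digestmod='sha256').hexdigest()
    #
    # i.e. an HMAC-SHA256 over the alphabetically sorted querystring, keyed
    # with the API secret.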
def _submit_request(self, method=None, path=None, query=None, auth=False):
"""
Submits the request to the API.
Notes
-------------------
We use the params argument for the GET method, and data argument for
the POST method. Dicts passed to the data argument must be
JSONified prior to submitting request.
"""
        # Store original recv_window.
        recv_window = self.recv_window
        # Default to an empty dict so the auth and logging steps below can
        # safely iterate even when no query parameters were supplied.
        if query is None:
            query = {}
        # Bug fix: change floating whole numbers to integers to prevent
        # auth signature errors.
        for i in query.keys():
            if isinstance(query[i], float) and query[i] == int(query[i]):
                query[i] = int(query[i])
# Send request and return headers with body. Retry if failed.
retries_attempted = self.max_retries
req_params = None
while True:
retries_attempted -= 1
if retries_attempted < 0:
raise FailedRequestError(
request=f'{method} {path}: {req_params}',
message='Bad Request. Retries exceeded maximum.',
status_code=400,
time=dt.utcnow().strftime("%H:%M:%S")
)
retries_remaining = f'{retries_attempted} retries remain.'
# Authenticate if we are using a private endpoint.
if auth:
# Prepare signature.
signature = self._auth(
method=method,
params=query,
recv_window=recv_window,
)
# Sort the dictionary alphabetically.
                query = dict(sorted(query.items(), key=lambda kv: kv[0]))
# Append the signature to the dictionary.
query['sign'] = signature
# Define parameters and log the request.
if query is not None:
req_params = {k: v for k, v in query.items() if
v is not None}
else:
req_params = {}
# Log the request.
if self.log_requests:
self.logger.debug(f'Request -> {method} {path}: {req_params}')
# Prepare request; use 'params' for GET and 'data' for POST.
if method == 'GET':
r = self.client.prepare_request(
requests.Request(method, path, params=req_params)
)
else:
r = self.client.prepare_request(
requests.Request(method, path, data=json.dumps(req_params))
)
# Attempt the request.
try:
s = self.client.send(r, timeout=self.timeout)
# If requests fires an error, retry.
except (
requests.exceptions.ReadTimeout,
requests.exceptions.SSLError,
requests.exceptions.ConnectionError
) as e:
if self.force_retry:
self.logger.error(f'{e}. {retries_remaining}')
time.sleep(self.retry_delay)
continue
else:
raise e
# Convert response to dictionary, or raise if requests error.
try:
s_json = s.json()
# If we have trouble converting, handle the error and retry.
except JSONDecodeError as e:
if self.force_retry:
self.logger.error(f'{e}. {retries_remaining}')
time.sleep(self.retry_delay)
continue
else:
raise FailedRequestError(
request=f'{method} {path}: {req_params}',
message='Conflict. Could not decode JSON.',
status_code=409,
time=dt.utcnow().strftime("%H:%M:%S")
)
# If Bybit returns an error, raise.
if s_json['ret_code']:
# Generate error message.
error_msg = (
f'{s_json["ret_msg"]} (ErrCode: {s_json["ret_code"]})'
)
# Set default retry delay.
err_delay = self.retry_delay
# Retry non-fatal whitelisted error requests.
if s_json['ret_code'] in self.retry_codes:
# 10002, recv_window error; add 2.5 seconds and retry.
if s_json['ret_code'] == 10002:
error_msg += '. Added 2.5 seconds to recv_window'
recv_window += 2500
# 10006, ratelimit error; wait until rate_limit_reset_ms
# and retry.
elif s_json['ret_code'] == 10006:
self.logger.error(
f'{error_msg}. Ratelimited on current request. '
f'Sleeping, then trying again. Request: {path}'
)
# Calculate how long we need to wait.
limit_reset = s_json['rate_limit_reset_ms'] / 1000
reset_str = time.strftime(
'%X', time.localtime(limit_reset)
)
err_delay = int(limit_reset) - int(time.time())
error_msg = (
f'Ratelimit will reset at {reset_str}. '
f'Sleeping for {err_delay} seconds'
)
# Log the error.
self.logger.error(f'{error_msg}. {retries_remaining}')
time.sleep(err_delay)
continue
elif s_json['ret_code'] in self.ignore_codes:
pass
else:
raise InvalidRequestError(
request=f'{method} {path}: {req_params}',
message=s_json["ret_msg"],
status_code=s_json["ret_code"],
time=dt.utcnow().strftime("%H:%M:%S")
)
else:
return s_json
class WebSocket:
"""
Connector for Bybit's WebSocket API.
"""
def __init__(self, endpoint, api_key=None, api_secret=None,
subscriptions=None, logging_level=logging.INFO,
max_data_length=200, ping_interval=30, ping_timeout=10,
restart_on_error=True, purge_on_fetch=True,
trim_data=True):
"""
Initializes the websocket session.
:param endpoint: Required parameter. The endpoint of the remote
websocket.
:param api_key: Your API key. Required for authenticated endpoints.
Defaults to None.
:param api_secret: Your API secret key. Required for authenticated
endpoints. Defaults to None.
:param subscriptions: A list of desired topics to subscribe to. See API
documentation for more information. Defaults to an empty list, which
will raise an error.
:param logging_level: The logging level of the built-in logger. Defaults
to logging.INFO. Options are CRITICAL (50), ERROR (40),
WARNING (30), INFO (20), DEBUG (10), or NOTSET (0).
:param max_data_length: The maximum number of rows for the stored
dataset. A smaller number will prevent performance or memory issues.
:param ping_interval: The number of seconds between each automated ping.
:param ping_timeout: The number of seconds to wait for 'pong' before an
Exception is raised.
:param restart_on_error: Whether or not the connection should restart on
error.
        :param purge_on_fetch: Whether or not stored data should be purged on
         each fetch. For example, if the user subscribes to the 'trade' topic
         and fetches, should the fetch return all stored trade history (up to
         the maximum length), or only the data received since the last fetch?
:param trim_data: Decide whether the returning data should be
trimmed to only provide the data value.
:returns: WebSocket session.
"""
if not subscriptions:
raise Exception('Subscription list cannot be empty!')
# Require symbol on 'trade' topic.
if 'trade' in subscriptions:
raise Exception('\'trade\' requires a ticker, e.g. '
'\'trade.BTCUSD\'.')
# Require currency on 'insurance' topic.
if 'insurance' in subscriptions:
raise Exception('\'insurance\' requires a currency, e.g. '
'\'insurance.BTC\'.')
# Require timeframe and ticker on 'klineV2' topic.
if 'klineV2' in subscriptions:
raise Exception('\'klineV2\' requires a timeframe and ticker, e.g.'
' \'klineV2.5.BTCUSD\'.')
# set websocket name for logging purposes
self.wsName = 'Authenticated' if api_key else 'Non-Authenticated'
# Setup logger.
self.logger = logging.getLogger(__name__)
if len(logging.root.handlers) == 0:
            # No handler is set on the root logger, so add one for this
            # logger only, to avoid interfering with logging configured
            # outside this library.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
)
handler.setLevel(logging_level)
self.logger.addHandler(handler)
self.logger.debug(f'Initializing {self.wsName} WebSocket.')
# Ensure authentication for private topics.
if any(i in subscriptions for i in [
'position',
'execution',
'order',
'stop_order',
'wallet'
]) and api_key is None:
raise PermissionError('You must be authorized to use '
'private topics!')
# Set endpoint.
self.endpoint = endpoint
# Set API keys.
self.api_key = api_key
self.api_secret = api_secret
# Set topic subscriptions for WebSocket.
self.subscriptions = subscriptions
self.max_length = max_data_length
# Set ping settings.
self.ping_interval = ping_interval
self.ping_timeout = ping_timeout
# Other optional data handling settings.
self.handle_error = restart_on_error
self.purge = purge_on_fetch
self.trim = trim_data
        # Set initial state, initialize dictionary and connect.
self._reset()
self._connect(self.endpoint)
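    # Illustrative usage sketch: open a public (unauthenticated) stream.
    # The endpoint URL and topic names below are examples; see Bybit's
    # WebSocket documentation for the full list.
    #
    #   ws = WebSocket(
    #       endpoint='wss://stream.bybit.com/realtime',
    #       subscriptions=['trade.BTCUSD', 'instrument_info.100ms.BTCUSD']
    #   )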
def fetch(self, topic):
"""
Fetches data from the subscribed topic.
:param topic: Required parameter. The subscribed topic to poll.
:returns: Filtered data as dict.
"""
# If topic isn't a string.
if not isinstance(topic, str):
self.logger.error('Topic argument must be a string.')
return
# If the topic given isn't in the initial subscribed list.
if topic not in self.subscriptions:
self.logger.error(f'You aren\'t subscribed to the {topic} topic.')
return
# Pop all trade or execution data on each poll.
        # Don't pop order or stop_order data, as we would lose valuable state.
if topic.startswith((
'trade',
'execution'
)) and not topic.startswith('orderBook'):
data = self.data[topic].copy()
if self.purge:
self.data[topic] = []
return data
else:
try:
return self.data[topic]
except KeyError:
return []
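    # Illustrative usage sketch: poll a subscribed topic in a loop. With
    # purge_on_fetch enabled (the default), each fetch returns only the
    # trades received since the previous fetch.
    #
    #   while True:
    #       data = ws.fetch('trade.BTCUSD')
    #       if data:
    #           print(data)
    #       time.sleep(0.5)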
def ping(self):
"""
Pings the remote server to test the connection. The status of the
connection can be monitored using ws.ping().
"""
self.ws.send(json.dumps({'op': 'ping'}))
def exit(self):
"""
Closes the websocket connection.
"""
self.ws.close()
while self.ws.sock:
continue
self.exited = True
def _auth(self):
"""
Authorize websocket connection.
"""
# Generate expires.
expires = int((time.time() + 1) * 1000)
# Generate signature.
_val = f'GET/realtime{expires}'
signature = str(hmac.new(
bytes(self.api_secret, 'utf-8'),
bytes(_val, 'utf-8'), digestmod='sha256'
).hexdigest())
# Authenticate with API.
self.ws.send(
json.dumps({
'op': 'auth',
'args': [self.api_key, expires, signature]
})
)
def _connect(self, url):
"""
Open websocket in a thread.
"""
self.ws = websocket.WebSocketApp(
url=url,
on_message=lambda ws, msg: self._on_message(msg),
            on_close=lambda ws, *args: self._on_close(),
            on_open=lambda ws: self._on_open(),
on_error=lambda ws, err: self._on_error(err)
)
# Setup the thread running WebSocketApp.
self.wst = threading.Thread(target=lambda: self.ws.run_forever(
ping_interval=self.ping_interval,
ping_timeout=self.ping_timeout
))
# Configure as daemon; start.
self.wst.daemon = True
self.wst.start()
        # Attempt to connect for up to 10 seconds.
retries = 10
while retries > 0 and (not self.ws.sock or not self.ws.sock.connected):
retries -= 1
time.sleep(1)
# If connection was not successful, raise error.
if retries <= 0:
self.exit()
raise websocket.WebSocketTimeoutException('Connection failed.')
# If given an api_key, authenticate.
if self.api_key and self.api_secret:
self._auth()
# Check if subscriptions is a list.
if isinstance(self.subscriptions, str):
self.subscriptions = [self.subscriptions]
# Subscribe to the requested topics.
self.ws.send(
json.dumps({
'op': 'subscribe',
'args': self.subscriptions
})
)
# Initialize the topics.
for topic in self.subscriptions:
if topic not in self.data:
self.data[topic] = {}
@staticmethod
def _find_index(source, target, key):
"""
Find the index in source list of the targeted ID.
"""
return next(i for i, j in enumerate(source) if j[key] == target[key])
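    # For example, with source=[{'id': 1}, {'id': 7}] and target={'id': 7},
    # _find_index(source, target, 'id') returns 1. Raises StopIteration
    # when no entry matches, which _on_message() below catches for the
    # order and stop_order topics.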
def _on_message(self, message):
"""
Parse incoming messages. Similar structure to the
official WS connector.
"""
# Load dict of message.
msg_json = json.loads(message)
# If 'success' exists
if 'success' in msg_json:
if msg_json['success']:
# If 'request' exists.
if 'request' in msg_json:
                    # If we get successful auth, notify user.
if msg_json['request']['op'] == 'auth':
self.logger.debug('Authorization successful.')
self.auth = True
# If we get successful subscription, notify user.
if msg_json['request']['op'] == 'subscribe':
sub = msg_json['request']['args']
self.logger.debug(f'Subscription to {sub} successful.')
else:
response = msg_json['ret_msg']
if 'unknown topic' in response:
self.logger.error('Couldn\'t subscribe to topic.'
f' Error: {response}.')
                # If we get unsuccessful auth, notify user.
elif msg_json['request']['op'] == 'auth':
self.logger.debug('Authorization failed. Please check your '
'API keys and restart.')
elif 'topic' in msg_json:
topic = msg_json['topic']
# If incoming 'orderbookL2' data.
if 'orderBook' in topic:
# Make updates according to delta response.
if 'delta' in msg_json['type']:
# Delete.
for entry in msg_json['data']['delete']:
index = self._find_index(self.data[topic], entry, 'id')
self.data[topic].pop(index)
# Update.
for entry in msg_json['data']['update']:
index = self._find_index(self.data[topic], entry, 'id')
self.data[topic][index] = entry
# Insert.
for entry in msg_json['data']['insert']:
self.data[topic].append(entry)
# Record the initial snapshot.
elif 'snapshot' in msg_json['type']:
self.data[topic] = msg_json['data']
# For incoming 'order' and 'stop_order' data.
elif any(i in topic for i in ['order', 'stop_order']):
# record incoming data
for i in msg_json['data']:
try:
                        # Update existing entries. Temporary workaround:
                        # USDT (linear) payloads key on '<topic>_id'
                        # (e.g. 'stop_order_id') rather than 'order_id'.
ord_id = topic + '_id' if i['symbol'].endswith('USDT') else 'order_id'
index = self._find_index(self.data[topic], i, ord_id)
self.data[topic][index] = i
except StopIteration:
# Keep appending or create new list if not already created.
try:
self.data[topic].append(i)
except AttributeError:
self.data[topic] = msg_json['data']
# For incoming 'trade' and 'execution' data.
elif any(i in topic for i in ['trade', 'execution']):
# Keep appending or create new list if not already created.
try:
for i in msg_json['data']:
self.data[topic].append(i)
except AttributeError:
self.data[topic] = msg_json['data']
# If list is too long, pop the first entry.
if len(self.data[topic]) > self.max_length:
self.data[topic].pop(0)
# If incoming 'insurance', 'klineV2', or 'wallet' data.
elif any(i in topic for i in ['insurance', 'klineV2', 'wallet',
'candle']):
# Record incoming data.
self.data[topic] = msg_json['data'][0] if self.trim else msg_json
# If incoming 'instrument_info' data.
elif 'instrument_info' in topic:
# Make updates according to delta response.
if 'delta' in msg_json['type']:
for i in msg_json['data']['update'][0]:
self.data[topic][i] = msg_json['data']['update'][0][i]
# Record the initial snapshot.
elif 'snapshot' in msg_json['type']:
self.data[topic] = msg_json['data'] if self.trim else msg_json
# If incoming 'position' data.
elif 'position' in topic:
# Record incoming position data.
for p in msg_json['data']:
# linear (USDT) positions have Buy|Sell side and
# updates contain all USDT positions.
# For linear tickers...
if p['symbol'].endswith('USDT'):
try:
self.data[topic][p['symbol']][p['side']] = p
# if side key hasn't been created yet...
except KeyError:
self.data[topic][p['symbol']] = {p['side']: p}
# For non-linear tickers...
else:
self.data[topic][p['symbol']] = p
def _on_error(self, error):
"""
Exit on errors and raise exception, or attempt reconnect.
"""
if not self.exited:
self.logger.error(f'WebSocket {self.wsName} encountered error: {error}.')
self.exit()
# Reconnect.
if self.handle_error:
self._reset()
self._connect(self.endpoint)
def _on_open(self):
"""
Log WS open.
"""
self.logger.debug(f'WebSocket {self.wsName} opened.')
def _on_close(self):
"""
Log WS close.
"""
self.logger.debug(f'WebSocket {self.wsName} closed.')
def _reset(self):
"""
Set state booleans and initialize dictionary.
"""
self.exited = False
self.auth = False
self.data = {}
| 33.691429 | 125 | 0.556926 |
import time
import hmac
import json
import logging
import threading
import requests
import websocket
from datetime import datetime as dt
from concurrent.futures import ThreadPoolExecutor
from .exceptions import FailedRequestError, InvalidRequestError
try:
from simplejson.errors import JSONDecodeError
except ImportError:
from json.decoder import JSONDecodeError
VERSION = '1.1.18'
class HTTP:
def __init__(self, endpoint=None, api_key=None, api_secret=None,
logging_level=logging.INFO, log_requests=False,
request_timeout=10, recv_window=5000, force_retry=False,
retry_codes=None, ignore_codes=None, max_retries=3,
retry_delay=3, referral_id=None):
if endpoint is None:
self.endpoint = 'https://api.bybit.com'
else:
self.endpoint = endpoint
self.logger = logging.getLogger(__name__)
if len(logging.root.handlers) == 0:
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
)
handler.setLevel(logging_level)
self.logger.addHandler(handler)
self.logger.debug('Initializing HTTP session.')
self.log_requests = log_requests
self.api_key = api_key
self.api_secret = api_secret
self.timeout = request_timeout
self.recv_window = recv_window
self.force_retry = force_retry
self.max_retries = max_retries
self.retry_delay = retry_delay
if retry_codes is None:
self.retry_codes = {10002, 10006, 30034, 30035, 130035, 130150}
else:
self.retry_codes = retry_codes
if ignore_codes is None:
self.ignore_codes = set()
else:
self.ignore_codes = ignore_codes
self.client = requests.Session()
self.client.headers.update(
{
'User-Agent': 'pybit-' + VERSION,
'Content-Type': 'application/json',
'Accept': 'application/json',
}
)
if referral_id:
self.client.headers.update({'Referer': referral_id})
def _exit(self):
self.client.close()
self.logger.debug('HTTP session closed.')
def orderbook(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/orderBook/L2',
query=kwargs
)
def query_kline(self, **kwargs):
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/kline'
else:
suffix = '/v2/public/kline/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def latest_information_for_symbol(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/tickers',
query=kwargs
)
def public_trading_records(self, **kwargs):
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/recent-trading-records'
else:
suffix = '/v2/public/trading-records'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def query_symbol(self):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/symbols'
)
def liquidated_orders(self, **kwargs):
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/liq-records',
query=kwargs
)
def query_mark_price_kline(self, **kwargs):
if 'from_time' in kwargs:
kwargs['from'] = kwargs.pop('from_time')
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/mark-price-kline'
else:
suffix = '/v2/public/mark-price-kline'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def open_interest(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/open-interest',
query=kwargs
)
def latest_big_deal(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/big-deal',
query=kwargs
)
def long_short_ratio(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/account-ratio',
query=kwargs
)
def place_active_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/create'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/create'
else:
suffix = '/v2/private/order/create'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_active_order_bulk(self, orders: list, max_in_parallel=10):
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.place_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def get_active_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/v2/private/order/list'
else:
suffix = '/futures/private/order/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_active_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/cancel'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/cancel'
else:
suffix = '/v2/private/order/cancel'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_active_order_bulk(self, orders: list, max_in_parallel=10):
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.cancel_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def cancel_all_active_orders(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/cancel-all'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/cancelAll'
else:
suffix = '/v2/private/order/cancelAll'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_active_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/replace'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order/replace'
else:
suffix = '/v2/private/order/replace'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_active_order_bulk(self, orders: list, max_in_parallel=10):
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.replace_active_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def query_active_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/order/search'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/order'
else:
suffix = '/v2/private/order'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_conditional_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/create'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/create'
else:
suffix = '/v2/private/stop-order/create'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def place_conditional_order_bulk(self, orders: list, max_in_parallel=10):
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.place_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def get_conditional_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/list'
else:
suffix = '/v2/private/stop-order/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_conditional_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/cancel'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/cancel'
else:
suffix = '/v2/private/stop-order/cancel'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cancel_conditional_order_bulk(self, orders: list, max_in_parallel=10):
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.cancel_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def cancel_all_conditional_orders(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/cancel-all'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/cancelAll'
else:
suffix = '/v2/private/stop-order/cancelAll'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_conditional_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/replace'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order/replace'
else:
suffix = '/v2/private/stop-order/replace'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def replace_conditional_order_bulk(self, orders: list, max_in_parallel=10):
with ThreadPoolExecutor(max_workers=max_in_parallel) as executor:
executions = [
executor.submit(
self.replace_conditional_order,
**order
) for order in orders
]
executor.shutdown()
return [execution.result() for execution in executions]
def query_conditional_order(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/stop-order/search'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/stop-order'
else:
suffix = '/v2/private/stop-order'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def my_position(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/list'
else:
suffix = '/v2/private/position/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def set_auto_add_margin(self, **kwargs):
return self._submit_request(
method='POST',
path=self.endpoint + '/private/linear/position/set-auto-add-margin',
query=kwargs,
auth=True
)
def set_leverage(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/set-leverage'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/leverage/save'
else:
suffix = '/v2/private/position/leverage/save'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def cross_isolated_margin_switch(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/switch-isolated'
else:
suffix = '/futures/private/position/switch-mode'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def position_mode_switch(self, **kwargs):
return self._submit_request(
method='POST',
path=self.endpoint + '/futures/private/position/switch-mode',
query=kwargs,
auth=True
)
def change_margin(self, **kwargs):
if kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/change-position-margin'
else:
suffix = '/v2/private/position/change-position-margin'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def set_trading_stop(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/position/trading-stop'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/position/trading-stop'
else:
suffix = '/v2/private/position/trading-stop'
return self._submit_request(
method='POST',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def add_reduce_margin(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/private/linear/position/add-margin',
query=kwargs,
auth=True
)
def user_leverage(self, **kwargs):
self.logger.warning('This endpoint is deprecated and will be removed. Use my_position()')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/position/list',
query=kwargs,
auth=True
)
def change_user_leverage(self, **kwargs):
self.logger.warning('This endpoint is deprecated and will be removed. Use set_leverage()')
return self._submit_request(
method='POST',
path=self.endpoint + '/user/leverage/save',
query=kwargs,
auth=True
)
def user_trade_records(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/trade/execution/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/execution/list'
else:
suffix = '/v2/private/execution/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def closed_profit_and_loss(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/trade/closed-pnl/list'
elif kwargs.get('symbol', '')[-2:].isdigit():
suffix = '/futures/private/trade/closed-pnl/list'
else:
suffix = '/v2/private/trade/closed-pnl/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def get_risk_limit(self, is_linear=False):
if is_linear:
suffix = '/public/linear/risk-limit'
else:
suffix = '/open-api/wallet/risk-limit/list'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
auth=True
)
def set_risk_limit(self, **kwargs):
return self._submit_request(
method='POST',
path=self.endpoint + '/open-api/wallet/risk-limit',
query=kwargs,
auth=True
)
def get_the_last_funding_rate(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/public/linear/funding/prev-funding-rate'
else:
suffix = '/v2/private/funding/prev-funding-rate'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs
)
def my_last_funding_fee(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/funding/prev-funding'
else:
suffix = '/v2/private/funding/prev-funding'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def predicted_funding_rate(self, **kwargs):
if kwargs.get('symbol', '').endswith('USDT'):
suffix = '/private/linear/funding/predicted-funding'
else:
suffix = '/v2/private/funding/predicted-funding'
return self._submit_request(
method='GET',
path=self.endpoint + suffix,
query=kwargs,
auth=True
)
def api_key_info(self):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/account/api-key',
auth=True
)
def lcp_info(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/account/lcp',
query=kwargs,
auth=True
)
def get_wallet_balance(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/balance',
query=kwargs,
auth=True
)
def wallet_fund_records(self, **kwargs):
if 'from_id' in kwargs:
kwargs['from'] = kwargs.pop('from_id')
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/fund/records',
query=kwargs,
auth=True
)
def withdraw_records(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/wallet/withdraw/list',
query=kwargs,
auth=True
)
def asset_exchange_records(self, **kwargs):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/private/exchange-order/list',
query=kwargs,
auth=True
)
def server_time(self):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/time'
)
def announcement(self):
return self._submit_request(
method='GET',
path=self.endpoint + '/v2/public/announcement'
)
def close_position(self, symbol):
try:
r = self.my_position(symbol=symbol)['result']
# If there is no returned position, we want to handle that.
except KeyError:
return self.logger.error('No position detected.')
# Next we generate a list of market orders
orders = [
{
'symbol': symbol,
'order_type': 'Market',
'side': 'Buy' if p['side'] == 'Sell' else 'Sell',
'qty': p['size'],
'time_in_force': 'ImmediateOrCancel',
'reduce_only': True,
'close_on_trigger': True
} for p in (r if isinstance(r, list) else [r]) if p['size'] > 0
]
if len(orders) == 0:
return self.logger.error('No position detected.')
# Submit a market order against each open position for the same qty.
return self.place_active_order_bulk(orders)
def _auth(self, method, params, recv_window):
api_key = self.api_key
api_secret = self.api_secret
if api_key is None or api_secret is None:
raise PermissionError('Authenticated endpoints require keys.')
# Append required parameters.
params['api_key'] = api_key
params['recv_window'] = recv_window
params['timestamp'] = int(time.time() * 10 ** 3)
# Sort dictionary alphabetically to create querystring.
_val = '&'.join(
[str(k) + '=' + str(v) for k, v in sorted(params.items()) if
(k != 'sign') and (v is not None)]
)
# Bug fix. Replaces all capitalized booleans with lowercase.
if method == 'POST':
_val = _val.replace('True', 'true').replace('False', 'false')
# Return signature.
return str(hmac.new(
bytes(api_secret, 'utf-8'),
bytes(_val, 'utf-8'), digestmod='sha256'
).hexdigest())
def _submit_request(self, method=None, path=None, query=None, auth=False):
# Store original recv_window.
recv_window = self.recv_window
# Bug fix: change floating whole numbers to integers to prevent
# auth signature errors.
if query is not None:
for i in query.keys():
if isinstance(query[i], float) and query[i] == int(query[i]):
query[i] = int(query[i])
# Send request and return headers with body. Retry if failed.
retries_attempted = self.max_retries
req_params = None
while True:
retries_attempted -= 1
if retries_attempted < 0:
raise FailedRequestError(
request=f'{method} {path}: {req_params}',
message='Bad Request. Retries exceeded maximum.',
status_code=400,
time=dt.utcnow().strftime("%H:%M:%S")
)
retries_remaining = f'{retries_attempted} retries remain.'
# Authenticate if we are using a private endpoint.
if auth:
# Prepare signature.
signature = self._auth(
method=method,
params=query,
recv_window=recv_window,
)
# Sort the dictionary alphabetically.
query = dict(sorted(query.items(), key=lambda x: x))
# Append the signature to the dictionary.
query['sign'] = signature
# Define parameters and log the request.
if query is not None:
req_params = {k: v for k, v in query.items() if
v is not None}
else:
req_params = {}
# Log the request.
if self.log_requests:
self.logger.debug(f'Request -> {method} {path}: {req_params}')
# Prepare request; use 'params' for GET and 'data' for POST.
if method == 'GET':
r = self.client.prepare_request(
requests.Request(method, path, params=req_params)
)
else:
r = self.client.prepare_request(
requests.Request(method, path, data=json.dumps(req_params))
)
# Attempt the request.
try:
s = self.client.send(r, timeout=self.timeout)
# If requests fires an error, retry.
except (
requests.exceptions.ReadTimeout,
requests.exceptions.SSLError,
requests.exceptions.ConnectionError
) as e:
if self.force_retry:
self.logger.error(f'{e}. {retries_remaining}')
time.sleep(self.retry_delay)
continue
else:
raise e
# Convert response to dictionary, or raise if requests error.
try:
s_json = s.json()
# If we have trouble converting, handle the error and retry.
except JSONDecodeError as e:
if self.force_retry:
self.logger.error(f'{e}. {retries_remaining}')
time.sleep(self.retry_delay)
continue
else:
raise FailedRequestError(
request=f'{method} {path}: {req_params}',
message='Conflict. Could not decode JSON.',
status_code=409,
time=dt.utcnow().strftime("%H:%M:%S")
)
# If Bybit returns an error, raise.
if s_json['ret_code']:
# Generate error message.
error_msg = (
f'{s_json["ret_msg"]} (ErrCode: {s_json["ret_code"]})'
)
# Set default retry delay.
err_delay = self.retry_delay
# Retry non-fatal whitelisted error requests.
if s_json['ret_code'] in self.retry_codes:
# 10002, recv_window error; add 2.5 seconds and retry.
if s_json['ret_code'] == 10002:
error_msg += '. Added 2.5 seconds to recv_window'
recv_window += 2500
# 10006, ratelimit error; wait until rate_limit_reset_ms
# and retry.
elif s_json['ret_code'] == 10006:
self.logger.error(
f'{error_msg}. Ratelimited on current request. '
f'Sleeping, then trying again. Request: {path}'
)
# Calculate how long we need to wait.
limit_reset = s_json['rate_limit_reset_ms'] / 1000
reset_str = time.strftime(
'%X', time.localtime(limit_reset)
)
err_delay = int(limit_reset) - int(time.time())
error_msg = (
f'Ratelimit will reset at {reset_str}. '
f'Sleeping for {err_delay} seconds'
)
# Log the error.
self.logger.error(f'{error_msg}. {retries_remaining}')
time.sleep(err_delay)
continue
elif s_json['ret_code'] in self.ignore_codes:
pass
else:
raise InvalidRequestError(
request=f'{method} {path}: {req_params}',
message=s_json["ret_msg"],
status_code=s_json["ret_code"],
time=dt.utcnow().strftime("%H:%M:%S")
)
else:
return s_json
class WebSocket:
def __init__(self, endpoint, api_key=None, api_secret=None,
subscriptions=None, logging_level=logging.INFO,
max_data_length=200, ping_interval=30, ping_timeout=10,
restart_on_error=True, purge_on_fetch=True,
trim_data=True):
if not subscriptions:
raise Exception('Subscription list cannot be empty!')
# Require symbol on 'trade' topic.
if 'trade' in subscriptions:
raise Exception('\'trade\' requires a ticker, e.g. '
'\'trade.BTCUSD\'.')
# Require currency on 'insurance' topic.
if 'insurance' in subscriptions:
raise Exception('\'insurance\' requires a currency, e.g. '
'\'insurance.BTC\'.')
# Require timeframe and ticker on 'klineV2' topic.
if 'klineV2' in subscriptions:
raise Exception('\'klineV2\' requires a timeframe and ticker, e.g.'
' \'klineV2.5.BTCUSD\'.')
# set websocket name for logging purposes
self.wsName = 'Authenticated' if api_key else 'Non-Authenticated'
# Setup logger.
self.logger = logging.getLogger(__name__)
if len(logging.root.handlers) == 0:
# no handler on root logger set -> we add handler just for this logger to not mess with custom logic from outside
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
)
handler.setLevel(logging_level)
self.logger.addHandler(handler)
self.logger.debug(f'Initializing {self.wsName} WebSocket.')
# Ensure authentication for private topics.
if any(i in subscriptions for i in [
'position',
'execution',
'order',
'stop_order',
'wallet'
]) and api_key is None:
raise PermissionError('You must be authorized to use '
'private topics!')
# Set endpoint.
self.endpoint = endpoint
# Set API keys.
self.api_key = api_key
self.api_secret = api_secret
# Set topic subscriptions for WebSocket.
self.subscriptions = subscriptions
self.max_length = max_data_length
# Set ping settings.
self.ping_interval = ping_interval
self.ping_timeout = ping_timeout
# Other optional data handling settings.
self.handle_error = restart_on_error
self.purge = purge_on_fetch
self.trim = trim_data
# Set initial state, initialize dictionary and connnect.
self._reset()
self._connect(self.endpoint)
def fetch(self, topic):
# If topic isn't a string.
if not isinstance(topic, str):
self.logger.error('Topic argument must be a string.')
return
if topic not in self.subscriptions:
self.logger.error(f'You aren\'t subscribed to the {topic} topic.')
return
if topic.startswith((
'trade',
'execution'
)) and not topic.startswith('orderBook'):
data = self.data[topic].copy()
if self.purge:
self.data[topic] = []
return data
else:
try:
return self.data[topic]
except KeyError:
return []
def ping(self):
self.ws.send(json.dumps({'op': 'ping'}))
def exit(self):
self.ws.close()
while self.ws.sock:
continue
self.exited = True
def _auth(self):
expires = int((time.time() + 1) * 1000)
_val = f'GET/realtime{expires}'
signature = str(hmac.new(
bytes(self.api_secret, 'utf-8'),
bytes(_val, 'utf-8'), digestmod='sha256'
).hexdigest())
self.ws.send(
json.dumps({
'op': 'auth',
'args': [self.api_key, expires, signature]
})
)
def _connect(self, url):
self.ws = websocket.WebSocketApp(
url=url,
on_message=lambda ws, msg: self._on_message(msg),
on_close=self._on_close(),
on_open=self._on_open(),
on_error=lambda ws, err: self._on_error(err)
)
self.wst = threading.Thread(target=lambda: self.ws.run_forever(
ping_interval=self.ping_interval,
ping_timeout=self.ping_timeout
))
self.wst.daemon = True
self.wst.start()
retries = 10
while retries > 0 and (not self.ws.sock or not self.ws.sock.connected):
retries -= 1
time.sleep(1)
if retries <= 0:
self.exit()
raise websocket.WebSocketTimeoutException('Connection failed.')
if self.api_key and self.api_secret:
self._auth()
if isinstance(self.subscriptions, str):
self.subscriptions = [self.subscriptions]
self.ws.send(
json.dumps({
'op': 'subscribe',
'args': self.subscriptions
})
)
for topic in self.subscriptions:
if topic not in self.data:
self.data[topic] = {}
@staticmethod
def _find_index(source, target, key):
return next(i for i, j in enumerate(source) if j[key] == target[key])
def _on_message(self, message):
msg_json = json.loads(message)
if 'success' in msg_json:
if msg_json['success']:
if 'request' in msg_json:
if msg_json['request']['op'] == 'auth':
self.logger.debug('Authorization successful.')
self.auth = True
if msg_json['request']['op'] == 'subscribe':
sub = msg_json['request']['args']
self.logger.debug(f'Subscription to {sub} successful.')
else:
response = msg_json['ret_msg']
if 'unknown topic' in response:
self.logger.error('Couldn\'t subscribe to topic.'
f' Error: {response}.')
# If we get unsuccesful auth, notify user.
elif msg_json['request']['op'] == 'auth':
self.logger.debug('Authorization failed. Please check your '
'API keys and restart.')
elif 'topic' in msg_json:
topic = msg_json['topic']
# If incoming 'orderbookL2' data.
if 'orderBook' in topic:
# Make updates according to delta response.
if 'delta' in msg_json['type']:
# Delete.
for entry in msg_json['data']['delete']:
index = self._find_index(self.data[topic], entry, 'id')
self.data[topic].pop(index)
# Update.
for entry in msg_json['data']['update']:
index = self._find_index(self.data[topic], entry, 'id')
self.data[topic][index] = entry
# Insert.
for entry in msg_json['data']['insert']:
self.data[topic].append(entry)
# Record the initial snapshot.
elif 'snapshot' in msg_json['type']:
self.data[topic] = msg_json['data']
# For incoming 'order' and 'stop_order' data.
elif any(i in topic for i in ['order', 'stop_order']):
# record incoming data
for i in msg_json['data']:
try:
# update existing entries
# temporary workaround for field anomaly in stop_order data
ord_id = topic + '_id' if i['symbol'].endswith('USDT') else 'order_id'
index = self._find_index(self.data[topic], i, ord_id)
self.data[topic][index] = i
except StopIteration:
# Keep appending or create new list if not already created.
try:
self.data[topic].append(i)
except AttributeError:
self.data[topic] = msg_json['data']
# For incoming 'trade' and 'execution' data.
elif any(i in topic for i in ['trade', 'execution']):
# Keep appending or create new list if not already created.
try:
for i in msg_json['data']:
self.data[topic].append(i)
except AttributeError:
self.data[topic] = msg_json['data']
# If list is too long, pop the first entry.
if len(self.data[topic]) > self.max_length:
self.data[topic].pop(0)
# If incoming 'insurance', 'klineV2', or 'wallet' data.
elif any(i in topic for i in ['insurance', 'klineV2', 'wallet',
'candle']):
# Record incoming data.
self.data[topic] = msg_json['data'][0] if self.trim else msg_json
# If incoming 'instrument_info' data.
elif 'instrument_info' in topic:
# Make updates according to delta response.
if 'delta' in msg_json['type']:
for i in msg_json['data']['update'][0]:
self.data[topic][i] = msg_json['data']['update'][0][i]
# Record the initial snapshot.
elif 'snapshot' in msg_json['type']:
self.data[topic] = msg_json['data'] if self.trim else msg_json
# If incoming 'position' data.
elif 'position' in topic:
# Record incoming position data.
for p in msg_json['data']:
# linear (USDT) positions have Buy|Sell side and
# updates contain all USDT positions.
# For linear tickers...
if p['symbol'].endswith('USDT'):
try:
self.data[topic][p['symbol']][p['side']] = p
# if side key hasn't been created yet...
except KeyError:
self.data[topic][p['symbol']] = {p['side']: p}
else:
self.data[topic][p['symbol']] = p
def _on_error(self, error):
if not self.exited:
self.logger.error(f'WebSocket {self.wsName} encountered error: {error}.')
self.exit()
if self.handle_error:
self._reset()
self._connect(self.endpoint)
def _on_open(self):
self.logger.debug(f'WebSocket {self.wsName} opened.')
def _on_close(self):
self.logger.debug(f'WebSocket {self.wsName} closed.')
def _reset(self):
self.exited = False
self.auth = False
self.data = {}
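# A minimal, hedged usage sketch. The enclosing class is defined above this
# excerpt, so `WebSocket` below is a placeholder name, and the endpoint URL
# and topic are illustrative:
#
#   ws = WebSocket(
#       endpoint='wss://stream.bybit.com/realtime',
#       subscriptions=['trade.BTCUSD'],
#   )
#   trades = ws.fetch('trade.BTCUSD')  # drained copy when purge_on_fetch is set
#   ws.exit()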
| true | true |
f72482536502a08c92fdd47d1959c93914af950f | 1330 | py | Python | Competition Codes/packageFunction.py | Harrdy2018/2018-Huawei-Code-Craft | 62fa76c658746550b2e5c8ef059a5c748e93c06c | ["Apache-2.0"] | 1 | 2019-04-08T04:15:51.000Z | 2019-04-08T04:15:51.000Z | Competition Codes/packageFunction.py | Harrdy2018/2018-Huawei-Code-Craft | 62fa76c658746550b2e5c8ef059a5c748e93c06c | ["Apache-2.0"] | null | null | null | Competition Codes/packageFunction.py | Harrdy2018/2018-Huawei-Code-Craft | 62fa76c658746550b2e5c8ef059a5c748e93c06c | ["Apache-2.0"] | 2 | 2018-04-16T10:11:24.000Z | 2019-06-28T06:30:28.000Z |
#coding=utf-8
def MultiplePackage(N,C,weight,value,num,physic):
    '''
    Bounded (multiple) knapsack: every item has a per-item count limit.
    :param N: number of predicted VM flavors, e.g. N=pre_num
    :param C: knapsack capacity; if the input optimizes CPU, the capacity is MEM, e.g. C=
    :param weight: capacity cost of each item as an array, e.g. weight=[0,5,4,7,2,6]
    :param value: value of each item as an array, e.g. value=[0,12,3,10,3,6]
    :param num: count limit of each item, e.g. num=[0,2,4,1,5,3]
    :param physic: upper bound a candidate value may not exceed (larger candidates are skipped)
    :return: the full value matrix f
    '''
    # Initialize f[N+1][C+1] with zeros; f[i][j] is the maximum value obtainable
    # by packing the first i items into a knapsack of capacity j.
    f=[[0 for col in range(C+1)] for row in range(N+1)]
    for i in range(1,N+1):
        for j in range(1,C+1):
            # Item i can be taken at most min(j // weight[i], num[i]) times
            # (integer division keeps range() valid on Python 3 as well).
            max_num_i=min(j//weight[i],num[i])
            # Start from k=0 (take none of item i); the loop assigns f[i][j]
            # the best value over every feasible count k of item i.
            f[i][j]=f[i-1][j]
            for k in range(max_num_i+1):
                if f[i][j]<f[i-1][j-k*weight[i]]+k*value[i]<=physic:
                    # State transition equation.
                    f[i][j]=f[i-1][j-k*weight[i]]+k*value[i]
return f
def FindWhat(f,value,weight,i,j,item,num):
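    # Backtracks through the DP table to recover, for each item i, the count
    # item[i] used to reach the optimum; `item` should be pre-filled with zeros.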
if i>=0:
if f[i][j]==f[i-1][j]:
item[i]=0
FindWhat(f,value,weight,i-1,j,item,num)
elif j-weight[i]>=0:
for k in range(num[i]+1):
if f[i][j]==f[i-1][j-k*weight[i]]+k*value[i]:
item[i]=k
break
FindWhat(f,value,weight,i-1,j-item[i]*weight[i],item,num)
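# A minimal, hedged usage sketch: the toy weights/values/counts below are
# illustrative only (index 0 is a dummy slot, items are 1..N), and `physic`
# is set high enough not to cap any candidate value.
if __name__ == '__main__':
    weight = [0, 5, 4, 7, 2, 6]
    value = [0, 12, 3, 10, 3, 6]
    num = [0, 2, 4, 1, 5, 3]
    N, C = 5, 10
    f = MultiplePackage(N, C, weight, value, num, physic=10**9)
    item = [0] * (N + 1)  # item[i] = how many copies of item i were taken
    FindWhat(f, value, weight, N, C, item, num)
    print('best value:', f[N][C], 'counts:', item)  # -> best value: 24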
| 35 | 69 | 0.52406 | true | true |
f72482abf2030cf6d190809b6914b9bdbeec552b | 3074 | py | Python | users/migrations/0001_initial.py | SohailAQ/Rest_Alpha | 326009217f16164c7f4667e7b3dbb82e43cf2469 | ["MIT"] | null | null | null | users/migrations/0001_initial.py | SohailAQ/Rest_Alpha | 326009217f16164c7f4667e7b3dbb82e43cf2469 | ["MIT"] | null | null | null | users/migrations/0001_initial.py | SohailAQ/Rest_Alpha | 326009217f16164c7f4667e7b3dbb82e43cf2469 | ["MIT"] | null | null | null |
# Generated by Django 3.2.8 on 2021-10-21 16:43
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('gender', models.CharField(choices=[('M', 'Male'), ('F', 'Female'), ('NS', 'Not Specified')], max_length=20)),
('phone_number', models.CharField(max_length=30)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
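# Usage note (not part of the generated file; 'users' is assumed to be the
# app label, matching the migration's path):
#
#   python manage.py migrate users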
| 65.404255 | 329 | 0.656474 |
f72482c36144879a71a381f8ffacbf087f5e4594 | 28655 | py | Python | sdk/python/pulumi_kubernetes/core/v1/Event.py | polivbr/pulumi-kubernetes | 36a5fb34240a38a60b52a5f4e55e66e248d9305f | ["Apache-2.0"] | 277 | 2018-06-18T14:57:09.000Z | 2022-03-29T04:05:06.000Z | sdk/python/pulumi_kubernetes/core/v1/Event.py | polivbr/pulumi-kubernetes | 36a5fb34240a38a60b52a5f4e55e66e248d9305f | ["Apache-2.0"] | 1447 | 2018-06-20T00:58:34.000Z | 2022-03-31T21:28:43.000Z | sdk/python/pulumi_kubernetes/core/v1/Event.py | polivbr/pulumi-kubernetes | 36a5fb34240a38a60b52a5f4e55e66e248d9305f | ["Apache-2.0"] | 95 | 2018-06-30T03:30:05.000Z | 2022-03-29T04:05:09.000Z |
# coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ... import meta as _meta
from ._inputs import *
__all__ = ['EventInitArgs', 'Event']
@pulumi.input_type
class EventInitArgs:
def __init__(__self__, *,
involved_object: pulumi.Input['ObjectReferenceArgs'],
metadata: pulumi.Input['_meta.v1.ObjectMetaArgs'],
action: Optional[pulumi.Input[str]] = None,
api_version: Optional[pulumi.Input[str]] = None,
count: Optional[pulumi.Input[int]] = None,
event_time: Optional[pulumi.Input[str]] = None,
first_timestamp: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
last_timestamp: Optional[pulumi.Input[str]] = None,
message: Optional[pulumi.Input[str]] = None,
reason: Optional[pulumi.Input[str]] = None,
related: Optional[pulumi.Input['ObjectReferenceArgs']] = None,
reporting_component: Optional[pulumi.Input[str]] = None,
reporting_instance: Optional[pulumi.Input[str]] = None,
series: Optional[pulumi.Input['EventSeriesArgs']] = None,
source: Optional[pulumi.Input['EventSourceArgs']] = None,
type: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Event resource.
:param pulumi.Input['ObjectReferenceArgs'] involved_object: The object that this event is about.
:param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
:param pulumi.Input[str] action: What action was taken/failed regarding to the Regarding object.
:param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
:param pulumi.Input[int] count: The number of times this event has occurred.
:param pulumi.Input[str] event_time: Time when this Event was first observed.
:param pulumi.Input[str] first_timestamp: The time at which the event was first recorded. (Time of server receipt is in TypeMeta.)
:param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[str] last_timestamp: The time at which the most recent occurrence of this event was recorded.
:param pulumi.Input[str] message: A human-readable description of the status of this operation.
:param pulumi.Input[str] reason: This should be a short, machine understandable string that gives the reason for the transition into the object's current status.
:param pulumi.Input['ObjectReferenceArgs'] related: Optional secondary object for more complex actions.
:param pulumi.Input[str] reporting_component: Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.
:param pulumi.Input[str] reporting_instance: ID of the controller instance, e.g. `kubelet-xyzf`.
:param pulumi.Input['EventSeriesArgs'] series: Data about the Event series this event represents or nil if it's a singleton Event.
:param pulumi.Input['EventSourceArgs'] source: The component reporting this event. Should be a short machine understandable string.
:param pulumi.Input[str] type: Type of this event (Normal, Warning), new types could be added in the future
"""
pulumi.set(__self__, "involved_object", involved_object)
pulumi.set(__self__, "metadata", metadata)
if action is not None:
pulumi.set(__self__, "action", action)
if api_version is not None:
pulumi.set(__self__, "api_version", 'v1')
if count is not None:
pulumi.set(__self__, "count", count)
if event_time is not None:
pulumi.set(__self__, "event_time", event_time)
if first_timestamp is not None:
pulumi.set(__self__, "first_timestamp", first_timestamp)
if kind is not None:
pulumi.set(__self__, "kind", 'Event')
if last_timestamp is not None:
pulumi.set(__self__, "last_timestamp", last_timestamp)
if message is not None:
pulumi.set(__self__, "message", message)
if reason is not None:
pulumi.set(__self__, "reason", reason)
if related is not None:
pulumi.set(__self__, "related", related)
if reporting_component is not None:
pulumi.set(__self__, "reporting_component", reporting_component)
if reporting_instance is not None:
pulumi.set(__self__, "reporting_instance", reporting_instance)
if series is not None:
pulumi.set(__self__, "series", series)
if source is not None:
pulumi.set(__self__, "source", source)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="involvedObject")
def involved_object(self) -> pulumi.Input['ObjectReferenceArgs']:
"""
The object that this event is about.
"""
return pulumi.get(self, "involved_object")
@involved_object.setter
def involved_object(self, value: pulumi.Input['ObjectReferenceArgs']):
pulumi.set(self, "involved_object", value)
@property
@pulumi.getter
def metadata(self) -> pulumi.Input['_meta.v1.ObjectMetaArgs']:
"""
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
"""
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: pulumi.Input['_meta.v1.ObjectMetaArgs']):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter
def action(self) -> Optional[pulumi.Input[str]]:
"""
What action was taken/failed regarding to the Regarding object.
"""
return pulumi.get(self, "action")
@action.setter
def action(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "action", value)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter
def count(self) -> Optional[pulumi.Input[int]]:
"""
The number of times this event has occurred.
"""
return pulumi.get(self, "count")
@count.setter
def count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "count", value)
@property
@pulumi.getter(name="eventTime")
def event_time(self) -> Optional[pulumi.Input[str]]:
"""
Time when this Event was first observed.
"""
return pulumi.get(self, "event_time")
@event_time.setter
def event_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "event_time", value)
@property
@pulumi.getter(name="firstTimestamp")
def first_timestamp(self) -> Optional[pulumi.Input[str]]:
"""
The time at which the event was first recorded. (Time of server receipt is in TypeMeta.)
"""
return pulumi.get(self, "first_timestamp")
@first_timestamp.setter
def first_timestamp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "first_timestamp", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter(name="lastTimestamp")
def last_timestamp(self) -> Optional[pulumi.Input[str]]:
"""
The time at which the most recent occurrence of this event was recorded.
"""
return pulumi.get(self, "last_timestamp")
@last_timestamp.setter
def last_timestamp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_timestamp", value)
@property
@pulumi.getter
def message(self) -> Optional[pulumi.Input[str]]:
"""
A human-readable description of the status of this operation.
"""
return pulumi.get(self, "message")
@message.setter
def message(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "message", value)
@property
@pulumi.getter
def reason(self) -> Optional[pulumi.Input[str]]:
"""
This should be a short, machine understandable string that gives the reason for the transition into the object's current status.
"""
return pulumi.get(self, "reason")
@reason.setter
def reason(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reason", value)
@property
@pulumi.getter
def related(self) -> Optional[pulumi.Input['ObjectReferenceArgs']]:
"""
Optional secondary object for more complex actions.
"""
return pulumi.get(self, "related")
@related.setter
def related(self, value: Optional[pulumi.Input['ObjectReferenceArgs']]):
pulumi.set(self, "related", value)
@property
@pulumi.getter(name="reportingComponent")
def reporting_component(self) -> Optional[pulumi.Input[str]]:
"""
Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.
"""
return pulumi.get(self, "reporting_component")
@reporting_component.setter
def reporting_component(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reporting_component", value)
@property
@pulumi.getter(name="reportingInstance")
def reporting_instance(self) -> Optional[pulumi.Input[str]]:
"""
ID of the controller instance, e.g. `kubelet-xyzf`.
"""
return pulumi.get(self, "reporting_instance")
@reporting_instance.setter
def reporting_instance(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reporting_instance", value)
@property
@pulumi.getter
def series(self) -> Optional[pulumi.Input['EventSeriesArgs']]:
"""
Data about the Event series this event represents or nil if it's a singleton Event.
"""
return pulumi.get(self, "series")
@series.setter
def series(self, value: Optional[pulumi.Input['EventSeriesArgs']]):
pulumi.set(self, "series", value)
@property
@pulumi.getter
def source(self) -> Optional[pulumi.Input['EventSourceArgs']]:
"""
The component reporting this event. Should be a short machine understandable string.
"""
return pulumi.get(self, "source")
@source.setter
def source(self, value: Optional[pulumi.Input['EventSourceArgs']]):
pulumi.set(self, "source", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
Type of this event (Normal, Warning), new types could be added in the future
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
class Event(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
action: Optional[pulumi.Input[str]] = None,
api_version: Optional[pulumi.Input[str]] = None,
count: Optional[pulumi.Input[int]] = None,
event_time: Optional[pulumi.Input[str]] = None,
first_timestamp: Optional[pulumi.Input[str]] = None,
involved_object: Optional[pulumi.Input[pulumi.InputType['ObjectReferenceArgs']]] = None,
kind: Optional[pulumi.Input[str]] = None,
last_timestamp: Optional[pulumi.Input[str]] = None,
message: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[pulumi.InputType['_meta.v1.ObjectMetaArgs']]] = None,
reason: Optional[pulumi.Input[str]] = None,
related: Optional[pulumi.Input[pulumi.InputType['ObjectReferenceArgs']]] = None,
reporting_component: Optional[pulumi.Input[str]] = None,
reporting_instance: Optional[pulumi.Input[str]] = None,
series: Optional[pulumi.Input[pulumi.InputType['EventSeriesArgs']]] = None,
source: Optional[pulumi.Input[pulumi.InputType['EventSourceArgs']]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Event is a report of an event somewhere in the cluster. Events have a limited retention time and triggers and messages may evolve with time. Event consumers should not rely on the timing of an event with a given Reason reflecting a consistent underlying trigger, or the continued existence of events with that Reason. Events should be treated as informative, best-effort, supplemental data.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] action: What action was taken/failed regarding to the Regarding object.
:param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
:param pulumi.Input[int] count: The number of times this event has occurred.
:param pulumi.Input[str] event_time: Time when this Event was first observed.
:param pulumi.Input[str] first_timestamp: The time at which the event was first recorded. (Time of server receipt is in TypeMeta.)
:param pulumi.Input[pulumi.InputType['ObjectReferenceArgs']] involved_object: The object that this event is about.
:param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input[str] last_timestamp: The time at which the most recent occurrence of this event was recorded.
:param pulumi.Input[str] message: A human-readable description of the status of this operation.
:param pulumi.Input[pulumi.InputType['_meta.v1.ObjectMetaArgs']] metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
:param pulumi.Input[str] reason: This should be a short, machine understandable string that gives the reason for the transition into the object's current status.
:param pulumi.Input[pulumi.InputType['ObjectReferenceArgs']] related: Optional secondary object for more complex actions.
:param pulumi.Input[str] reporting_component: Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.
:param pulumi.Input[str] reporting_instance: ID of the controller instance, e.g. `kubelet-xyzf`.
:param pulumi.Input[pulumi.InputType['EventSeriesArgs']] series: Data about the Event series this event represents or nil if it's a singleton Event.
:param pulumi.Input[pulumi.InputType['EventSourceArgs']] source: The component reporting this event. Should be a short machine understandable string.
:param pulumi.Input[str] type: Type of this event (Normal, Warning), new types could be added in the future
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: EventInitArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Event is a report of an event somewhere in the cluster. Events have a limited retention time and triggers and messages may evolve with time. Event consumers should not rely on the timing of an event with a given Reason reflecting a consistent underlying trigger, or the continued existence of events with that Reason. Events should be treated as informative, best-effort, supplemental data.
:param str resource_name: The name of the resource.
:param EventInitArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(EventInitArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
action: Optional[pulumi.Input[str]] = None,
api_version: Optional[pulumi.Input[str]] = None,
count: Optional[pulumi.Input[int]] = None,
event_time: Optional[pulumi.Input[str]] = None,
first_timestamp: Optional[pulumi.Input[str]] = None,
involved_object: Optional[pulumi.Input[pulumi.InputType['ObjectReferenceArgs']]] = None,
kind: Optional[pulumi.Input[str]] = None,
last_timestamp: Optional[pulumi.Input[str]] = None,
message: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[pulumi.InputType['_meta.v1.ObjectMetaArgs']]] = None,
reason: Optional[pulumi.Input[str]] = None,
related: Optional[pulumi.Input[pulumi.InputType['ObjectReferenceArgs']]] = None,
reporting_component: Optional[pulumi.Input[str]] = None,
reporting_instance: Optional[pulumi.Input[str]] = None,
series: Optional[pulumi.Input[pulumi.InputType['EventSeriesArgs']]] = None,
source: Optional[pulumi.Input[pulumi.InputType['EventSourceArgs']]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = EventInitArgs.__new__(EventInitArgs)
__props__.__dict__["action"] = action
__props__.__dict__["api_version"] = 'v1'
__props__.__dict__["count"] = count
__props__.__dict__["event_time"] = event_time
__props__.__dict__["first_timestamp"] = first_timestamp
if involved_object is None and not opts.urn:
raise TypeError("Missing required property 'involved_object'")
__props__.__dict__["involved_object"] = involved_object
__props__.__dict__["kind"] = 'Event'
__props__.__dict__["last_timestamp"] = last_timestamp
__props__.__dict__["message"] = message
if metadata is None and not opts.urn:
raise TypeError("Missing required property 'metadata'")
__props__.__dict__["metadata"] = metadata
__props__.__dict__["reason"] = reason
__props__.__dict__["related"] = related
__props__.__dict__["reporting_component"] = reporting_component
__props__.__dict__["reporting_instance"] = reporting_instance
__props__.__dict__["series"] = series
__props__.__dict__["source"] = source
__props__.__dict__["type"] = type
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="kubernetes:events.k8s.io/v1:Event"), pulumi.Alias(type_="kubernetes:events.k8s.io/v1beta1:Event")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Event, __self__).__init__(
'kubernetes:core/v1:Event',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Event':
"""
Get an existing Event resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = EventInitArgs.__new__(EventInitArgs)
__props__.__dict__["action"] = None
__props__.__dict__["api_version"] = None
__props__.__dict__["count"] = None
__props__.__dict__["event_time"] = None
__props__.__dict__["first_timestamp"] = None
__props__.__dict__["involved_object"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["last_timestamp"] = None
__props__.__dict__["message"] = None
__props__.__dict__["metadata"] = None
__props__.__dict__["reason"] = None
__props__.__dict__["related"] = None
__props__.__dict__["reporting_component"] = None
__props__.__dict__["reporting_instance"] = None
__props__.__dict__["series"] = None
__props__.__dict__["source"] = None
__props__.__dict__["type"] = None
return Event(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def action(self) -> pulumi.Output[Optional[str]]:
"""
What action was taken/failed regarding to the Regarding object.
"""
return pulumi.get(self, "action")
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> pulumi.Output[Optional[str]]:
"""
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
"""
return pulumi.get(self, "api_version")
@property
@pulumi.getter
def count(self) -> pulumi.Output[Optional[int]]:
"""
The number of times this event has occurred.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="eventTime")
def event_time(self) -> pulumi.Output[Optional[str]]:
"""
Time when this Event was first observed.
"""
return pulumi.get(self, "event_time")
@property
@pulumi.getter(name="firstTimestamp")
def first_timestamp(self) -> pulumi.Output[Optional[str]]:
"""
The time at which the event was first recorded. (Time of server receipt is in TypeMeta.)
"""
return pulumi.get(self, "first_timestamp")
@property
@pulumi.getter(name="involvedObject")
def involved_object(self) -> pulumi.Output['outputs.ObjectReference']:
"""
The object that this event is about.
"""
return pulumi.get(self, "involved_object")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="lastTimestamp")
def last_timestamp(self) -> pulumi.Output[Optional[str]]:
"""
The time at which the most recent occurrence of this event was recorded.
"""
return pulumi.get(self, "last_timestamp")
@property
@pulumi.getter
def message(self) -> pulumi.Output[Optional[str]]:
"""
A human-readable description of the status of this operation.
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def metadata(self) -> pulumi.Output['_meta.v1.outputs.ObjectMeta']:
"""
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter
def reason(self) -> pulumi.Output[Optional[str]]:
"""
This should be a short, machine understandable string that gives the reason for the transition into the object's current status.
"""
return pulumi.get(self, "reason")
@property
@pulumi.getter
def related(self) -> pulumi.Output[Optional['outputs.ObjectReference']]:
"""
Optional secondary object for more complex actions.
"""
return pulumi.get(self, "related")
@property
@pulumi.getter(name="reportingComponent")
def reporting_component(self) -> pulumi.Output[Optional[str]]:
"""
Name of the controller that emitted this Event, e.g. `kubernetes.io/kubelet`.
"""
return pulumi.get(self, "reporting_component")
@property
@pulumi.getter(name="reportingInstance")
def reporting_instance(self) -> pulumi.Output[Optional[str]]:
"""
ID of the controller instance, e.g. `kubelet-xyzf`.
"""
return pulumi.get(self, "reporting_instance")
@property
@pulumi.getter
def series(self) -> pulumi.Output[Optional['outputs.EventSeries']]:
"""
Data about the Event series this event represents or nil if it's a singleton Event.
"""
return pulumi.get(self, "series")
@property
@pulumi.getter
def source(self) -> pulumi.Output[Optional['outputs.EventSource']]:
"""
The component reporting this event. Should be a short machine understandable string.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter
def type(self) -> pulumi.Output[Optional[str]]:
"""
Type of this event (Normal, Warning), new types could be added in the future
"""
return pulumi.get(self, "type")
| 48.322091 | 401 | 0.658873 | true | true |
f724835605b65f1f1e1ae6d86dd97f931443263b | 2525 | py | Python | stats.py | JoKalliauer/resvg-test-suite | c1a0e510bcdb89275b30caeb3725208304a26754 | ["MIT"] | 1 | 2021-06-02T11:21:41.000Z | 2021-06-02T11:21:41.000Z | stats.py | adarshkrtiwari/resvg-test-suite | c2cb84b630332ea585fc54ba62e62d35fb4a33dd | ["MIT"] | null | null | null | stats.py | adarshkrtiwari/resvg-test-suite | c2cb84b630332ea585fc54ba62e62d35fb4a33dd | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Usage:
# ./stats.py results.csv chart.svg
# ./stats.py official.csv official_chart.svg
import argparse
import csv
import json
import subprocess
UNKNOWN = 0
PASSED = 1
FAILED = 2
CRASHED = 3
PARTIAL = 4
OUT_OF_SCOPE = 5
class RowData:
def __init__(self, name, flags):
self.name = name
self.flags = flags
parser = argparse.ArgumentParser()
parser.add_argument('input', help='CSV file')
parser.add_argument('output', help='SVG file')
args = parser.parse_args()
rows = []
with open(args.input, 'r') as f:
for row in csv.reader(f):
if row[0] == 'title':
continue
file_name = row[0]
flags = [int(row[1]), int(row[2]), int(row[3]), int(row[4]),
int(row[5]), int(row[6]), int(row[7]), int(row[8]),
int(row[9])]
rows.append(RowData(file_name, flags))
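# Hedged note on the expected CSV shape, reconstructed from the parsing above:
# a 'title' header row, then one row per test with a name and nine result
# flags. Judging by how `passed` is used below, the flag columns are ordered
# Chromium, Firefox, resvg, Batik, Inkscape, librsvg, SVG.NET, wxSvg, QtSvg
# (the header names in this sample are assumptions):
#
#   title,chromium,firefox,resvg,batik,inkscape,librsvg,svgnet,wxsvg,qtsvg
#   a-fill-001.svg,1,1,1,2,1,1,2,5,3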
passed = [0, 0, 0, 0, 0, 0, 0, 0, 0]
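# A flag of UNKNOWN is counted as a pass as well; presumably untriaged
# results are given the benefit of the doubt.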
for row in rows:
for idx, flag in enumerate(row.flags):
if flag == PASSED or flag == UNKNOWN:
passed[idx] = passed[idx] + 1
barh_data = json.dumps(
{
"items_font": {
"family": "Arial",
"size": 12
},
"items": [
{
"name": "resvg 0.14.1",
"value": passed[2]
},
{
"name": "Chromium r856583",
"value": passed[0]
},
{
"name": "Firefox 87",
"value": passed[1]
},
{
"name": "Inkscape 1.0.2",
"value": passed[4]
},
{
"name": "librsvg 2.51.1",
"value": passed[5]
},
{
"name": "Batik 1.14",
"value": passed[3]
},
{
"name": "SVG.NET 3.2.3",
"value": passed[6]
},
{
"name": "QtSvg 5.15.2",
"value": passed[8]
},
{
"name": "wxSvg 1.5.11",
"value": passed[7]
}
],
"hor_axis": {
"title": "Tests passed",
"round_tick_values": True,
"width": 700,
"max_value": len(rows)
}
}, indent=4)
with open('chart.json', 'w') as f:
f.write(barh_data)
try:
subprocess.check_call(['./barh', 'chart.json', 'site/images/' + args.output])
except FileNotFoundError:
print('Error: \'barh\' executable is not found.\n'
'You should build https://github.com/RazrFalcon/barh '
          'and link the resulting binary to the current directory.')
| 22.747748 | 81 | 0.486337 | true | true |
f724837677091a92b829fe1dee99ea0c985b84da | 4804 | py | Python | docs/conf.py | cartovarc/mac-to-ip | 94098096297fe3d64022ecc850923d3cdc9691b1 | ["MIT"] | null | null | null | docs/conf.py | cartovarc/mac-to-ip | 94098096297fe3d64022ecc850923d3cdc9691b1 | ["MIT"] | null | null | null | docs/conf.py | cartovarc/mac-to-ip | 94098096297fe3d64022ecc850923d3cdc9691b1 | ["MIT"] | null | null | null |
#!/usr/bin/env python
#
# mac_to_ip documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 9 13:47:02 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import mac_to_ip
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'MAC to IP'
copyright = "2021, Carlos Tovar"
author = "Carlos Tovar"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = mac_to_ip.__version__
# The full version, including alpha/beta/rc tags.
release = mac_to_ip.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ---------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'mac_to_ipdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto, manual, or own class]).
latex_documents = [
(master_doc, 'mac_to_ip.tex',
'MAC to IP Documentation',
'Carlos Tovar', 'manual'),
]
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'mac_to_ip',
'MAC to IP Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'mac_to_ip',
'MAC to IP Documentation',
author,
'mac_to_ip',
'One line description of project.',
'Miscellaneous'),
]
| 29.472393 | 77 | 0.684013 |
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import mac_to_ip
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'MAC to IP'
copyright = "2021, Carlos Tovar"
author = "Carlos Tovar"
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = mac_to_ip.__version__
# The full version, including alpha/beta/rc tags.
release = mac_to_ip.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ---------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'mac_to_ipdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto, manual, or own class]).
latex_documents = [
(master_doc, 'mac_to_ip.tex',
'MAC to IP Documentation',
'Carlos Tovar', 'manual'),
]
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'mac_to_ip',
'MAC to IP Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'mac_to_ip',
'MAC to IP Documentation',
author,
'mac_to_ip',
'One line description of project.',
'Miscellaneous'),
]
| true | true |
f72483c20810353d6391f7a8ef332c319e49ecf0 | 2,956 | py | Python | pygyver/etl/ometria.py | madedotcom/pygyver | 77da52570951c4ddaba3d60f36a82c64828a9121 | [
"MIT"
] | 1 | 2020-11-25T11:33:11.000Z | 2020-11-25T11:33:11.000Z | pygyver/etl/ometria.py | madedotcom/pygyver | 77da52570951c4ddaba3d60f36a82c64828a9121 | [
"MIT"
] | 21 | 2020-04-17T17:21:05.000Z | 2021-05-21T13:34:51.000Z | pygyver/etl/ometria.py | madedotcom/pygyver | 77da52570951c4ddaba3d60f36a82c64828a9121 | [
"MIT"
] | 2 | 2021-03-05T14:13:00.000Z | 2021-12-30T18:36:40.000Z | """
Ometria API
http://docs.ometria.com/apis/data_api_v2/
- the env vars for the authentication are stored on the kubernetes cluster
under 'ometria-access-credentials'
- functionality:
set api credentials
send custom events
"""
import logging
import os
import requests
class OmetriaExecutor:
"""
Ometria API handler.
Args:
env: switch between environments in Ometria,
'staging' - for testing
'prod_marketing' - for marketing emails
'prod_service' - for service emails
Attributes:
api_endpoint: the base API endpoint
api_key: required for authentication
api_headers: header to be included in the request
payload: the formatted payload to be sent
response: the response from the API call
Returns:
OmetriaExecutor object
"""
def __init__(self, env: str):
"""
Initiate and collect API credentials.
"""
self.env = env
self.api_endpoint = "https://api.ometria.com/v2"
self.api_key = None
self.api_headers = None
self.payload = None
self.response = None
self.set_api_credentials()
def set_api_credentials(self):
"""
Collect API credentials depending on the environment.
"""
# api key
if self.env == "staging":
api_key_env_var = "OMETRIA_STAGING_API_KEY"
elif self.env == "prod_marketing":
api_key_env_var = "OMETRIA_MARKETING_API_KEY"
elif self.env == "prod_service":
api_key_env_var = "OMETRIA_SERVICE_API_KEY"
else:
raise KeyError(f"Unknown env - {self.env}")
if api_key_env_var in os.environ:
self.api_key = os.getenv(api_key_env_var)
logging.info("API credentials set")
else:
raise KeyError(f"Env var {api_key_env_var} does not exist")
# headers
self.api_headers = {
"X-Ometria-Auth": self.api_key,
"Content-Type": "application/json"
}
def send_custom_events(self):
"""
Send custom_event type of payload to Ometria, save the API response.
"""
if self.payload:
# check if payload length is valid - 100 items per send
payload_len = len(self.payload)
if payload_len <= 100:
# request - not adding retry for POST request
self.response = requests.post(
json=self.payload,
url=f"{self.api_endpoint}/push",
headers=self.api_headers
)
logging.info(f"Sent {payload_len} 'custom_events' items")
self.payload = None
else:
raise ValueError(
f"Payload too big - {payload_len}, max 100 items"
)
else:
logging.info("No send - empty payload")
| 30.163265 | 76 | 0.579838 | import logging
import os
import requests
class OmetriaExecutor:
def __init__(self, env: str):
self.env = env
self.api_endpoint = "https://api.ometria.com/v2"
self.api_key = None
self.api_headers = None
self.payload = None
self.response = None
self.set_api_credentials()
def set_api_credentials(self):
if self.env == "staging":
api_key_env_var = "OMETRIA_STAGING_API_KEY"
elif self.env == "prod_marketing":
api_key_env_var = "OMETRIA_MARKETING_API_KEY"
elif self.env == "prod_service":
api_key_env_var = "OMETRIA_SERVICE_API_KEY"
else:
raise KeyError(f"Unknown env - {self.env}")
if api_key_env_var in os.environ:
self.api_key = os.getenv(api_key_env_var)
logging.info("API credentials set")
else:
raise KeyError(f"Env var {api_key_env_var} does not exist")
self.api_headers = {
"X-Ometria-Auth": self.api_key,
"Content-Type": "application/json"
}
def send_custom_events(self):
if self.payload:
payload_len = len(self.payload)
if payload_len <= 100:
self.response = requests.post(
json=self.payload,
url=f"{self.api_endpoint}/push",
headers=self.api_headers
)
logging.info(f"Sent {payload_len} 'custom_events' items")
self.payload = None
else:
raise ValueError(
f"Payload too big - {payload_len}, max 100 items"
)
else:
logging.info("No send - empty payload")
| true | true |
f724846c02029786eec80ed57f3b3c256cf3045a | 1,366 | py | Python | src/csharp/CSharpTemplate.py | slash-under/codenn | 747a7c5c6788256cdb1564d0936b5ea91f43ba6c | [
"MIT"
] | 216 | 2016-06-28T18:44:28.000Z | 2022-03-26T10:24:03.000Z | src/csharp/CSharpTemplate.py | slash-under/codenn | 747a7c5c6788256cdb1564d0936b5ea91f43ba6c | [
"MIT"
] | 17 | 2016-07-22T23:43:27.000Z | 2021-06-09T16:36:54.000Z | src/csharp/CSharpTemplate.py | slash-under/codenn | 747a7c5c6788256cdb1564d0936b5ea91f43ba6c | [
"MIT"
] | 86 | 2016-07-02T06:56:31.000Z | 2021-09-14T06:24:46.000Z |
import antlr4
from csharp.CSharp4Lexer import CSharp4Lexer
import re
def parseCSharp(code):
code = code.replace('\\n', '\n')
parsedVersion = []
stream = antlr4.InputStream(code)
lexer = CSharp4Lexer(stream)
toks = antlr4.CommonTokenStream(lexer)
toks.fetch(500)
identifiers = {}
identCount = 0
for token in toks.tokens:
if token.type == 109:
parsedVersion += ["CODE_INTEGER"]
elif token.type == 111:
parsedVersion += ["CODE_REAL"]
elif token.type == 112:
parsedVersion += ["CODE_CHAR"]
elif token.type == 113:
parsedVersion += ["CODE_STRING"]
elif token.type == 9 or token.type == 7 or token.type == 6: # whitespace and comments and newline
pass
else:
parsedVersion += [str(token.text)]
return parsedVersion
if __name__ == '__main__':
print parseCSharp("public Boolean SomeValue { get { return someValue; } set { someValue = value; } }")
print parseCSharp("Console.WriteLine('cat'); int mouse = 5; int cat = 0.4; int cow = 'c'; int moo = \"mouse\"; ")
print parseCSharp("int i = 4; // i is assigned the literal value of '4' \n int j = i // j is assigned the value of i. Since i is a variable, //it can change and is not a 'literal'")
try:
print parseCSharp('string `fixed = Regex.Replace(input, "\s*()","$1");');
except:
print "Error"
| 32.52381 | 201 | 0.635432 |
import antlr4
from csharp.CSharp4Lexer import CSharp4Lexer
import re
def parseCSharp(code):
code = code.replace('\\n', '\n')
parsedVersion = []
stream = antlr4.InputStream(code)
lexer = CSharp4Lexer(stream)
toks = antlr4.CommonTokenStream(lexer)
toks.fetch(500)
identifiers = {}
identCount = 0
for token in toks.tokens:
if token.type == 109:
parsedVersion += ["CODE_INTEGER"]
elif token.type == 111:
parsedVersion += ["CODE_REAL"]
elif token.type == 112:
parsedVersion += ["CODE_CHAR"]
elif token.type == 113:
parsedVersion += ["CODE_STRING"]
elif token.type == 9 or token.type == 7 or token.type == 6:
pass
else:
parsedVersion += [str(token.text)]
return parsedVersion
if __name__ == '__main__':
print parseCSharp("public Boolean SomeValue { get { return someValue; } set { someValue = value; } }")
print parseCSharp("Console.WriteLine('cat'); int mouse = 5; int cat = 0.4; int cow = 'c'; int moo = \"mouse\"; ")
print parseCSharp("int i = 4; // i is assigned the literal value of '4' \n int j = i // j is assigned the value of i. Since i is a variable, //it can change and is not a 'literal'")
try:
print parseCSharp('string `fixed = Regex.Replace(input, "\s*()","$1");');
except:
print "Error"
| false | true |
f724849d81b90b9dba25cf9ce12481c3d66b2f7c | 38,765 | py | Python | airflow/models/baseoperator.py | subrays/airflow | 3c8c0b3b6411762a4e4977e519374d9fb16b541d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | airflow/models/baseoperator.py | subrays/airflow | 3c8c0b3b6411762a4e4977e519374d9fb16b541d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | airflow/models/baseoperator.py | subrays/airflow | 3c8c0b3b6411762a4e4977e519374d9fb16b541d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from abc import ABCMeta, abstractmethod
from cached_property import cached_property
import copy
import functools
import logging
import sys
import warnings
from datetime import timedelta, datetime
from typing import Callable, Dict, Iterable, List, Optional, Set
import jinja2
import six
from airflow import configuration, settings
from airflow.exceptions import AirflowException
from airflow.lineage import prepare_lineage, apply_lineage, DataSet
from airflow.models.dag import DAG
from airflow.models.taskinstance import TaskInstance, clear_task_instances
from airflow.models.xcom import XCOM_RETURN_KEY
from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep
from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
from airflow.utils import timezone
from airflow.utils.db import provide_session
from airflow.utils.decorators import apply_defaults
from airflow.utils.helpers import validate_key
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.operator_resources import Resources
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.weight_rule import WeightRule
@functools.total_ordering
class BaseOperator(LoggingMixin):
"""
Abstract base class for all operators. Since operators create objects that
become nodes in the dag, BaseOperator contains many recursive methods for
dag crawling behavior. To derive this class, you are expected to override
the constructor as well as the 'execute' method.
Operators derived from this class should perform or trigger certain tasks
synchronously (wait for completion). Example of operators could be an
operator that runs a Pig job (PigOperator), a sensor operator that
waits for a partition to land in Hive (HiveSensorOperator), or one that
moves data from Hive to MySQL (Hive2MySqlOperator). Instances of these
operators (tasks) target specific operations, running specific scripts,
functions or data transfers.
This class is abstract and shouldn't be instantiated. Instantiating a
class derived from this one results in the creation of a task object,
which ultimately becomes a node in DAG objects. Task dependencies should
be set by using the set_upstream and/or set_downstream methods.
:param task_id: a unique, meaningful id for the task
:type task_id: str
:param owner: the owner of the task, using the unix username is recommended
:type owner: str
:param retries: the number of retries that should be performed before
failing the task
:type retries: int
:param retry_delay: delay between retries
:type retry_delay: datetime.timedelta
:param retry_exponential_backoff: allow progressive longer waits between
retries by using exponential backoff algorithm on retry delay (delay
will be converted into seconds)
:type retry_exponential_backoff: bool
:param max_retry_delay: maximum delay interval between retries
:type max_retry_delay: datetime.timedelta
:param start_date: The ``start_date`` for the task, determines
the ``execution_date`` for the first task instance. The best practice
is to have the start_date rounded
to your DAG's ``schedule_interval``. Daily jobs have their start_date
some day at 00:00:00, hourly jobs have their start_date at 00:00
of a specific hour. Note that Airflow simply looks at the latest
``execution_date`` and adds the ``schedule_interval`` to determine
the next ``execution_date``. It is also very important
to note that different tasks' dependencies
need to line up in time. If task A depends on task B and their
start_date are offset in a way that their execution_date don't line
up, A's dependencies will never be met. If you are looking to delay
a task, for example running a daily task at 2AM, look into the
``TimeSensor`` and ``TimeDeltaSensor``. We advise against using
dynamic ``start_date`` and recommend using fixed ones. Read the
FAQ entry about start_date for more information.
:type start_date: datetime.datetime
:param end_date: if specified, the scheduler won't go beyond this date
:type end_date: datetime.datetime
:param depends_on_past: when set to true, task instances will run
sequentially while relying on the previous task's schedule to
succeed. The task instance for the start_date is allowed to run.
:type depends_on_past: bool
:param wait_for_downstream: when set to true, an instance of task
X will wait for tasks immediately downstream of the previous instance
of task X to finish successfully before it runs. This is useful if the
different instances of a task X alter the same asset, and this asset
is used by tasks downstream of task X. Note that depends_on_past
is forced to True wherever wait_for_downstream is used.
:type wait_for_downstream: bool
:param queue: which queue to target when running this job. Not
        all executors implement queue management; the CeleryExecutor
does support targeting specific queues.
:type queue: str
:param dag: a reference to the dag the task is attached to (if any)
:type dag: airflow.models.DAG
    :param priority_weight: priority weight of this task against other tasks.
This allows the executor to trigger higher priority tasks before
others when things get backed up. Set priority_weight as a higher
number for more important tasks.
:type priority_weight: int
:param weight_rule: weighting method used for the effective total
priority weight of the task. Options are:
``{ downstream | upstream | absolute }`` default is ``downstream``
When set to ``downstream`` the effective weight of the task is the
aggregate sum of all downstream descendants. As a result, upstream
tasks will have higher weight and will be scheduled more aggressively
when using positive weight values. This is useful when you have
multiple dag run instances and desire to have all upstream tasks to
complete for all runs before each dag can continue processing
downstream tasks. When set to ``upstream`` the effective weight is the
        aggregate sum of all upstream ancestors. This is the opposite, where
        downstream tasks have higher weight and will be scheduled more
aggressively when using positive weight values. This is useful when you
have multiple dag run instances and prefer to have each dag complete
before starting upstream tasks of other dags. When set to
``absolute``, the effective weight is the exact ``priority_weight``
specified without additional weighting. You may want to do this when
you know exactly what priority weight each task should have.
        Additionally, when set to ``absolute``, there is a bonus effect of
        significantly speeding up the task creation process for very large
        DAGs. Options can be set as a string or using the constants defined in
the static class ``airflow.utils.WeightRule``
:type weight_rule: str
:param pool: the slot pool this task should run in, slot pools are a
way to limit concurrency for certain tasks
:type pool: str
:param sla: time by which the job is expected to succeed. Note that
this represents the ``timedelta`` after the period is closed. For
example if you set an SLA of 1 hour, the scheduler would send an email
soon after 1:00AM on the ``2016-01-02`` if the ``2016-01-01`` instance
has not succeeded yet.
        The scheduler pays special attention to jobs with an SLA and
        sends alert
        emails for SLA misses. SLA misses are also recorded in the database
for future reference. All tasks that share the same SLA time
get bundled in a single email, sent soon after that time. SLA
        notifications are sent once and only once for each task instance.
:type sla: datetime.timedelta
:param execution_timeout: max time allowed for the execution of
        this task instance; if it goes beyond, it will raise and fail.
:type execution_timeout: datetime.timedelta
:param on_failure_callback: a function to be called when a task instance
        of this task fails. A context dictionary is passed as a single
parameter to this function. Context contains references to related
objects to the task instance and is documented under the macros
section of the API.
:type on_failure_callback: callable
:param on_retry_callback: much like the ``on_failure_callback`` except
that it is executed when retries occur.
:type on_retry_callback: callable
:param on_success_callback: much like the ``on_failure_callback`` except
that it is executed when the task succeeds.
:type on_success_callback: callable
:param trigger_rule: defines the rule by which dependencies are applied
for the task to get triggered. Options are:
``{ all_success | all_failed | all_done | one_success |
one_failed | none_failed | none_skipped | dummy}``
        default is ``all_success``. Options can be set as a string or
using the constants defined in the static class
``airflow.utils.TriggerRule``
:type trigger_rule: str
:param resources: A map of resource parameter names (the argument names of the
Resources constructor) to their values.
:type resources: dict
:param run_as_user: unix username to impersonate while running the task
:type run_as_user: str
:param task_concurrency: When set, a task will be able to limit the concurrent
runs across execution_dates
:type task_concurrency: int
:param executor_config: Additional task-level configuration parameters that are
interpreted by a specific executor. Parameters are namespaced by the name of
executor.
**Example**: to run this task in a specific docker container through
the KubernetesExecutor ::
MyOperator(...,
executor_config={
"KubernetesExecutor":
{"image": "myCustomDockerImage"}
}
)
:type executor_config: dict
:param do_xcom_push: if True, an XCom is pushed containing the Operator's
result
:type do_xcom_push: bool
"""
# For derived classes to define which fields will get jinjaified
template_fields = [] # type: Iterable[str]
# Defines which files extensions to look for in the templated fields
template_ext = [] # type: Iterable[str]
# Defines the color in the UI
ui_color = '#fff'
ui_fgcolor = '#000'
# base list which includes all the attrs that don't need deep copy.
_base_operator_shallow_copy_attrs = ('user_defined_macros',
'user_defined_filters',
'params',
'_log',)
# each operator should override this class attr for shallow copy attrs.
shallow_copy_attrs = () # type: Iterable[str]
# Defines the operator level extra links
operator_extra_links = () # type: Iterable[BaseOperatorLink]
@apply_defaults
def __init__(
self,
task_id: str,
owner: str = configuration.conf.get('operators', 'DEFAULT_OWNER'),
email: Optional[str] = None,
email_on_retry: bool = True,
email_on_failure: bool = True,
retries: int = 0,
retry_delay: timedelta = timedelta(seconds=300),
retry_exponential_backoff: bool = False,
max_retry_delay: Optional[datetime] = None,
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
schedule_interval=None, # not hooked as of now
depends_on_past: bool = False,
wait_for_downstream: bool = False,
dag: Optional[DAG] = None,
params: Optional[Dict] = None,
default_args: Optional[Dict] = None,
priority_weight: int = 1,
weight_rule: str = WeightRule.DOWNSTREAM,
queue: str = configuration.conf.get('celery', 'default_queue'),
pool: Optional[str] = None,
sla: Optional[timedelta] = None,
execution_timeout: Optional[timedelta] = None,
on_failure_callback: Optional[Callable] = None,
on_success_callback: Optional[Callable] = None,
on_retry_callback: Optional[Callable] = None,
trigger_rule: str = TriggerRule.ALL_SUCCESS,
resources: Optional[Dict] = None,
run_as_user: Optional[str] = None,
task_concurrency: Optional[int] = None,
executor_config: Optional[Dict] = None,
do_xcom_push: bool = True,
inlets: Optional[Dict] = None,
outlets: Optional[Dict] = None,
*args,
**kwargs
):
if args or kwargs:
# TODO remove *args and **kwargs in Airflow 2.0
warnings.warn(
'Invalid arguments were passed to {c} (task_id: {t}). '
'Support for passing such arguments will be dropped in '
'Airflow 2.0. Invalid arguments were:'
'\n*args: {a}\n**kwargs: {k}'.format(
c=self.__class__.__name__, a=args, k=kwargs, t=task_id),
category=PendingDeprecationWarning,
stacklevel=3
)
validate_key(task_id)
self.task_id = task_id
self.owner = owner
self.email = email
self.email_on_retry = email_on_retry
self.email_on_failure = email_on_failure
self.start_date = start_date
if start_date and not isinstance(start_date, datetime):
self.log.warning("start_date for %s isn't datetime.datetime", self)
elif start_date:
self.start_date = timezone.convert_to_utc(start_date)
self.end_date = end_date
if end_date:
self.end_date = timezone.convert_to_utc(end_date)
if not TriggerRule.is_valid(trigger_rule):
raise AirflowException(
"The trigger_rule must be one of {all_triggers},"
"'{d}.{t}'; received '{tr}'."
.format(all_triggers=TriggerRule.all_triggers(),
d=dag.dag_id if dag else "", t=task_id, tr=trigger_rule))
self.trigger_rule = trigger_rule
self.depends_on_past = depends_on_past
self.wait_for_downstream = wait_for_downstream
if wait_for_downstream:
self.depends_on_past = True
if schedule_interval:
self.log.warning(
"schedule_interval is used for %s, though it has "
"been deprecated as a task parameter, you need to "
"specify it as a DAG parameter instead",
self
)
self._schedule_interval = schedule_interval
self.retries = retries
self.queue = queue
self.pool = pool
self.sla = sla
self.execution_timeout = execution_timeout
self.on_failure_callback = on_failure_callback
self.on_success_callback = on_success_callback
self.on_retry_callback = on_retry_callback
if isinstance(retry_delay, timedelta):
self.retry_delay = retry_delay
else:
self.log.debug("Retry_delay isn't timedelta object, assuming secs")
self.retry_delay = timedelta(seconds=retry_delay)
self.retry_exponential_backoff = retry_exponential_backoff
self.max_retry_delay = max_retry_delay
self.params = params or {} # Available in templates!
self.priority_weight = priority_weight
if not WeightRule.is_valid(weight_rule):
raise AirflowException(
"The weight_rule must be one of {all_weight_rules},"
"'{d}.{t}'; received '{tr}'."
.format(all_weight_rules=WeightRule.all_weight_rules,
d=dag.dag_id if dag else "", t=task_id, tr=weight_rule))
self.weight_rule = weight_rule
self.resources = Resources(**(resources or {}))
self.run_as_user = run_as_user
self.task_concurrency = task_concurrency
self.executor_config = executor_config or {}
self.do_xcom_push = do_xcom_push
# Private attributes
self._upstream_task_ids = set() # type: Set[str]
self._downstream_task_ids = set() # type: Set[str]
if not dag and settings.CONTEXT_MANAGER_DAG:
dag = settings.CONTEXT_MANAGER_DAG
if dag:
self.dag = dag
self._log = logging.getLogger("airflow.task.operators")
# lineage
self.inlets = [] # type: List[DataSet]
self.outlets = [] # type: List[DataSet]
self.lineage_data = None
self._inlets = {
"auto": False,
"task_ids": [],
"datasets": [],
}
self._outlets = {
"datasets": [],
} # type: Dict
if inlets:
self._inlets.update(inlets)
if outlets:
self._outlets.update(outlets)
self._comps = {
'task_id',
'dag_id',
'owner',
'email',
'email_on_retry',
'retry_delay',
'retry_exponential_backoff',
'max_retry_delay',
'start_date',
'schedule_interval',
'depends_on_past',
'wait_for_downstream',
'priority_weight',
'sla',
'execution_timeout',
'on_failure_callback',
'on_success_callback',
'on_retry_callback',
'do_xcom_push',
}
def __eq__(self, other):
if (type(self) == type(other) and
self.task_id == other.task_id):
return all(self.__dict__.get(c, None) == other.__dict__.get(c, None) for c in self._comps)
return False
def __ne__(self, other):
return not self == other
def __lt__(self, other):
return self.task_id < other.task_id
def __hash__(self):
hash_components = [type(self)]
for c in self._comps:
val = getattr(self, c, None)
try:
hash(val)
hash_components.append(val)
except TypeError:
hash_components.append(repr(val))
return hash(tuple(hash_components))
# Composing Operators -----------------------------------------------
def __rshift__(self, other):
"""
Implements Self >> Other == self.set_downstream(other)
If "Other" is a DAG, the DAG is assigned to the Operator.
"""
if isinstance(other, DAG):
# if this dag is already assigned, do nothing
# otherwise, do normal dag assignment
if not (self.has_dag() and self.dag is other):
self.dag = other
else:
self.set_downstream(other)
return other
def __lshift__(self, other):
"""
Implements Self << Other == self.set_upstream(other)
If "Other" is a DAG, the DAG is assigned to the Operator.
"""
if isinstance(other, DAG):
# if this dag is already assigned, do nothing
# otherwise, do normal dag assignment
if not (self.has_dag() and self.dag is other):
self.dag = other
else:
self.set_upstream(other)
return other
def __rrshift__(self, other):
"""
Called for [DAG] >> [Operator] because DAGs don't have
__rshift__ operators.
"""
self.__lshift__(other)
return self
def __rlshift__(self, other):
"""
Called for [DAG] << [Operator] because DAGs don't have
__lshift__ operators.
"""
self.__rshift__(other)
return self
# /Composing Operators ---------------------------------------------
@property
def dag(self):
"""
Returns the Operator's DAG if set, otherwise raises an error
"""
if self.has_dag():
return self._dag
else:
raise AirflowException(
'Operator {} has not been assigned to a DAG yet'.format(self))
@dag.setter
def dag(self, dag):
"""
Operators can be assigned to one DAG, one time. Repeat assignments to
that same DAG are ok.
"""
if not isinstance(dag, DAG):
raise TypeError(
'Expected DAG; received {}'.format(dag.__class__.__name__))
elif self.has_dag() and self.dag is not dag:
raise AirflowException(
"The DAG assigned to {} can not be changed.".format(self))
elif self.task_id not in dag.task_dict:
dag.add_task(self)
self._dag = dag
def has_dag(self):
"""
Returns True if the Operator has been assigned to a DAG.
"""
return getattr(self, '_dag', None) is not None
@property
def dag_id(self):
if self.has_dag():
return self.dag.dag_id
else:
return 'adhoc_' + self.owner
@property
def deps(self):
"""
Returns the list of dependencies for the operator. These differ from execution
context dependencies in that they are specific to tasks and can be
extended/overridden by subclasses.
"""
return {
NotInRetryPeriodDep(),
PrevDagrunDep(),
TriggerRuleDep(),
}
@property
def schedule_interval(self):
"""
The schedule interval of the DAG always wins over individual tasks so
that tasks within a DAG always line up. The task still needs a
schedule_interval as it may not be attached to a DAG.
"""
if self.has_dag():
return self.dag._schedule_interval
else:
return self._schedule_interval
@property
def priority_weight_total(self):
if self.weight_rule == WeightRule.ABSOLUTE:
return self.priority_weight
elif self.weight_rule == WeightRule.DOWNSTREAM:
upstream = False
elif self.weight_rule == WeightRule.UPSTREAM:
upstream = True
else:
upstream = False
return self.priority_weight + sum(
map(lambda task_id: self._dag.task_dict[task_id].priority_weight,
self.get_flat_relative_ids(upstream=upstream))
)
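    # A short sketch (not part of the original file) of the weight rules
    # computed above for a chain t1 >> t2 >> t3 with priority_weight=1 each:
    #
    #   downstream: t1=3, t2=2, t3=1   (own weight + all descendants)
    #   upstream:   t1=1, t2=2, t3=3   (own weight + all ancestors)
    #   absolute:   t1=1, t2=1, t3=1   (own weight only)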
@cached_property
def operator_extra_link_dict(self):
return {link.name: link for link in self.operator_extra_links}
@cached_property
def global_operator_extra_link_dict(self):
from airflow.plugins_manager import global_operator_extra_links
return {link.name: link for link in global_operator_extra_links}
@prepare_lineage
def pre_execute(self, context):
"""
This hook is triggered right before self.execute() is called.
"""
def execute(self, context):
"""
This is the main method to derive when creating an operator.
Context is the same dictionary used as when rendering jinja templates.
Refer to get_template_context for more context.
"""
raise NotImplementedError()
@apply_lineage
def post_execute(self, context, result=None):
"""
This hook is triggered right after self.execute() is called.
It is passed the execution context and any results returned by the
operator.
"""
def on_kill(self):
"""
Override this method to cleanup subprocesses when a task instance
gets killed. Any use of the threading, subprocess or multiprocessing
module within an operator needs to be cleaned up or it will leave
ghost processes behind.
"""
def __deepcopy__(self, memo):
"""
Hack sorting double chained task lists by task_id to avoid hitting
max_depth on deepcopy operations.
"""
sys.setrecursionlimit(5000) # TODO fix this in a better way
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
shallow_copy = cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs
for k, v in self.__dict__.items():
if k not in shallow_copy:
setattr(result, k, copy.deepcopy(v, memo))
else:
setattr(result, k, copy.copy(v))
return result
def __getstate__(self):
state = dict(self.__dict__)
del state['_log']
return state
def __setstate__(self, state):
self.__dict__ = state
self._log = logging.getLogger("airflow.task.operators")
def render_template_from_field(self, attr, content, context, jinja_env):
"""
Renders a template from a field. If the field is a string, it will
simply render the string and return the result. If it is a collection or
nested set of collections, it will traverse the structure and render
all elements in it. If the field has another type, it will return it as it is.
"""
rt = self.render_template
if isinstance(content, six.string_types):
result = jinja_env.from_string(content).render(**context)
elif isinstance(content, (list, tuple)):
result = [rt(attr, e, context) for e in content]
elif isinstance(content, dict):
result = {
k: rt("{}[{}]".format(attr, k), v, context)
for k, v in list(content.items())}
else:
result = content
return result
def render_template(self, attr, content, context):
"""
Renders a template either from a file or directly in a field, and returns
the rendered result.
"""
jinja_env = self.get_template_env()
exts = self.__class__.template_ext
if (
isinstance(content, six.string_types) and
any([content.endswith(ext) for ext in exts])):
return jinja_env.get_template(content).render(**context)
else:
return self.render_template_from_field(attr, content, context, jinja_env)
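    # A minimal sketch (not part of the original file) of rendering a plain
    # string field with the method above; the template and context are
    # illustrative only:
    #
    #   rendered = task.render_template(
    #       'bash_command', 'echo {{ ds }}', {'ds': '2019-01-01'})
    #   # rendered == 'echo 2019-01-01'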
def get_template_env(self):
return self.dag.get_template_env() \
if hasattr(self, 'dag') \
else jinja2.Environment(cache_size=0)
def prepare_template(self):
"""
Hook that is triggered after the templated fields get replaced
by their content. If you need your operator to alter the
content of the file before the template is rendered,
it should override this method to do so.
"""
def resolve_template_files(self):
# Getting the content of files for template_field / template_ext
for attr in self.template_fields:
content = getattr(self, attr)
if content is None:
continue
elif isinstance(content, six.string_types) and \
any([content.endswith(ext) for ext in self.template_ext]):
env = self.get_template_env()
try:
setattr(self, attr, env.loader.get_source(env, content)[0])
except Exception as e:
self.log.exception(e)
elif isinstance(content, list):
env = self.dag.get_template_env()
for i in range(len(content)):
if isinstance(content[i], six.string_types) and \
any([content[i].endswith(ext) for ext in self.template_ext]):
try:
content[i] = env.loader.get_source(env, content[i])[0]
except Exception as e:
self.log.exception(e)
self.prepare_template()
@property
def upstream_list(self):
"""@property: list of tasks directly upstream"""
return [self.dag.get_task(tid) for tid in self._upstream_task_ids]
@property
def upstream_task_ids(self):
return self._upstream_task_ids
@property
def downstream_list(self):
"""@property: list of tasks directly downstream"""
return [self.dag.get_task(tid) for tid in self._downstream_task_ids]
@property
def downstream_task_ids(self):
return self._downstream_task_ids
@provide_session
def clear(self,
start_date=None,
end_date=None,
upstream=False,
downstream=False,
session=None):
"""
Clears the state of task instances associated with the task, following
the parameters specified.
"""
TI = TaskInstance
qry = session.query(TI).filter(TI.dag_id == self.dag_id)
if start_date:
qry = qry.filter(TI.execution_date >= start_date)
if end_date:
qry = qry.filter(TI.execution_date <= end_date)
tasks = [self.task_id]
if upstream:
tasks += [
t.task_id for t in self.get_flat_relatives(upstream=True)]
if downstream:
tasks += [
t.task_id for t in self.get_flat_relatives(upstream=False)]
qry = qry.filter(TI.task_id.in_(tasks))
count = qry.count()
clear_task_instances(qry.all(), session, dag=self.dag)
session.commit()
return count
@provide_session
def get_task_instances(self, start_date=None, end_date=None, session=None):
"""
        Get the set of task instances related to this task for a specific date
range.
"""
end_date = end_date or timezone.utcnow()
return session.query(TaskInstance)\
.filter(TaskInstance.dag_id == self.dag_id)\
.filter(TaskInstance.task_id == self.task_id)\
.filter(TaskInstance.execution_date >= start_date)\
.filter(TaskInstance.execution_date <= end_date)\
.order_by(TaskInstance.execution_date)\
.all()
def get_flat_relative_ids(self, upstream=False, found_descendants=None):
"""
Get a flat list of relatives' ids, either upstream or downstream.
"""
if not found_descendants:
found_descendants = set()
relative_ids = self.get_direct_relative_ids(upstream)
for relative_id in relative_ids:
if relative_id not in found_descendants:
found_descendants.add(relative_id)
relative_task = self._dag.task_dict[relative_id]
relative_task.get_flat_relative_ids(upstream,
found_descendants)
return found_descendants
def get_flat_relatives(self, upstream=False):
"""
Get a flat list of relatives, either upstream or downstream.
"""
return list(map(lambda task_id: self._dag.task_dict[task_id],
self.get_flat_relative_ids(upstream)))
def run(
self,
start_date=None,
end_date=None,
ignore_first_depends_on_past=False,
ignore_ti_state=False,
mark_success=False):
"""
Run a set of task instances for a date range.
"""
start_date = start_date or self.start_date
end_date = end_date or self.end_date or timezone.utcnow()
for dt in self.dag.date_range(start_date, end_date=end_date):
TaskInstance(self, dt).run(
mark_success=mark_success,
ignore_depends_on_past=(
dt == start_date and ignore_first_depends_on_past),
ignore_ti_state=ignore_ti_state)
def dry_run(self):
self.log.info('Dry run')
for attr in self.template_fields:
content = getattr(self, attr)
if content and isinstance(content, six.string_types):
self.log.info('Rendering template for %s', attr)
self.log.info(content)
def get_direct_relative_ids(self, upstream=False):
"""
Get the direct relative ids to the current task, upstream or
downstream.
"""
if upstream:
return self._upstream_task_ids
else:
return self._downstream_task_ids
def get_direct_relatives(self, upstream=False):
"""
Get the direct relatives to the current task, upstream or
downstream.
"""
if upstream:
return self.upstream_list
else:
return self.downstream_list
def __repr__(self):
return "<Task({self.__class__.__name__}): {self.task_id}>".format(
self=self)
@property
def task_type(self):
return self.__class__.__name__
def add_only_new(self, item_set, item):
if item in item_set:
self.log.warning(
'Dependency {self}, {item} already registered'
''.format(self=self, item=item))
else:
item_set.add(item)
def _set_relatives(self, task_or_task_list, upstream=False):
try:
task_list = list(task_or_task_list)
except TypeError:
task_list = [task_or_task_list]
for t in task_list:
if not isinstance(t, BaseOperator):
raise AirflowException(
"Relationships can only be set between "
"Operators; received {}".format(t.__class__.__name__))
# relationships can only be set if the tasks share a single DAG. Tasks
# without a DAG are assigned to that DAG.
dags = {t._dag.dag_id: t._dag for t in [self] + task_list if t.has_dag()}
if len(dags) > 1:
raise AirflowException(
'Tried to set relationships between tasks in '
'more than one DAG: {}'.format(dags.values()))
elif len(dags) == 1:
dag = dags.popitem()[1]
else:
raise AirflowException(
"Tried to create relationships between tasks that don't have "
"DAGs yet. Set the DAG for at least one "
"task and try again: {}".format([self] + task_list))
if dag and not self.has_dag():
self.dag = dag
for task in task_list:
if dag and not task.has_dag():
task.dag = dag
if upstream:
task.add_only_new(task.get_direct_relative_ids(upstream=False), self.task_id)
self.add_only_new(self._upstream_task_ids, task.task_id)
else:
self.add_only_new(self._downstream_task_ids, task.task_id)
task.add_only_new(task.get_direct_relative_ids(upstream=True), self.task_id)
def set_downstream(self, task_or_task_list):
"""
Set a task or a task list to be directly downstream from the current
task.
"""
self._set_relatives(task_or_task_list, upstream=False)
def set_upstream(self, task_or_task_list):
"""
Set a task or a task list to be directly upstream from the current
task.
"""
self._set_relatives(task_or_task_list, upstream=True)
def xcom_push(
self,
context,
key,
value,
execution_date=None):
"""
See TaskInstance.xcom_push()
"""
context['ti'].xcom_push(
key=key,
value=value,
execution_date=execution_date)
def xcom_pull(
self,
context,
task_ids=None,
dag_id=None,
key=XCOM_RETURN_KEY,
include_prior_dates=None):
"""
See TaskInstance.xcom_pull()
"""
return context['ti'].xcom_pull(
key=key,
task_ids=task_ids,
dag_id=dag_id,
include_prior_dates=include_prior_dates)
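    # A brief sketch (not part of the original file) of XCom use inside an
    # operator's execute(); the key and task id are illustrative only:
    #
    #   def execute(self, context):
    #       self.xcom_push(context, key='row_count', value=42)
    #       upstream_value = self.xcom_pull(context, task_ids='extract_task')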
@cached_property
def extra_links(self) -> Iterable[str]:
return list(set(self.operator_extra_link_dict.keys())
.union(self.global_operator_extra_link_dict.keys()))
def get_extra_links(self, dttm, link_name):
"""
For an operator, gets the URL that the external links specified in
`extra_links` should point to.
:raise ValueError: The error message of a ValueError will be passed on through to
        the frontend to show up as a tooltip on the disabled link
:param dttm: The datetime parsed execution date for the URL being searched for
:param link_name: The name of the link we're looking for the URL for. Should be
one of the options specified in `extra_links`
:return: A URL
"""
if link_name in self.operator_extra_link_dict:
return self.operator_extra_link_dict[link_name].get_link(self, dttm)
elif link_name in self.global_operator_extra_link_dict:
return self.global_operator_extra_link_dict[link_name].get_link(self, dttm)
class BaseOperatorLink(metaclass=ABCMeta):
"""
Abstract base class that defines how we get an operator link.
"""
@property
@abstractmethod
def name(self) -> str:
"""
Name of the link. This will be the button name on the task UI.
:return: link name
"""
@abstractmethod
def get_link(self, operator: BaseOperator, dttm: datetime) -> str:
"""
Link to external system.
:param operator: airflow operator
:param dttm: datetime
:return: link to external system
"""
| 38.765 | 102 | 0.629176 |
from abc import ABCMeta, abstractmethod
from cached_property import cached_property
import copy
import functools
import logging
import sys
import warnings
from datetime import timedelta, datetime
from typing import Callable, Dict, Iterable, List, Optional, Set
import jinja2
import six
from airflow import configuration, settings
from airflow.exceptions import AirflowException
from airflow.lineage import prepare_lineage, apply_lineage, DataSet
from airflow.models.dag import DAG
from airflow.models.taskinstance import TaskInstance, clear_task_instances
from airflow.models.xcom import XCOM_RETURN_KEY
from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep
from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
from airflow.utils import timezone
from airflow.utils.db import provide_session
from airflow.utils.decorators import apply_defaults
from airflow.utils.helpers import validate_key
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.operator_resources import Resources
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.weight_rule import WeightRule
@functools.total_ordering
class BaseOperator(LoggingMixin):
template_fields = []
template_ext = []
ui_color = '#fff'
ui_fgcolor = '#000'
_base_operator_shallow_copy_attrs = ('user_defined_macros',
'user_defined_filters',
'params',
'_log',)
# each operator should override this class attr for shallow copy attrs.
shallow_copy_attrs = () # type: Iterable[str]
# Defines the operator level extra links
operator_extra_links = () # type: Iterable[BaseOperatorLink]
@apply_defaults
def __init__(
self,
task_id: str,
owner: str = configuration.conf.get('operators', 'DEFAULT_OWNER'),
email: Optional[str] = None,
email_on_retry: bool = True,
email_on_failure: bool = True,
retries: int = 0,
retry_delay: timedelta = timedelta(seconds=300),
retry_exponential_backoff: bool = False,
max_retry_delay: Optional[datetime] = None,
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
schedule_interval=None, # not hooked as of now
depends_on_past: bool = False,
wait_for_downstream: bool = False,
dag: Optional[DAG] = None,
params: Optional[Dict] = None,
default_args: Optional[Dict] = None,
priority_weight: int = 1,
weight_rule: str = WeightRule.DOWNSTREAM,
queue: str = configuration.conf.get('celery', 'default_queue'),
pool: Optional[str] = None,
sla: Optional[timedelta] = None,
execution_timeout: Optional[timedelta] = None,
on_failure_callback: Optional[Callable] = None,
on_success_callback: Optional[Callable] = None,
on_retry_callback: Optional[Callable] = None,
trigger_rule: str = TriggerRule.ALL_SUCCESS,
resources: Optional[Dict] = None,
run_as_user: Optional[str] = None,
task_concurrency: Optional[int] = None,
executor_config: Optional[Dict] = None,
do_xcom_push: bool = True,
inlets: Optional[Dict] = None,
outlets: Optional[Dict] = None,
*args,
**kwargs
):
if args or kwargs:
# TODO remove *args and **kwargs in Airflow 2.0
warnings.warn(
'Invalid arguments were passed to {c} (task_id: {t}). '
'Support for passing such arguments will be dropped in '
'Airflow 2.0. Invalid arguments were:'
'\n*args: {a}\n**kwargs: {k}'.format(
c=self.__class__.__name__, a=args, k=kwargs, t=task_id),
category=PendingDeprecationWarning,
stacklevel=3
)
validate_key(task_id)
self.task_id = task_id
self.owner = owner
self.email = email
self.email_on_retry = email_on_retry
self.email_on_failure = email_on_failure
self.start_date = start_date
if start_date and not isinstance(start_date, datetime):
self.log.warning("start_date for %s isn't datetime.datetime", self)
elif start_date:
self.start_date = timezone.convert_to_utc(start_date)
self.end_date = end_date
if end_date:
self.end_date = timezone.convert_to_utc(end_date)
if not TriggerRule.is_valid(trigger_rule):
raise AirflowException(
"The trigger_rule must be one of {all_triggers},"
"'{d}.{t}'; received '{tr}'."
.format(all_triggers=TriggerRule.all_triggers(),
d=dag.dag_id if dag else "", t=task_id, tr=trigger_rule))
self.trigger_rule = trigger_rule
self.depends_on_past = depends_on_past
self.wait_for_downstream = wait_for_downstream
if wait_for_downstream:
self.depends_on_past = True
if schedule_interval:
self.log.warning(
"schedule_interval is used for %s, though it has "
"been deprecated as a task parameter, you need to "
"specify it as a DAG parameter instead",
self
)
self._schedule_interval = schedule_interval
self.retries = retries
self.queue = queue
self.pool = pool
self.sla = sla
self.execution_timeout = execution_timeout
self.on_failure_callback = on_failure_callback
self.on_success_callback = on_success_callback
self.on_retry_callback = on_retry_callback
if isinstance(retry_delay, timedelta):
self.retry_delay = retry_delay
else:
self.log.debug("Retry_delay isn't timedelta object, assuming secs")
self.retry_delay = timedelta(seconds=retry_delay)
self.retry_exponential_backoff = retry_exponential_backoff
self.max_retry_delay = max_retry_delay
self.params = params or {} # Available in templates!
self.priority_weight = priority_weight
if not WeightRule.is_valid(weight_rule):
raise AirflowException(
"The weight_rule must be one of {all_weight_rules},"
"'{d}.{t}'; received '{tr}'."
.format(all_weight_rules=WeightRule.all_weight_rules,
d=dag.dag_id if dag else "", t=task_id, tr=weight_rule))
self.weight_rule = weight_rule
self.resources = Resources(**(resources or {}))
self.run_as_user = run_as_user
self.task_concurrency = task_concurrency
self.executor_config = executor_config or {}
self.do_xcom_push = do_xcom_push
# Private attributes
self._upstream_task_ids = set() # type: Set[str]
self._downstream_task_ids = set() # type: Set[str]
if not dag and settings.CONTEXT_MANAGER_DAG:
dag = settings.CONTEXT_MANAGER_DAG
if dag:
self.dag = dag
self._log = logging.getLogger("airflow.task.operators")
# lineage
self.inlets = [] # type: List[DataSet]
self.outlets = [] # type: List[DataSet]
self.lineage_data = None
self._inlets = {
"auto": False,
"task_ids": [],
"datasets": [],
}
self._outlets = {
"datasets": [],
} # type: Dict
if inlets:
self._inlets.update(inlets)
if outlets:
self._outlets.update(outlets)
self._comps = {
'task_id',
'dag_id',
'owner',
'email',
'email_on_retry',
'retry_delay',
'retry_exponential_backoff',
'max_retry_delay',
'start_date',
'schedule_interval',
'depends_on_past',
'wait_for_downstream',
'priority_weight',
'sla',
'execution_timeout',
'on_failure_callback',
'on_success_callback',
'on_retry_callback',
'do_xcom_push',
}
def __eq__(self, other):
if (type(self) == type(other) and
self.task_id == other.task_id):
return all(self.__dict__.get(c, None) == other.__dict__.get(c, None) for c in self._comps)
return False
def __ne__(self, other):
return not self == other
def __lt__(self, other):
return self.task_id < other.task_id
def __hash__(self):
hash_components = [type(self)]
for c in self._comps:
val = getattr(self, c, None)
try:
hash(val)
hash_components.append(val)
except TypeError:
hash_components.append(repr(val))
return hash(tuple(hash_components))
# Composing Operators -----------------------------------------------
def __rshift__(self, other):
if isinstance(other, DAG):
# if this dag is already assigned, do nothing
# otherwise, do normal dag assignment
if not (self.has_dag() and self.dag is other):
self.dag = other
else:
self.set_downstream(other)
return other
def __lshift__(self, other):
if isinstance(other, DAG):
# if this dag is already assigned, do nothing
# otherwise, do normal dag assignment
if not (self.has_dag() and self.dag is other):
self.dag = other
else:
self.set_upstream(other)
return other
def __rrshift__(self, other):
self.__lshift__(other)
return self
def __rlshift__(self, other):
self.__rshift__(other)
return self
# /Composing Operators ---------------------------------------------
@property
def dag(self):
if self.has_dag():
return self._dag
else:
raise AirflowException(
'Operator {} has not been assigned to a DAG yet'.format(self))
@dag.setter
def dag(self, dag):
if not isinstance(dag, DAG):
raise TypeError(
'Expected DAG; received {}'.format(dag.__class__.__name__))
elif self.has_dag() and self.dag is not dag:
raise AirflowException(
"The DAG assigned to {} can not be changed.".format(self))
elif self.task_id not in dag.task_dict:
dag.add_task(self)
self._dag = dag
def has_dag(self):
return getattr(self, '_dag', None) is not None
@property
def dag_id(self):
if self.has_dag():
return self.dag.dag_id
else:
return 'adhoc_' + self.owner
@property
def deps(self):
return {
NotInRetryPeriodDep(),
PrevDagrunDep(),
TriggerRuleDep(),
}
@property
def schedule_interval(self):
if self.has_dag():
return self.dag._schedule_interval
else:
return self._schedule_interval
@property
def priority_weight_total(self):
if self.weight_rule == WeightRule.ABSOLUTE:
return self.priority_weight
elif self.weight_rule == WeightRule.DOWNSTREAM:
upstream = False
elif self.weight_rule == WeightRule.UPSTREAM:
upstream = True
else:
upstream = False
return self.priority_weight + sum(
map(lambda task_id: self._dag.task_dict[task_id].priority_weight,
self.get_flat_relative_ids(upstream=upstream))
)
@cached_property
def operator_extra_link_dict(self):
return {link.name: link for link in self.operator_extra_links}
@cached_property
def global_operator_extra_link_dict(self):
from airflow.plugins_manager import global_operator_extra_links
return {link.name: link for link in global_operator_extra_links}
@prepare_lineage
    def pre_execute(self, context):
        pass
def execute(self, context):
raise NotImplementedError()
@apply_lineage
    def post_execute(self, context, result=None):
        pass
    def on_kill(self):
        pass
def __deepcopy__(self, memo):
sys.setrecursionlimit(5000) # TODO fix this in a better way
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
shallow_copy = cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs
for k, v in self.__dict__.items():
if k not in shallow_copy:
setattr(result, k, copy.deepcopy(v, memo))
else:
setattr(result, k, copy.copy(v))
return result
def __getstate__(self):
state = dict(self.__dict__)
del state['_log']
return state
def __setstate__(self, state):
self.__dict__ = state
self._log = logging.getLogger("airflow.task.operators")
def render_template_from_field(self, attr, content, context, jinja_env):
rt = self.render_template
if isinstance(content, six.string_types):
result = jinja_env.from_string(content).render(**context)
elif isinstance(content, (list, tuple)):
result = [rt(attr, e, context) for e in content]
elif isinstance(content, dict):
result = {
k: rt("{}[{}]".format(attr, k), v, context)
for k, v in list(content.items())}
else:
result = content
return result
def render_template(self, attr, content, context):
jinja_env = self.get_template_env()
exts = self.__class__.template_ext
if (
isinstance(content, six.string_types) and
any([content.endswith(ext) for ext in exts])):
return jinja_env.get_template(content).render(**context)
else:
return self.render_template_from_field(attr, content, context, jinja_env)
def get_template_env(self):
return self.dag.get_template_env() \
if hasattr(self, 'dag') \
else jinja2.Environment(cache_size=0)
    def prepare_template(self):
        pass
def resolve_template_files(self):
# Getting the content of files for template_field / template_ext
for attr in self.template_fields:
content = getattr(self, attr)
if content is None:
continue
elif isinstance(content, six.string_types) and \
any([content.endswith(ext) for ext in self.template_ext]):
env = self.get_template_env()
try:
setattr(self, attr, env.loader.get_source(env, content)[0])
except Exception as e:
self.log.exception(e)
elif isinstance(content, list):
env = self.dag.get_template_env()
for i in range(len(content)):
if isinstance(content[i], six.string_types) and \
any([content[i].endswith(ext) for ext in self.template_ext]):
try:
content[i] = env.loader.get_source(env, content[i])[0]
except Exception as e:
self.log.exception(e)
self.prepare_template()
@property
def upstream_list(self):
return [self.dag.get_task(tid) for tid in self._upstream_task_ids]
@property
def upstream_task_ids(self):
return self._upstream_task_ids
@property
def downstream_list(self):
return [self.dag.get_task(tid) for tid in self._downstream_task_ids]
@property
def downstream_task_ids(self):
return self._downstream_task_ids
@provide_session
def clear(self,
start_date=None,
end_date=None,
upstream=False,
downstream=False,
session=None):
TI = TaskInstance
qry = session.query(TI).filter(TI.dag_id == self.dag_id)
if start_date:
qry = qry.filter(TI.execution_date >= start_date)
if end_date:
qry = qry.filter(TI.execution_date <= end_date)
tasks = [self.task_id]
if upstream:
tasks += [
t.task_id for t in self.get_flat_relatives(upstream=True)]
if downstream:
tasks += [
t.task_id for t in self.get_flat_relatives(upstream=False)]
qry = qry.filter(TI.task_id.in_(tasks))
count = qry.count()
clear_task_instances(qry.all(), session, dag=self.dag)
session.commit()
return count
@provide_session
def get_task_instances(self, start_date=None, end_date=None, session=None):
end_date = end_date or timezone.utcnow()
return session.query(TaskInstance)\
.filter(TaskInstance.dag_id == self.dag_id)\
.filter(TaskInstance.task_id == self.task_id)\
.filter(TaskInstance.execution_date >= start_date)\
.filter(TaskInstance.execution_date <= end_date)\
.order_by(TaskInstance.execution_date)\
.all()
def get_flat_relative_ids(self, upstream=False, found_descendants=None):
if not found_descendants:
found_descendants = set()
relative_ids = self.get_direct_relative_ids(upstream)
for relative_id in relative_ids:
if relative_id not in found_descendants:
found_descendants.add(relative_id)
relative_task = self._dag.task_dict[relative_id]
relative_task.get_flat_relative_ids(upstream,
found_descendants)
return found_descendants
def get_flat_relatives(self, upstream=False):
return list(map(lambda task_id: self._dag.task_dict[task_id],
self.get_flat_relative_ids(upstream)))
def run(
self,
start_date=None,
end_date=None,
ignore_first_depends_on_past=False,
ignore_ti_state=False,
mark_success=False):
start_date = start_date or self.start_date
end_date = end_date or self.end_date or timezone.utcnow()
for dt in self.dag.date_range(start_date, end_date=end_date):
TaskInstance(self, dt).run(
mark_success=mark_success,
ignore_depends_on_past=(
dt == start_date and ignore_first_depends_on_past),
ignore_ti_state=ignore_ti_state)
def dry_run(self):
self.log.info('Dry run')
for attr in self.template_fields:
content = getattr(self, attr)
if content and isinstance(content, six.string_types):
self.log.info('Rendering template for %s', attr)
self.log.info(content)
def get_direct_relative_ids(self, upstream=False):
if upstream:
return self._upstream_task_ids
else:
return self._downstream_task_ids
def get_direct_relatives(self, upstream=False):
if upstream:
return self.upstream_list
else:
return self.downstream_list
def __repr__(self):
return "<Task({self.__class__.__name__}): {self.task_id}>".format(
self=self)
@property
def task_type(self):
return self.__class__.__name__
def add_only_new(self, item_set, item):
if item in item_set:
self.log.warning(
'Dependency {self}, {item} already registered'
''.format(self=self, item=item))
else:
item_set.add(item)
def _set_relatives(self, task_or_task_list, upstream=False):
try:
task_list = list(task_or_task_list)
except TypeError:
task_list = [task_or_task_list]
for t in task_list:
if not isinstance(t, BaseOperator):
raise AirflowException(
"Relationships can only be set between "
"Operators; received {}".format(t.__class__.__name__))
# relationships can only be set if the tasks share a single DAG. Tasks
# without a DAG are assigned to that DAG.
dags = {t._dag.dag_id: t._dag for t in [self] + task_list if t.has_dag()}
if len(dags) > 1:
raise AirflowException(
'Tried to set relationships between tasks in '
'more than one DAG: {}'.format(dags.values()))
elif len(dags) == 1:
dag = dags.popitem()[1]
else:
raise AirflowException(
"Tried to create relationships between tasks that don't have "
"DAGs yet. Set the DAG for at least one "
"task and try again: {}".format([self] + task_list))
if dag and not self.has_dag():
self.dag = dag
for task in task_list:
if dag and not task.has_dag():
task.dag = dag
if upstream:
task.add_only_new(task.get_direct_relative_ids(upstream=False), self.task_id)
self.add_only_new(self._upstream_task_ids, task.task_id)
else:
self.add_only_new(self._downstream_task_ids, task.task_id)
task.add_only_new(task.get_direct_relative_ids(upstream=True), self.task_id)
def set_downstream(self, task_or_task_list):
self._set_relatives(task_or_task_list, upstream=False)
def set_upstream(self, task_or_task_list):
self._set_relatives(task_or_task_list, upstream=True)
def xcom_push(
self,
context,
key,
value,
execution_date=None):
context['ti'].xcom_push(
key=key,
value=value,
execution_date=execution_date)
def xcom_pull(
self,
context,
task_ids=None,
dag_id=None,
key=XCOM_RETURN_KEY,
include_prior_dates=None):
return context['ti'].xcom_pull(
key=key,
task_ids=task_ids,
dag_id=dag_id,
include_prior_dates=include_prior_dates)
@cached_property
def extra_links(self) -> Iterable[str]:
return list(set(self.operator_extra_link_dict.keys())
.union(self.global_operator_extra_link_dict.keys()))
def get_extra_links(self, dttm, link_name):
if link_name in self.operator_extra_link_dict:
return self.operator_extra_link_dict[link_name].get_link(self, dttm)
elif link_name in self.global_operator_extra_link_dict:
return self.global_operator_extra_link_dict[link_name].get_link(self, dttm)
class BaseOperatorLink(metaclass=ABCMeta):
@property
@abstractmethod
    def name(self) -> str:
        """Name of the link, shown as the button label in the task UI."""
    @abstractmethod
    def get_link(self, operator: BaseOperator, dttm: datetime) -> str:
        """Return the URL for the given operator and execution date."""
| true | true |
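The BaseOperator fragment above wires task relationships through paired id sets: _set_relatives() first checks that all tasks share a single DAG, then updates both endpoints' _upstream_task_ids/_downstream_task_ids. A minimal, self-contained sketch of that bookkeeping (the Task class below is a toy stand-in written here for illustration, not Airflow's):

class Task:
    def __init__(self, task_id):
        self.task_id = task_id
        self._upstream_task_ids = set()
        self._downstream_task_ids = set()
    def set_downstream(self, other):
        # Mirror the edge on both endpoints, as _set_relatives() does
        # in the upstream=False branch above.
        self._downstream_task_ids.add(other.task_id)
        other._upstream_task_ids.add(self.task_id)

extract, load = Task("extract"), Task("load")
extract.set_downstream(load)
assert "load" in extract._downstream_task_ids
assert "extract" in load._upstream_task_ids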
f72484a7592ee8ca18c8b0897a938b18606428a4 | 6800 | py | Python | bindings/python/ensmallen_graph/datasets/string/paraprevotellaxylaniphila.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | ["MIT"] | null | null | null | bindings/python/ensmallen_graph/datasets/string/paraprevotellaxylaniphila.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | ["MIT"] | null | null | null | bindings/python/ensmallen_graph/datasets/string/paraprevotellaxylaniphila.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | ["MIT"] | null | null | null |
"""
This file offers the methods to automatically retrieve the graph Paraprevotella xylaniphila.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 21:42:47.483688
The undirected graph Paraprevotella xylaniphila has 3396 nodes and 309111
weighted edges, of which none are self-loops. The graph is dense as it
has a density of 0.05362 and has 22 connected components, where the component
with the most nodes has 3350 nodes and the component with the fewest nodes has
2 nodes. The graph median node degree is 157, the mean node degree is 182.04,
and the node degree mode is 4. The top 5 most central nodes are 762982.HMPREF9442_02244
(degree 997), 762982.HMPREF9442_00670 (degree 895), 762982.HMPREF9442_01031
(degree 865), 762982.HMPREF9442_03225 (degree 794) and 762982.HMPREF9442_00174
(degree 776).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import ParaprevotellaXylaniphila
# Then load the graph
graph = ParaprevotellaXylaniphila()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
    # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
    # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def ParaprevotellaXylaniphila(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Paraprevotella xylaniphila graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
    cache_path: str = "graphs/string",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
    Instance of the Paraprevotella xylaniphila graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 21:42:47.483688
The undirected graph Paraprevotella xylaniphila has 3396 nodes and 309111
weighted edges, of which none are self-loops. The graph is dense as it
has a density of 0.05362 and has 22 connected components, where the component
    with the most nodes has 3350 nodes and the component with the fewest nodes has
2 nodes. The graph median node degree is 157, the mean node degree is 182.04,
and the node degree mode is 4. The top 5 most central nodes are 762982.HMPREF9442_02244
(degree 997), 762982.HMPREF9442_00670 (degree 895), 762982.HMPREF9442_01031
(degree 865), 762982.HMPREF9442_03225 (degree 794) and 762982.HMPREF9442_00174
(degree 776).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import ParaprevotellaXylaniphila
# Then load the graph
graph = ParaprevotellaXylaniphila()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="ParaprevotellaXylaniphila",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| 35.602094 | 223 | 0.708529 |
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph
def ParaprevotellaXylaniphila(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
return AutomaticallyRetrievedGraph(
graph_name="ParaprevotellaXylaniphila",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| true | true |
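The report's headline numbers can be sanity-checked with plain arithmetic, no graph library required: an undirected graph with E edges and N nodes has mean degree 2*E/N, and a simple undirected graph has density 2*E/(N*(N-1)). Figures below are taken from the report above.

nodes, edges = 3396, 309111
print(round(2 * edges / nodes, 2))                  # 182.04, the stated mean degree
print(round(2 * edges / (nodes * (nodes - 1)), 5))  # 0.05362, the stated density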
f72484fc095b2c9a1f53f18824e2c7709dcae682 | 874 | py | Python | spec_parser/spec_parser/util.py | Parnassius/domify | 262a9b9bf68fd627d963f23abb68c06f424180f2 | ["MIT"] | null | null | null | spec_parser/spec_parser/util.py | Parnassius/domify | 262a9b9bf68fd627d963f23abb68c06f424180f2 | ["MIT"] | 5 | 2022-03-01T19:53:28.000Z | 2022-03-28T01:32:30.000Z | spec_parser/spec_parser/util.py | Parnassius/domify | 262a9b9bf68fd627d963f23abb68c06f424180f2 | ["MIT"] | 1 | 2021-11-08T20:36:36.000Z | 2021-11-08T20:36:36.000Z |
from typing import Dict, List
import requests
from bs4 import BeautifulSoup # type: ignore[import]
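# Fetch-and-parse cache for pages of the WHATWG HTML spec, keyed by page name.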
class _RequestCache:
def __init__(self) -> None:
self._cache: Dict[str, BeautifulSoup] = {}
def __call__(self, page: str) -> BeautifulSoup:
if page.endswith(".html"):
page = page[:-5]
if page not in self._cache:
html = requests.get(
f"https://html.spec.whatwg.org/multipage/{page}.html"
).text
self._cache[page] = BeautifulSoup(html, "html5lib")
return self._cache[page]
request_cache = _RequestCache()
def get_input_type_keywords() -> List[str]:
soup = request_cache("input")
table = soup.find(id="attr-input-type-keywords")
keywords = [
row.contents[0].find("code").string for row in table.find("tbody").children
]
return keywords
| 26.484848 | 83 | 0.621281 |
from typing import Dict, List
import requests
from bs4 import BeautifulSoup
class _RequestCache:
def __init__(self) -> None:
self._cache: Dict[str, BeautifulSoup] = {}
def __call__(self, page: str) -> BeautifulSoup:
if page.endswith(".html"):
page = page[:-5]
if page not in self._cache:
html = requests.get(
f"https://html.spec.whatwg.org/multipage/{page}.html"
).text
self._cache[page] = BeautifulSoup(html, "html5lib")
return self._cache[page]
request_cache = _RequestCache()
def get_input_type_keywords() -> List[str]:
soup = request_cache("input")
table = soup.find(id="attr-input-type-keywords")
keywords = [
row.contents[0].find("code").string for row in table.find("tbody").children
]
return keywords
| true | true |
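A brief usage sketch for the helpers above; it needs network access plus the requests, bs4 and html5lib packages, and the exact keyword list depends on the live WHATWG spec at fetch time:

keywords = get_input_type_keywords()
print(keywords)  # e.g. ['hidden', 'text', 'search', ...]
# Repeat lookups are served from the in-memory cache, and a trailing
# ".html" is normalized away before the cache key is built:
assert request_cache("input.html") is request_cache("input")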
f72485a6ca32241a57f395404a0c19eded7aae2d | 415 | py | Python | Data_Structures/Maps_Hash_Dict/DivisiblePairCount2.py | neha07kumari/algo_ds_101 | b5f87feb4aac5ad45d934a609e7e73eedf280f10 | ["MIT"] | 1 | 2022-02-11T19:25:01.000Z | 2022-02-11T19:25:01.000Z | Data_Structures/Maps_Hash_Dict/DivisiblePairCount2.py | neha07kumari/algo_ds_101 | b5f87feb4aac5ad45d934a609e7e73eedf280f10 | ["MIT"] | 2 | 2020-10-13T06:49:54.000Z | 2020-10-17T07:16:37.000Z | Data_Structures/Maps_Hash_Dict/DivisiblePairCount2.py | neha07kumari/algo_ds_101 | b5f87feb4aac5ad45d934a609e7e73eedf280f10 | ["MIT"] | 14 | 2020-10-13T04:20:57.000Z | 2021-10-01T16:16:13.000Z |
def DivisiblePairCount(arr):
count = 0
k = len(arr)
for i in range(0, k):
for j in range(i+1, k):
if (arr[i] % arr[j] == 0 or arr[j] % arr[i] == 0):
count += 1
return count
if __name__ == "__main__":
    # Give the input as a list literal, e.g. [1,2,3]
arr = [int(item) for item in ''.join(list(input())[1:-1]).split(',')]
print(DivisiblePairCount(arr))
| 24.411765 | 73 | 0.508434 |
def DivisiblePairCount(arr):
count = 0
k = len(arr)
for i in range(0, k):
for j in range(i+1, k):
if (arr[i] % arr[j] == 0 or arr[j] % arr[i] == 0):
count += 1
return count
if __name__ == "__main__":
arr = [int(item) for item in ''.join(list(input())[1:-1]).split(',')]
print(DivisiblePairCount(arr))
| true | true |
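A worked example of the helper above (hypothetical calls, not part of the original file): for [1, 2, 3] the divisible pairs are (1, 2) and (1, 3) but not (2, 3), so the count is 2. The nested loops make it O(n^2) in the list length, and a zero in the input would raise ZeroDivisionError from the % operator.

assert DivisiblePairCount([1, 2, 3]) == 2
assert DivisiblePairCount([2, 4, 8]) == 3  # every pair here divides evenly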
f72485e95740971bc6b7f5bd9e29a91909acdc48 | 92 | py | Python | project_mysql/sales/modelsa.py | righ/djangomodel2alchemymap | c156cd14ff7bfd7d858449819072c18059ecdcd0 | ["MIT"] | 17 | 2019-08-20T16:58:18.000Z | 2022-01-15T05:00:52.000Z | project_mysql/sales/modelsa.py | righ/djangomodel2alchemymap | c156cd14ff7bfd7d858449819072c18059ecdcd0 | ["MIT"] | 4 | 2020-06-02T00:14:38.000Z | 2021-10-14T16:45:13.000Z | project_mysql/sales/modelsa.py | righ/djangomodel2alchemymap | c156cd14ff7bfd7d858449819072c18059ecdcd0 | ["MIT"] | 2 | 2019-12-17T13:15:48.000Z | 2021-04-27T09:09:46.000Z |
from d2a import transfer
from . import models
transfer(models, globals(), db_type='mysql')
| 18.4 | 44 | 0.76087 |
from d2a import transfer
from . import models
transfer(models, globals(), db_type='mysql')
| true | true |
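For context, d2a's transfer() reads the Django models module and injects a matching SQLAlchemy model class per Django model into the given namespace (here, the module's globals()). A hedged sketch of querying through the generated classes -- "Sale", the import path, and the DSN are hypothetical placeholders, since the real model names live in project_mysql/sales/models.py:

from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from sales import modelsa  # the module above, after transfer() has run

engine = create_engine("mysql://user:password@localhost/dbname")  # placeholder DSN
with Session(engine) as session:
    rows = session.query(modelsa.Sale).limit(10).all()  # hypothetical model name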