code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.utils import timezone
import uuid
# PD symptom annotation choices: (3-letter code stored in the DB, human-readable label).
ANNOTATION = (
    ('asm', 'Asymmetry'),
    ('dst', 'Dystonia'),
    ('dsk', 'Dyskensia'),
    ('ebt', 'En Bloc Turning'),
    ('str', 'Short Stride Length'),
    ('mov', 'Slow/Hesitant Movement'),
    ('pos', 'Stooped Posture'),
    ('trm', 'Tremor'),
    ('oth', 'Other/Activity')
)
# Common video framerate presets: (label stored in the DB, frames per second).
FRAME_RATES = (
    ('NTSC_Film', 23.98),
    ('Film', 24),
    ('PAL', 25),
    ('NTSC', 29.97),
    ('Web', 30),
    ('PAL_HD', 50),
    ('NTSC_HD', 59.94),
    ('High', 60),
)
class PatientData(models.Model):
    """Demographic record for a single patient."""
    id = models.AutoField(primary_key=True)
    first_name = models.CharField(max_length=50, help_text='Patient first name')
    last_name = models.CharField(max_length=50, help_text='Patient last name')
    date_of_birth = models.DateField(help_text='Patient date of birth')
    notes = models.CharField(max_length=500, help_text='Notes regarding patient')
    class Meta:
        ordering = ['last_name']  # list patients alphabetically by surname
        permissions = (("can_alter_patientdata", "Can create or edit patient data entries."),)
    def get_absolute_url(self):
        """Return the URL of the detail view for this patient."""
        return reverse('patientdata-detail', args=[str(self.id)])
    def __str__(self):
        """Display as "Last, First"."""
        return f'{self.last_name}, {self.first_name}'
class WearableData(models.Model):
    """A wearable-sensor recording session belonging to a patient."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text='Unique ID for this wearable data')
    # Deleting the patient removes their wearable sessions as well.
    patient = models.ForeignKey('PatientData', on_delete=models.CASCADE, null=True, related_name='wearables')
    filename = models.FileField(upload_to='wearable/', help_text='Wearable data file')
    time = models.DateTimeField(help_text='Session date & time')
    note = models.CharField(max_length=500, help_text='Note regarding wearable data')
    class Meta:
        ordering = ['patient', '-time']  # newest session first within a patient
        permissions = (("can_alter_wearabledata", "Can create or edit wearable data entries."),)
    def get_absolute_url(self):
        """Return the URL of the detail view for this wearable data entry."""
        return reverse('wearabledata-detail', args=[str(self.id)])
    def __str__(self):
        return f'{self.patient} ({self.time})'
class CameraData(models.Model):
    """A camera video recording session belonging to a patient."""
    # Fix: help_text said "wearable data" — copy-paste error from WearableData.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text='Unique ID for this camera data')
    # Deleting the patient removes their camera sessions as well.
    patient = models.ForeignKey('PatientData', on_delete=models.CASCADE, null=True, related_name='cameras')
    filename = models.FileField(upload_to='camera/', help_text='Camera video file')
    framerate = models.CharField(
        max_length=9,
        choices=FRAME_RATES,
        default='Film',
        help_text='Video framerate',
    )
    time = models.DateTimeField(help_text='Session date & time')
    note = models.CharField(max_length=500, help_text='Note regarding camera data')
    class Meta:
        ordering = ['patient', '-time']  # newest session first within a patient
        permissions = (("can_alter_cameradata", "Can create or edit camera data entries."),)
    def get_absolute_url(self):
        """Return the URL of the detail view for this camera data entry."""
        return reverse('cameradata-detail', args=[str(self.id)])
    def __str__(self):
        return f'{self.patient} ({self.time})'
    def get_user_annotations(self, user=None):
        """Return annotations on this video, optionally restricted to one annotator.

        Fix: the original filtered on ``annotator=User`` — the model *class*
        itself — which can never match a concrete user.  Pass a ``User``
        instance to filter by; with ``user=None`` all annotations are
        returned.
        """
        qs = self.c_annotations.all()
        if user is not None:
            qs = qs.filter(annotator=user)
        return qs
class WearableAnnotation(models.Model):
    """A labelled frame range within a wearable recording."""
    id = models.AutoField(primary_key=True)
    wearable = models.ForeignKey('WearableData', on_delete=models.CASCADE, null=True, related_name='w_annotations')
    # Frame range the annotation covers.
    frame_begin = models.PositiveIntegerField()
    frame_end = models.PositiveIntegerField()
    # Keep the annotation even if its author's account is deleted.
    annotator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    annotation = models.CharField(
        max_length=3,
        choices=ANNOTATION,
        default='oth',
        help_text='PD Symptom',
    )
    note = models.CharField(max_length=500, help_text='Note regarding annotation', null=True, blank=True)
    class Meta:
        ordering = ['frame_begin']
        permissions = (("can_alter_wearableannotation", "Can create or edit wearable annotations."),)
    def get_absolute_url(self):
        """Return the URL of the detail view for this annotation."""
        return reverse('wearableannotation-detail', args=[str(self.wearable.id), str(self.id)])
    def __str__(self):
        return f'{self.wearable} - ({self.frame_begin}-{self.frame_end}) - {self.get_annotation_display()}'
class CameraAnnotation(models.Model):
    """A labelled time range within a camera video."""
    id = models.AutoField(primary_key=True)
    camera = models.ForeignKey('CameraData', on_delete=models.CASCADE, null=True, related_name='c_annotations')
    # Timecode strings in hh:mm:ss:ff form (hours:minutes:seconds:frames).
    time_begin = models.CharField(max_length=11, help_text='hh:mm:ss:ff')
    time_end = models.CharField(max_length=11, help_text='hh:mm:ss:ff')
    # Keep the annotation even if its author's account is deleted.
    annotator = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    annotation = models.CharField(
        max_length=3,
        choices=ANNOTATION,
        default='oth',
        help_text='PD Symptom',
    )
    note = models.CharField(max_length=500, help_text='Note regarding annotation', null=True, blank=True)
    class Meta:
        ordering = ['camera', 'time_begin']
        permissions = (("can_alter_cameraannotation", "Can create or edit camera annotations."),)
    def get_absolute_url(self):
        """Return the URL of the detail view for this annotation."""
        return reverse('cameraannotation-detail', args=[str(self.camera.id), str(self.id)])
    def __str__(self):
        return f'{self.camera} - ({self.time_begin}-{self.time_end}) - {self.get_annotation_display()}'
class CameraAnnotationComment(models.Model):
    """A comment attached to a camera annotation."""
    id = models.AutoField(primary_key=True)
    # Comments disappear together with their annotation.
    annotation = models.ForeignKey('CameraAnnotation', on_delete=models.CASCADE, related_name='comments')
    # Keep the comment even if its author's account is deleted.
    author = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    timestamp = models.DateTimeField(default=timezone.now)
    text = models.TextField()
    class Meta:
        ordering = ['annotation', 'timestamp']  # chronological within an annotation
        permissions = (("can_alter_cameraannotation_comment", "Can create or edit camera annotation comments."),)
    def __str__(self):
        return self.text
class WearableDataPoint(models.Model):
    """A single (frame, magnitude) sample from a wearable recording."""
    id = models.AutoField(primary_key=True)
    wearable = models.ForeignKey('WearableData', on_delete=models.CASCADE, null=True, related_name='data_point')
    frame = models.PositiveIntegerField()
    magnitude = models.FloatField()
    class Meta:
        ordering = ['frame']
        permissions = (("can_alter_wearabledata_point", "Can create or edit wearable data point."),)
    def __str__(self):
        return f'{self.wearable.id} - ({self.frame}, {self.magnitude})'
| [
"django.db.models.DateField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.FileField",
"django.db.models.AutoField",
"django.db.models.PositiveIntegerField",
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.... | [((662, 696), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (678, 696), False, 'from django.db import models\n'), ((714, 777), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'help_text': '"""Patient first name"""'}), "(max_length=50, help_text='Patient first name')\n", (730, 777), False, 'from django.db import models\n'), ((794, 856), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'help_text': '"""Patient last name"""'}), "(max_length=50, help_text='Patient last name')\n", (810, 856), False, 'from django.db import models\n'), ((877, 928), 'django.db.models.DateField', 'models.DateField', ([], {'help_text': '"""Patient date of birth"""'}), "(help_text='Patient date of birth')\n", (893, 928), False, 'from django.db import models\n'), ((941, 1010), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'help_text': '"""Notes regarding patient"""'}), "(max_length=500, help_text='Notes regarding patient')\n", (957, 1010), False, 'from django.db import models\n'), ((1378, 1483), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'primary_key': '(True)', 'default': 'uuid.uuid4', 'help_text': '"""Unique ID for this wearable data"""'}), "(primary_key=True, default=uuid.uuid4, help_text=\n 'Unique ID for this wearable data')\n", (1394, 1483), False, 'from django.db import models\n'), ((1493, 1592), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""PatientData"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'related_name': '"""wearables"""'}), "('PatientData', on_delete=models.CASCADE, null=True,\n related_name='wearables')\n", (1510, 1592), False, 'from django.db import models\n'), ((1604, 1675), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""wearable/"""', 'help_text': '"""Wearable data file"""'}), "(upload_to='wearable/', help_text='Wearable data file')\n", (1620, 
1675), False, 'from django.db import models\n'), ((1687, 1740), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'help_text': '"""Session date & time"""'}), "(help_text='Session date & time')\n", (1707, 1740), False, 'from django.db import models\n'), ((1752, 1826), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'help_text': '"""Note regarding wearable data"""'}), "(max_length=500, help_text='Note regarding wearable data')\n", (1768, 1826), False, 'from django.db import models\n'), ((2195, 2300), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'primary_key': '(True)', 'default': 'uuid.uuid4', 'help_text': '"""Unique ID for this wearable data"""'}), "(primary_key=True, default=uuid.uuid4, help_text=\n 'Unique ID for this wearable data')\n", (2211, 2300), False, 'from django.db import models\n'), ((2310, 2407), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""PatientData"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'related_name': '"""cameras"""'}), "('PatientData', on_delete=models.CASCADE, null=True,\n related_name='cameras')\n", (2327, 2407), False, 'from django.db import models\n'), ((2419, 2487), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""camera/"""', 'help_text': '"""Camera video file"""'}), "(upload_to='camera/', help_text='Camera video file')\n", (2435, 2487), False, 'from django.db import models\n'), ((2504, 2604), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(9)', 'choices': 'FRAME_RATES', 'default': '"""Film"""', 'help_text': '"""Video framerate"""'}), "(max_length=9, choices=FRAME_RATES, default='Film',\n help_text='Video framerate')\n", (2520, 2604), False, 'from django.db import models\n'), ((2651, 2704), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'help_text': '"""Session date & time"""'}), "(help_text='Session date & time')\n", (2671, 2704), False, 'from django.db import models\n'), ((2716, 2788), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'help_text': '"""Note regarding camera data"""'}), "(max_length=500, help_text='Note regarding camera data')\n", (2732, 2788), False, 'from django.db import models\n'), ((3253, 3287), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (3269, 3287), False, 'from django.db import models\n'), ((3303, 3407), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""WearableData"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'related_name': '"""w_annotations"""'}), "('WearableData', on_delete=models.CASCADE, null=True,\n related_name='w_annotations')\n", (3320, 3407), False, 'from django.db import models\n'), ((3423, 3452), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (3450, 3452), False, 'from django.db import models\n'), ((3469, 3498), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (3496, 3498), False, 'from django.db import models\n'), ((3515, 3576), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(User, on_delete=models.SET_NULL, null=True)\n', (3532, 3576), False, 'from django.db import models\n'), ((3594, 3688), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'choices': 'ANNOTATION', 'default': '"""oth"""', 'help_text': '"""PD Symptom"""'}), "(max_length=3, choices=ANNOTATION, default='oth', help_text\n ='PD Symptom')\n", (3610, 3688), False, 'from django.db import models\n'), ((3734, 3832), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'help_text': '"""Note regarding annotation"""', 'null': '(True)', 'blank': '(True)'}), "(max_length=500, help_text='Note regarding annotation',\n null=True, blank=True)\n", (3750, 3832), False, 'from django.db import models\n'), ((4293, 4327), 'django.db.models.AutoField', 
'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (4309, 4327), False, 'from django.db import models\n'), ((4341, 4443), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""CameraData"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'related_name': '"""c_annotations"""'}), "('CameraData', on_delete=models.CASCADE, null=True,\n related_name='c_annotations')\n", (4358, 4443), False, 'from django.db import models\n'), ((4457, 4513), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(11)', 'help_text': '"""hh:mm:ss:ff"""'}), "(max_length=11, help_text='hh:mm:ss:ff')\n", (4473, 4513), False, 'from django.db import models\n'), ((4529, 4585), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(11)', 'help_text': '"""hh:mm:ss:ff"""'}), "(max_length=11, help_text='hh:mm:ss:ff')\n", (4545, 4585), False, 'from django.db import models\n'), ((4602, 4663), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(User, on_delete=models.SET_NULL, null=True)\n', (4619, 4663), False, 'from django.db import models\n'), ((4681, 4775), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'choices': 'ANNOTATION', 'default': '"""oth"""', 'help_text': '"""PD Symptom"""'}), "(max_length=3, choices=ANNOTATION, default='oth', help_text\n ='PD Symptom')\n", (4697, 4775), False, 'from django.db import models\n'), ((4821, 4919), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'help_text': '"""Note regarding annotation"""', 'null': '(True)', 'blank': '(True)'}), "(max_length=500, help_text='Note regarding annotation',\n null=True, blank=True)\n", (4837, 4919), False, 'from django.db import models\n'), ((5384, 5418), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (5400, 5418), False, 'from django.db import models\n'), ((5436, 5528), 
'django.db.models.ForeignKey', 'models.ForeignKey', (['"""CameraAnnotation"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""comments"""'}), "('CameraAnnotation', on_delete=models.CASCADE,\n related_name='comments')\n", (5453, 5528), False, 'from django.db import models\n'), ((5538, 5599), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(User, on_delete=models.SET_NULL, null=True)\n', (5555, 5599), False, 'from django.db import models\n'), ((5616, 5658), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (5636, 5658), False, 'from django.db import models\n'), ((5670, 5688), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (5686, 5688), False, 'from django.db import models\n'), ((5966, 6000), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (5982, 6000), False, 'from django.db import models\n'), ((6016, 6117), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""WearableData"""'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'related_name': '"""data_point"""'}), "('WearableData', on_delete=models.CASCADE, null=True,\n related_name='data_point')\n", (6033, 6117), False, 'from django.db import models\n'), ((6126, 6155), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (6153, 6155), False, 'from django.db import models\n'), ((6172, 6191), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (6189, 6191), False, 'from django.db import models\n')] |
import datetime
import canal
from influxdb import InfluxDBClient
class IMU(canal.Measurement):
    """InfluxDB measurement schema for inertial (IMU) samples.

    NOTE(review): the demo below constructs IMU(...) with keyword names
    acc_x/acc_y/acc_z, which do not match the accelerometer_* field names
    declared here — confirm against the canal API whether such aliases
    are accepted.
    """
    accelerometer_x = canal.IntegerField()
    accelerometer_y = canal.IntegerField()
    accelerometer_z = canal.IntegerField()
    gyroscope_x = canal.IntegerField()
    gyroscope_y = canal.IntegerField()
    gyroscope_z = canal.IntegerField()
    # Tag distinguishing which user the samples belong to.
    user_id = canal.Tag()
if __name__ == "__main__":
    # Demo: write one minute of once-per-second dummy IMU samples to a
    # local InfluxDB, read them back, and check the round trip.
    start_date = datetime.datetime.now(datetime.timezone.utc)
    duration = datetime.timedelta(seconds=60)
    user_id = 12345678
    client = InfluxDBClient(
        host="localhost",
        port=8086,
        database="canal"
    )
    # Write some dummy IMU data, sampled once per second
    num_imu_samples = int(duration.total_seconds())
    # NOTE(review): the keyword names below (acc_x, gyro_x, ...) do not
    # match the field names declared on IMU (accelerometer_x,
    # gyroscope_x, ...); verify against the canal API that these are
    # accepted aliases.
    imu = IMU(
        time=[start_date + datetime.timedelta(seconds=d) for d in
              range(num_imu_samples)],
        acc_x=range(0, 1 * num_imu_samples, 1),
        acc_y=range(0, 2 * num_imu_samples, 2),
        acc_z=range(0, 3 * num_imu_samples, 3),
        gyro_x=range(0, 4 * num_imu_samples, 4),
        gyro_y=range(0, 5 * num_imu_samples, 5),
        gyro_z=range(0, 6 * num_imu_samples, 6),
        user_id=user_id
    )
    client.write(
        data=imu.to_line_protocol(),
        params=dict(
            db="canal"
        )
    )
    # Read back the IMU data
    imu_resp = client.query(IMU.make_query_string(
        time__gte=start_date,
        time__lte=start_date + duration,
        user_id=user_id
    ))
    # Round-trip check: the data read back should equal what was written.
    assert imu == IMU.from_json(imu_resp.raw)
| [
"canal.Tag",
"influxdb.InfluxDBClient",
"canal.IntegerField",
"datetime.datetime.now",
"datetime.timedelta"
] | [((120, 140), 'canal.IntegerField', 'canal.IntegerField', ([], {}), '()\n', (138, 140), False, 'import canal\n'), ((163, 183), 'canal.IntegerField', 'canal.IntegerField', ([], {}), '()\n', (181, 183), False, 'import canal\n'), ((206, 226), 'canal.IntegerField', 'canal.IntegerField', ([], {}), '()\n', (224, 226), False, 'import canal\n'), ((245, 265), 'canal.IntegerField', 'canal.IntegerField', ([], {}), '()\n', (263, 265), False, 'import canal\n'), ((284, 304), 'canal.IntegerField', 'canal.IntegerField', ([], {}), '()\n', (302, 304), False, 'import canal\n'), ((323, 343), 'canal.IntegerField', 'canal.IntegerField', ([], {}), '()\n', (341, 343), False, 'import canal\n'), ((358, 369), 'canal.Tag', 'canal.Tag', ([], {}), '()\n', (367, 369), False, 'import canal\n'), ((416, 460), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (437, 460), False, 'import datetime\n'), ((476, 506), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (494, 506), False, 'import datetime\n'), ((544, 605), 'influxdb.InfluxDBClient', 'InfluxDBClient', ([], {'host': '"""localhost"""', 'port': '(8086)', 'database': '"""canal"""'}), "(host='localhost', port=8086, database='canal')\n", (558, 605), False, 'from influxdb import InfluxDBClient\n'), ((788, 817), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'd'}), '(seconds=d)\n', (806, 817), False, 'import datetime\n')] |
#
# This file is part of ravstack. Ravstack is free software available under
# the terms of the MIT license. See the file "LICENSE" that was provided
# together with this source file for the licensing terms.
#
# Copyright (c) 2015 the ravstack authors. See the file "AUTHORS" for a
# complete list.
from __future__ import absolute_import, print_function
import sys
import logging
from . import config, defaults, util
prog_name = __name__.split('.')[0]  # top-level package name
LOG = logging.getLogger(prog_name)
CONF = config.Config()
# Integer flags overridable via environment variables of the same name.
DEBUG = util.EnvInt('DEBUG')
VERBOSE = util.EnvInt('VERBOSE')
LOG_STDERR = util.EnvInt('LOG_STDERR')
# Current logging context; when set it is interpolated into the log template.
log_context = ''
log_datetime = '%(asctime)s '
log_template = '%(levelname)s [%(name)s] %(message)s'
log_ctx_template = '%(levelname)s [{}] [%(name)s] %(message)s'
def setup_config():
    """Populate the global CONF object and return it.

    Loads the schema and configuration file, overlays environment
    variables, fills in the Ravello application name from instance
    metadata when it is not configured, and exports the result back to
    the environment.
    """
    CONF.set_schema(defaults.config_schema)
    CONF.read_file(defaults.config_file)
    CONF.update_from_env()
    meta = util.get_ravello_metadata()
    # '<None>' is the placeholder for "not configured": prefer the
    # application name reported by the Ravello metadata service.
    if 'appName' in meta and CONF['ravello']['application'] == '<None>':
        CONF['ravello']['application'] = meta['appName']
    CONF.update_to_env()
    # Fix: the docstring always promised the configuration object, but the
    # function returned None.
    return CONF
def setup_logging(context=None):
    """Set up or reconfigure logging."""
    global log_context
    root = logging.getLogger()
    # Start from a clean slate so repeated calls do not stack handlers.
    if root.handlers:
        del root.handlers[:]
    if context is not None:
        log_context = context
    if log_context:
        template = log_ctx_template.format(log_context)
    else:
        template = log_template
    # Emit to stderr when requested; otherwise swallow records at the root.
    if LOG_STDERR:
        stderr_handler = logging.StreamHandler(sys.stderr)
        stderr_handler.setFormatter(logging.Formatter(template))
        root.addHandler(stderr_handler)
    else:
        root.addHandler(logging.NullHandler())
    # Also log to a file, if one can be opened for appending.
    logfile = defaults.log_file
    if util.can_open(logfile, 'a'):
        file_handler = logging.FileHandler(logfile)
        file_handler.setFormatter(logging.Formatter(log_datetime + template))
        root.addHandler(file_handler)
    if DEBUG:
        root_level = logging.DEBUG
    elif VERBOSE:
        root_level = logging.INFO
    else:
        root_level = logging.ERROR
    root.setLevel(root_level)
    quiet_level = logging.DEBUG if DEBUG else logging.ERROR
    # A little less verbosity for requests.
    logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(quiet_level)
    # Route warnings through logging; silences the "insecure platform"
    # warning from requests on Py2.7.x under default verbosity.
    logging.captureWarnings(True)
    logging.getLogger('py.warnings').setLevel(quiet_level)
# Run a main function
def run_main(func):
    """Run a main function with configuration and logging set up.

    Uncaught exceptions are logged with a traceback; under DEBUG they are
    re-raised, otherwise a short message is printed for the user.
    """
    setup_config()
    setup_logging()
    # Run the provided main function.
    try:
        func()
    except Exception as e:
        LOG.error('Uncaught exception:', exc_info=True)
        if DEBUG:
            raise
        # Fix: diagnostics belong on stderr, not stdout.
        print('Error: {!s}'.format(e), file=sys.stderr)
| [
"logging.getLogger",
"logging.NullHandler",
"logging.StreamHandler",
"logging.captureWarnings",
"logging.Formatter",
"logging.FileHandler"
] | [((463, 491), 'logging.getLogger', 'logging.getLogger', (['prog_name'], {}), '(prog_name)\n', (480, 491), False, 'import logging\n'), ((1240, 1259), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1257, 1259), False, 'import logging\n'), ((2111, 2172), 'logging.getLogger', 'logging.getLogger', (['"""requests.packages.urllib3.connectionpool"""'], {}), "('requests.packages.urllib3.connectionpool')\n", (2128, 2172), False, 'import logging\n'), ((2344, 2373), 'logging.captureWarnings', 'logging.captureWarnings', (['(True)'], {}), '(True)\n', (2367, 2373), False, 'import logging\n'), ((2387, 2419), 'logging.getLogger', 'logging.getLogger', (['"""py.warnings"""'], {}), "('py.warnings')\n", (2404, 2419), False, 'import logging\n'), ((1535, 1568), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (1556, 1568), False, 'import logging\n'), ((1829, 1857), 'logging.FileHandler', 'logging.FileHandler', (['logfile'], {}), '(logfile)\n', (1848, 1857), False, 'import logging\n'), ((1598, 1625), 'logging.Formatter', 'logging.Formatter', (['template'], {}), '(template)\n', (1615, 1625), False, 'import logging\n'), ((1694, 1715), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (1713, 1715), False, 'import logging\n'), ((1887, 1929), 'logging.Formatter', 'logging.Formatter', (['(log_datetime + template)'], {}), '(log_datetime + template)\n', (1904, 1929), False, 'import logging\n')] |
import os.path
import zipfile
import pandas as pd
from collections import defaultdict
from graphdb_builder import builder_utils
#########################
# SMPDB database #
#########################
def parser(databases_directory, download=True):
    """Parse the SMPDB archives into pathway entities and relationships.

    Downloads (optionally) and unpacks each configured SMPDB zip archive,
    dispatching to the dataset-specific parse function, and cleans up the
    working directory afterwards.
    """
    config = builder_utils.get_config(config_name="smpdbConfig.yml", data_type='databases')
    urls = config['smpdb_urls']
    entities = set()
    relationships = defaultdict(set)
    entities_header = config['pathway_header']
    relationships_headers = config['relationships_header']
    directory = os.path.join(databases_directory, "SMPDB")
    builder_utils.checkDirectory(directory)
    for dataset, url in urls.items():
        archive_name = url.split('/')[-1]
        if download:
            builder_utils.downloadDB(url, directory)
        archive_path = os.path.join(directory, archive_name)
        with zipfile.ZipFile(archive_path) as rf:
            if dataset == "pathway":
                entities = parsePathways(config, rf)
            elif dataset == "protein":
                relationships.update(parsePathwayProteinRelationships(rf))
            elif dataset == "metabolite":
                relationships.update(parsePathwayMetaboliteDrugRelationships(rf))
    builder_utils.remove_directory(directory)
    return entities, relationships, entities_header, relationships_headers
def parsePathways(config, fhandler):
    """Extract pathway entities from the SMPDB pathway zip archive.

    :param dict config: parser configuration; must contain 'linkout_url'
        with a "PATHWAY" placeholder for the pathway identifier.
    :param fhandler: an open ``zipfile.ZipFile`` containing pathway CSVs.
    :return: set of (identifier, "Pathway", name, description, organism,
        linkout, "SMPDB") tuples.
    """
    import inspect
    entities = set()
    url = config['linkout_url']
    organism = 9606  # NCBI taxonomy id for human
    # Fix: 'error_bad_lines' was removed in pandas 2.0; use 'on_bad_lines'
    # when the installed pandas supports it.
    if 'on_bad_lines' in inspect.signature(pd.read_csv).parameters:
        csv_kwargs = {'on_bad_lines': 'skip'}
    else:  # pandas < 1.3
        csv_kwargs = {'error_bad_lines': False}
    for filename in fhandler.namelist():
        # Fix: skip directory entries of the archive itself; os.path.isdir
        # checked the local filesystem and never matched zip members.
        if filename.endswith('/'):
            continue
        with fhandler.open(filename) as f:
            df = pd.read_csv(f, sep=',', low_memory=False, **csv_kwargs)
            # itertuples keeps the original positional column access while
            # avoiding deprecated integer indexing of labelled Series.
            for row in df.itertuples(index=False):
                identifier = row[0]
                name = row[1]
                description = row[3]
                linkout = url.replace("PATHWAY", identifier)
                entities.add((identifier, "Pathway", name, description, organism, linkout, "SMPDB"))
    return entities
def parsePathwayProteinRelationships(fhandler):
    """Extract protein-to-pathway membership relationships from the archive.

    :param fhandler: an open ``zipfile.ZipFile`` containing protein CSVs.
    :return: defaultdict mapping ("protein", "annotated_to_pathway") to a
        set of (protein, pathway, "ANNOTATED_TO_PATHWAY", evidence,
        organism, location, "SMPDB") tuples.
    """
    import inspect
    relationships = defaultdict(set)
    loc = "unspecified"
    evidence = "unspecified"
    organism = 9606  # NCBI taxonomy id for human
    # Fix: 'error_bad_lines' was removed in pandas 2.0; use 'on_bad_lines'
    # when the installed pandas supports it.
    if 'on_bad_lines' in inspect.signature(pd.read_csv).parameters:
        csv_kwargs = {'on_bad_lines': 'skip'}
    else:  # pandas < 1.3
        csv_kwargs = {'error_bad_lines': False}
    for filename in fhandler.namelist():
        # Fix: skip directory entries of the archive itself; os.path.isdir
        # checked the local filesystem and never matched zip members.
        if filename.endswith('/'):
            continue
        with fhandler.open(filename) as f:
            df = pd.read_csv(f, sep=',', low_memory=False, **csv_kwargs)
            for row in df.itertuples(index=False):
                identifier = row[0]
                protein = row[3]
                # NOTE: pandas reads missing values as NaN, not '', so this
                # test mirrors the original behaviour exactly.
                if protein != '':
                    relationships[("protein", "annotated_to_pathway")].add((protein, identifier, "ANNOTATED_TO_PATHWAY", evidence, organism, loc, "SMPDB"))
    return relationships
def parsePathwayMetaboliteDrugRelationships(fhandler):
    """Extract metabolite- and drug-to-pathway relationships from the archive.

    :param fhandler: an open ``zipfile.ZipFile`` containing metabolite CSVs.
    :return: defaultdict mapping ("metabolite"/"drug",
        "annotated_to_pathway") to sets of (entity, pathway,
        "ANNOTATED_TO_PATHWAY", evidence, organism, location, "SMPDB")
        tuples.
    """
    import inspect
    relationships = defaultdict(set)
    loc = "unspecified"
    evidence = "unspecified"
    organism = 9606  # NCBI taxonomy id for human
    # Fix: 'error_bad_lines' was removed in pandas 2.0; use 'on_bad_lines'
    # when the installed pandas supports it.
    if 'on_bad_lines' in inspect.signature(pd.read_csv).parameters:
        csv_kwargs = {'on_bad_lines': 'skip'}
    else:  # pandas < 1.3
        csv_kwargs = {'error_bad_lines': False}
    for filename in fhandler.namelist():
        # Fix: skip directory entries of the archive itself; os.path.isdir
        # checked the local filesystem and never matched zip members.
        if filename.endswith('/'):
            continue
        with fhandler.open(filename) as f:
            df = pd.read_csv(f, sep=',', low_memory=False, **csv_kwargs)
            for row in df.itertuples(index=False):
                identifier = row[0]
                metabolite = row[5]
                drug = row[8]
                # NOTE: pandas reads missing values as NaN, not '', so these
                # tests mirror the original behaviour exactly.
                if metabolite != '':
                    relationships[("metabolite", "annotated_to_pathway")].add((metabolite, identifier, "ANNOTATED_TO_PATHWAY", evidence, organism, loc, "SMPDB"))
                if drug != "":
                    relationships[("drug", "annotated_to_pathway")].add((drug, identifier, "ANNOTATED_TO_PATHWAY", evidence, organism, loc, "SMPDB"))
    return relationships
| [
"graphdb_builder.builder_utils.get_config",
"graphdb_builder.builder_utils.downloadDB",
"zipfile.ZipFile",
"pandas.read_csv",
"collections.defaultdict",
"graphdb_builder.builder_utils.remove_directory",
"graphdb_builder.builder_utils.checkDirectory"
] | [((268, 346), 'graphdb_builder.builder_utils.get_config', 'builder_utils.get_config', ([], {'config_name': '"""smpdbConfig.yml"""', 'data_type': '"""databases"""'}), "(config_name='smpdbConfig.yml', data_type='databases')\n", (292, 346), False, 'from graphdb_builder import builder_utils\n'), ((420, 436), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (431, 436), False, 'from collections import defaultdict\n'), ((606, 645), 'graphdb_builder.builder_utils.checkDirectory', 'builder_utils.checkDirectory', (['directory'], {}), '(directory)\n', (634, 645), False, 'from graphdb_builder import builder_utils\n'), ((1252, 1293), 'graphdb_builder.builder_utils.remove_directory', 'builder_utils.remove_directory', (['directory'], {}), '(directory)\n', (1282, 1293), False, 'from graphdb_builder import builder_utils\n'), ((2121, 2137), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (2132, 2137), False, 'from collections import defaultdict\n'), ((2853, 2869), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (2864, 2869), False, 'from collections import defaultdict\n'), ((772, 812), 'graphdb_builder.builder_utils.downloadDB', 'builder_utils.downloadDB', (['url', 'directory'], {}), '(url, directory)\n', (796, 812), False, 'from graphdb_builder import builder_utils\n'), ((883, 911), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zipped_file'], {}), '(zipped_file)\n', (898, 911), False, 'import zipfile\n'), ((1631, 1695), 'pandas.read_csv', 'pd.read_csv', (['f'], {'sep': '""","""', 'error_bad_lines': '(False)', 'low_memory': '(False)'}), "(f, sep=',', error_bad_lines=False, low_memory=False)\n", (1642, 1695), True, 'import pandas as pd\n'), ((2360, 2424), 'pandas.read_csv', 'pd.read_csv', (['f'], {'sep': '""","""', 'error_bad_lines': '(False)', 'low_memory': '(False)'}), "(f, sep=',', error_bad_lines=False, low_memory=False)\n", (2371, 2424), True, 'import pandas as pd\n'), ((3092, 3156), 'pandas.read_csv', 'pd.read_csv', 
(['f'], {'sep': '""","""', 'error_bad_lines': '(False)', 'low_memory': '(False)'}), "(f, sep=',', error_bad_lines=False, low_memory=False)\n", (3103, 3156), True, 'import pandas as pd\n')] |
import time
#Timing stuff
lastTime = None       # monotonic timestamp of the start of the current frame
prevFrameTime = 0     # duration of the frame before last, for peak reporting

def waitFramerate(T):  # TODO if we have enough time, call the garbage collector
    """Sleep just long enough to hold the frame period at T seconds.

    Tracks frame timing via the monotonic clock and reports when the
    previous frame was a local peak that overshot the target period by
    more than 20%.
    """
    global lastTime, prevFrameTime
    now = time.monotonic()
    if not lastTime:
        # First call (or reset): just record the start of this frame.
        lastTime = now
        return
    frameTime = now - lastTime   # how long the last frame took
    remaining = T - frameTime    # time left in the target period
    if prevFrameTime > frameTime and prevFrameTime > 1.2 * T:
        print("Peak frame took " + str(prevFrameTime)[:5] + "/"
              + str(int(1.0 / prevFrameTime)) + " FPS (Target " + str(T)[:5] + ")")
    if remaining > 0:
        # Anchor the next frame to the ideal schedule, then sleep it off.
        lastTime = lastTime + T
        time.sleep(remaining)
    else:
        # Overran the budget: restart timing from now.
        lastTime = now
    prevFrameTime = frameTime
| [
"time.monotonic",
"time.sleep"
] | [((183, 199), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (197, 199), False, 'import time\n'), ((662, 683), 'time.sleep', 'time.sleep', (['sleepTime'], {}), '(sleepTime)\n', (672, 683), False, 'import time\n')] |
from __future__ import annotations
import re
from typing import Union
import warp.yul.ast as ast
from warp.yul.AstVisitor import AstVisitor
from warp.yul.WarpException import WarpException
class AstParser:
def __init__(self, text: str):
self.lines = text.splitlines()
if len(self.lines) == 0:
raise WarpException("Text should not be empty")
self.pos = 0
def parse_typed_name(self) -> ast.TypedName:
tabs = self.get_tabs()
node_type_name = self.get_word(tabs)
assert node_type_name == "TypedName:", "This node should be of type TypedNode"
self.pos += 1
assert self.get_tabs() == tabs + 1, "Wrong indentation"
node_name, node_type = self.get_word(tabs + 1).split(":")
self.pos += 1
return ast.TypedName(name=node_name, type=node_type)
def parse_literal(self) -> ast.Literal:
tabs = self.get_tabs()
assert self.get_word(tabs).startswith(
"Literal:"
), "This node should be of type Literal"
value = self.get_word(tabs + 8)
self.pos += 1
try:
value = int(value)
except ValueError:
pass
return ast.Literal(value=value)
def parse_identifier(self) -> ast.Identifier:
tabs = self.get_tabs()
assert self.get_word(tabs).startswith(
"Identifier:"
), "This node should be of type Identifier"
name = self.get_word(tabs + 11)
self.pos += 1
return ast.Identifier(name=name)
def parse_assignment(self) -> ast.Assignment:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "Assignment:"
), "This node should be of type Assignment"
self.pos += 1
assert self.get_word(tabs + 1) == "Variables:"
self.pos += 1
variables_list = self.parse_list(tabs + 1, self.parse_identifier)
assert self.get_word(tabs + 1) == "Value:"
self.pos += 1
return ast.Assignment(
variable_names=variables_list, value=self.parse_expression()
)
def parse_function_call(self) -> ast.FunctionCall:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "FunctionCall:"
), "This node should be of type FunctionCall"
self.pos += 1
return ast.FunctionCall(
function_name=self.parse_identifier(),
arguments=self.parse_list(tabs, self.parse_expression),
)
def parse_expression_statement(self) -> ast.Statement:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "ExpressionStatement:"
), "This node should be of type ExpressionStatement"
self.pos += 1
return ast.ExpressionStatement(expression=self.parse_expression())
def parse_variable_declaration(self) -> ast.VariableDeclaration:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "VariableDeclaration:"
), "This node should be of type VariableDeclaration"
self.pos += 1
assert self.get_tabs() == tabs + 1
assert self.get_word(tabs + 1) == "Variables:"
self.pos += 1
variables = self.parse_list(tabs + 1, self.parse_typed_name)
assert self.get_tabs() == tabs + 1
word = self.get_word(tabs + 1)
self.pos += 1
assert word.startswith("Value")
if word.endswith("None"):
value = None
else:
value = self.parse_expression()
return ast.VariableDeclaration(variables=variables, value=value)
def parse_block(self) -> ast.Block:
    """Parse a 'Block:' node into an ast.Block of its child statements."""
    indent = self.get_tabs()
    assert self.get_word(indent) == "Block:", "This node should be of type Block"
    self.pos += 1
    stmts = self.parse_list(indent, self.parse_statement)
    return ast.Block(statements=tuple(stmts))
def parse_function_definition(self) -> ast.FunctionDefinition:
    """Parse a 'FunctionDefinition:' node.

    Layout (fixed order): 'Name:', 'Parameters:' list, 'Return Variables:'
    list, then a 'Body:' block.
    """
    tabs = self.get_tabs()
    assert (
        self.get_word(tabs) == "FunctionDefinition:"
    ), "This node should be of type FunctionDefinition"
    self.pos += 1
    assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1).startswith(
        "Name:"
    )
    # Name text starts 7 characters past the indent — presumably skipping a
    # "Name: " style prefix; TODO confirm against the AST dumper's format.
    fun_name = self.get_word(tabs + 7)
    self.pos += 1
    assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Parameters:"
    self.pos += 1
    params = self.parse_list(tabs + 1, self.parse_typed_name)
    assert (
        self.get_tabs() == tabs + 1
        and self.get_word(tabs + 1) == "Return Variables:"
    )
    self.pos += 1
    returns = self.parse_list(tabs + 1, self.parse_typed_name)
    assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Body:"
    self.pos += 1
    body = self.parse_block()
    return ast.FunctionDefinition(
        name=fun_name, parameters=params, return_variables=returns, body=body
    )
def parse_if(self) -> ast.If:
    """Parse an 'If:' node: condition, body block, optional else block."""
    indent = self.get_tabs()
    assert self.get_word(indent) == "If:", "This node should be of type If"
    self.pos += 1
    cond = self.parse_expression()
    then_block = self.parse_block()
    # A further-indented block after the body is the else branch.
    alt_block = self.parse_block() if self.get_tabs() > indent else None
    return ast.If(condition=cond, body=then_block, else_body=alt_block)
def parse_case(self) -> ast.Case:
    """Parse a 'Case:' node: a literal match value (or the default marker)
    followed by the case body block.
    """
    tabs = self.get_tabs()
    assert self.get_word(tabs) == "Case:", "This node should be of type Case"
    self.pos += 1
    try:
        # A regular case starts with a literal; parse_literal advances pos.
        value = self.parse_literal()
    except AssertionError:
        # Not a literal: must be the "Default" marker line. value=None
        # encodes the default case, and we skip the marker line ourselves.
        assert (
            self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Default"
        ), "The value must be a literal or None (when it's the default case)"
        value = None
        self.pos += 1
    return ast.Case(value=value, body=self.parse_block())
def parse_switch(self) -> ast.Switch:
    """Parse a 'Switch:' node: the scrutinee expression plus its cases."""
    indent = self.get_tabs()
    assert self.get_word(indent) == "Switch:", "This node should be of type Switch"
    self.pos += 1
    # The switched-on expression must be consumed before the case list.
    scrutinee = self.parse_expression()
    case_nodes = self.parse_list(indent, self.parse_case)
    return ast.Switch(expression=scrutinee, cases=case_nodes)
def parse_for_loop(self) -> ast.ForLoop:
    """Parse a 'ForLoop:' node: pre block, condition, post block, body."""
    indent = self.get_tabs()
    assert self.get_word(indent) == "ForLoop:", "This node should be of type ForLoop"
    self.pos += 1
    # The four children appear in this fixed order in the dump; parse them
    # into named temporaries so the consumption order is explicit.
    pre_block = self.parse_block()
    cond = self.parse_expression()
    post_block = self.parse_block()
    loop_body = self.parse_block()
    return ast.ForLoop(
        pre=pre_block, condition=cond, post=post_block, body=loop_body
    )
def parse_break(self) -> ast.Break:
    """Parse a 'Break' leaf node."""
    indent = self.get_tabs()
    header = self.get_word(indent)
    assert header == "Break", "This node should be of type Break"
    self.pos += 1
    return ast.Break()
def parse_continue(self) -> ast.Continue:
    """Parse a 'Continue' leaf node."""
    indent = self.get_tabs()
    header = self.get_word(indent)
    assert header == "Continue", "This node should be of type Continue"
    self.pos += 1
    return ast.Continue()
def parse_leave(self) -> ast.Leave:
    """Parse a 'Leave' leaf node."""
    tabs = self.get_tabs()
    assert self.get_word(tabs) == "Leave", "This node should be of type Leave"
    self.pos += 1
    # Returns the shared ast.LEAVE instance instead of constructing a new
    # node — presumably a module-level singleton; confirm in warp.yul.ast.
    return ast.LEAVE
def parse_node(self) -> ast.Node:
    """Dispatch to the parse_* method matching the current line's node type."""
    indent = self.get_tabs()
    # The node type is the text before the first ':' (leaf nodes have none).
    type_name = self.get_word(indent).split(":")[0]
    handler = getattr(self, f"parse_{self.get_name(type_name)}", None)
    if handler is None:
        raise WarpException("Wrong node type name!")
    return handler()
def parse_statement(self) -> ast.Statement:
    """Parse the current node, checking it is one of the statement kinds."""
    allowed = (
        "ExpressionStatement",
        "Assignment",
        "VariableDeclaration",
        "FunctionDefinition",
        "If",
        "Switch",
        "ForLoop",
        "Break",
        "Continue",
        "Leave",
        "Block",
    )
    indent = self.get_tabs()
    kind = self.get_word(indent).split(":")[0]
    assert kind in allowed, "Not a valid statement"
    return ast.assert_statement(self.parse_node())
def parse_expression(self) -> ast.Expression:
    """Parse the current node, checking it is an expression kind."""
    indent = self.get_tabs()
    kind = self.get_word(indent).split(":")[0]
    assert kind in (
        "Literal",
        "Identifier",
        "FunctionCall",
    ), "Node type must be an expression"
    return ast.assert_expression(self.parse_node())
def parse_list(self, tabs, parser):
    """Repeatedly apply *parser* while lines remain indented deeper than *tabs*.

    Returns the parsed items in encounter order; *parser* is responsible
    for advancing the cursor.
    """
    parsed = []
    while self.pos < len(self.lines):
        if self.get_tabs() <= tabs:
            break
        parsed.append(parser())
    return parsed
def get_tabs(self):
    """Count the leading tabs of the current line.

    Returns 0 when the input is exhausted.

    Raises:
        WarpException: if the current line consists solely of tabs.
    """
    tabs = 0
    if self.pos < len(self.lines):
        for c in self.lines[self.pos]:
            if not c == "\t":
                break
            tabs += 1
        else:
            # for/else: the loop finished without hitting a non-tab char.
            raise WarpException(
                "Lines are not supposed to be filled only with tabs"
            )
    return tabs
def get_word(self, start: int) -> str:
    """Return the current line's text from column *start* onward."""
    return self.lines[self.pos][start:]
def get_name(self, name):
    """Convert a CamelCase node-type name to snake_case.

    e.g. 'FunctionDefinition' -> 'function_definition'.
    """
    pieces = re.findall("[A-Z][^A-Z]*", name)
    return "_".join(pieces).lower()
class YulPrinter(AstVisitor):
    """Pretty-prints a Yul AST back to Yul source text.

    Each visit_* method returns the string rendering of its node; ``tabs``
    is the current indentation depth (one '\\t' per level).
    """

    def format(self, node: ast.Node, tabs: int = 0) -> str:
        """Entry point: render *node* at indentation depth *tabs*."""
        return self.visit(node, tabs)

    def visit_typed_name(self, node: ast.TypedName, tabs: int = 0) -> str:
        # Only the bare name is printed; the type is not emitted.
        return f"{node.name}"

    def visit_literal(self, node: ast.Literal, tabs: int = 0) -> str:
        return f"{node.value}"

    def visit_identifier(self, node: ast.Identifier, tabs: int = 0) -> str:
        return f"{node.name}"

    def visit_assignment(self, node: ast.Assignment, tabs: int = 0) -> str:
        # Renders e.g. "a, b := f(x)".
        variables = ", ".join(self.visit_list(node.variable_names))
        value = self.visit(node.value, 0)
        return f"{variables} := {value}"

    def visit_function_call(self, node: ast.FunctionCall, tabs: int = 0) -> str:
        name = self.visit(node.function_name)
        args = ", ".join(self.visit_list(node.arguments))
        return f"{name}({args})"

    def visit_expression_statement(
        self, node: ast.ExpressionStatement, tabs: int = 0
    ) -> str:
        return self.visit(node.expression, tabs)

    def visit_variable_declaration(
        self, node: ast.VariableDeclaration, tabs: int = 0
    ) -> str:
        # "let a, b" or "let a, b := <expr>" when an initializer is present.
        variables = ", ".join(self.visit_list(node.variables))
        value = ""
        if node.value is not None:
            value = f" := {self.visit(node.value)}"
        return f"let {variables}{value}"

    def visit_block(self, node: ast.Block, tabs: int = 0) -> str:
        open_block = "{"
        close_block = "}"
        # Short blocks (see is_short) are rendered inline on one line.
        if self.is_short(node.statements):
            statements = "".join(self.visit_list(node.statements))
            return " ".join([open_block, statements, close_block])
        # Otherwise indent each statement one level deeper than the braces.
        statements = self.visit_list(node.statements, tabs + 1)
        statements = ["\t" * (tabs + 1) + stmt for stmt in statements]
        statements = "\n".join(statements)
        close_block = "\t" * tabs + close_block
        res = "\n".join([open_block, statements, close_block])
        return res

    def visit_function_definition(
        self, node: ast.FunctionDefinition, tabs: int = 0
    ) -> str:
        parameters = ", ".join(self.visit_list(node.parameters, 0))
        ret_vars = ", ".join(self.visit_list(node.return_variables, 0))
        body = self.visit(node.body, tabs)
        res = f"function {node.name}({parameters})"
        # The "-> r1, r2" clause only appears when something is returned.
        if len(node.return_variables) > 0:
            res += f" -> {ret_vars}"
        res += f" {body}"
        return res

    def visit_if(self, node: ast.If, tabs: int = 0) -> str:
        res = f"if {self.visit(node.condition)} "
        res += self.visit(node.body, tabs)
        if node.else_body is not None:
            res += "\n" + "\t" * tabs + "else "
            res += self.visit(node.else_body, tabs)
        return res

    def visit_case(self, node: ast.Case, tabs: int = 0) -> str:
        res = "\t" * tabs
        # A value of None marks the "default" case.
        if node.value is not None:
            res += f"case {self.visit(node.value)} "
        else:
            res += "default "
        res += self.visit(node.body, tabs)
        return res

    def visit_switch(self, node: ast.Switch, tabs: int = 0) -> str:
        res = f"switch {self.visit(node.expression)}\n"
        res += "\n".join(self.visit_list(node.cases, tabs))
        return res

    def visit_for_loop(self, node: ast.ForLoop, tabs: int = 0) -> str:
        res = "for "
        res += self.visit(node.pre, tabs)
        res += f" {self.visit(node.condition)} "
        res += self.visit(node.post, tabs)
        res += f"\n{self.visit(node.body, tabs)}"
        return res

    def visit_break(self, node: ast.Break, tabs: int = 0) -> str:
        return "break"

    def visit_continue(self, node: ast.Continue, tabs: int = 0) -> str:
        return "continue"

    def visit_leave(self, node: ast.Leave, tabs: int = 0) -> str:
        return "leave"

    def is_short(self, stmts: tuple) -> bool:
        """Return True when *stmts* may be printed inline on a single line.

        Empty blocks and blocks holding one simple (non-compound)
        statement qualify.
        """
        if len(stmts) == 0:
            return True
        return len(stmts) == 1 and type(stmts[0]).__name__ not in [
            "Block",
            "FunctionDefinition",
            "If",
            "Switch",
            "ForLoop",
        ]
| [
"warp.yul.ast.If",
"warp.yul.ast.Break",
"warp.yul.WarpException.WarpException",
"warp.yul.ast.Literal",
"warp.yul.ast.Identifier",
"warp.yul.ast.Continue",
"warp.yul.ast.VariableDeclaration",
"re.findall",
"warp.yul.ast.FunctionDefinition",
"warp.yul.ast.TypedName"
] | [((802, 847), 'warp.yul.ast.TypedName', 'ast.TypedName', ([], {'name': 'node_name', 'type': 'node_type'}), '(name=node_name, type=node_type)\n', (815, 847), True, 'import warp.yul.ast as ast\n'), ((1210, 1234), 'warp.yul.ast.Literal', 'ast.Literal', ([], {'value': 'value'}), '(value=value)\n', (1221, 1234), True, 'import warp.yul.ast as ast\n'), ((1520, 1545), 'warp.yul.ast.Identifier', 'ast.Identifier', ([], {'name': 'name'}), '(name=name)\n', (1534, 1545), True, 'import warp.yul.ast as ast\n'), ((3554, 3611), 'warp.yul.ast.VariableDeclaration', 'ast.VariableDeclaration', ([], {'variables': 'variables', 'value': 'value'}), '(variables=variables, value=value)\n', (3577, 3611), True, 'import warp.yul.ast as ast\n'), ((4866, 4964), 'warp.yul.ast.FunctionDefinition', 'ast.FunctionDefinition', ([], {'name': 'fun_name', 'parameters': 'params', 'return_variables': 'returns', 'body': 'body'}), '(name=fun_name, parameters=params, return_variables=\n returns, body=body)\n', (4888, 4964), True, 'import warp.yul.ast as ast\n'), ((5346, 5405), 'warp.yul.ast.If', 'ast.If', ([], {'condition': 'condition', 'body': 'body', 'else_body': 'else_body'}), '(condition=condition, body=body, else_body=else_body)\n', (5352, 5405), True, 'import warp.yul.ast as ast\n'), ((6870, 6881), 'warp.yul.ast.Break', 'ast.Break', ([], {}), '()\n', (6879, 6881), True, 'import warp.yul.ast as ast\n'), ((7087, 7101), 'warp.yul.ast.Continue', 'ast.Continue', ([], {}), '()\n', (7099, 7101), True, 'import warp.yul.ast as ast\n'), ((335, 376), 'warp.yul.WarpException.WarpException', 'WarpException', (['"""Text should not be empty"""'], {}), "('Text should not be empty')\n", (348, 376), False, 'from warp.yul.WarpException import WarpException\n'), ((7593, 7631), 'warp.yul.WarpException.WarpException', 'WarpException', (['"""Wrong node type name!"""'], {}), "('Wrong node type name!')\n", (7606, 7631), False, 'from warp.yul.WarpException import WarpException\n'), ((9317, 9349), 're.findall', 're.findall', 
(['"""[A-Z][^A-Z]*"""', 'name'], {}), "('[A-Z][^A-Z]*', name)\n", (9327, 9349), False, 'import re\n'), ((9047, 9114), 'warp.yul.WarpException.WarpException', 'WarpException', (['"""Lines are not supposed to be filled only with tabs"""'], {}), "('Lines are not supposed to be filled only with tabs')\n", (9060, 9114), False, 'from warp.yul.WarpException import WarpException\n')] |
# encoding: utf-8
'''
@author: <NAME>
@contact: <EMAIL>
@software: basenef
@file: doc_generator.py
@date: 4/13/2019
@desc:
'''
import os
import sys
import time
from getpass import getuser
import matplotlib
import numpy as np
import json
from srfnef import Image, MlemFull
# Select the non-interactive Agg backend before pyplot is imported anywhere,
# so figures can be rendered without a display (e.g. on headless servers).
matplotlib.use('Agg')
# The report author defaults to the OS user running the generator.
author = getuser()
def title_block_gen():
    """Build the markdown title block for an auto-generated report.

    Returns:
        str: the document heading plus metadata lines (author, generation
        time, platform, OS language, placeholder counters).
    """
    timestamp = time.time()
    datetime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp)))
    # LANG may be absent (e.g. minimal containers / Windows); don't let a
    # missing locale variable crash report generation with a KeyError.
    lang = os.environ.get('LANG', 'unknown')
    title_block = f'''
# NEF AutoDoc {datetime}
- Author: {getuser()}
- Generation time: {datetime}
- Operation system: {sys.platform}
- OS language: {lang}
- Duration: 0.0 sec
- Total errors: 0
- Total warning: 0
- Description:
'''
    return title_block
def _text_gen_as_table(dct: dict = {}):
out_text = ['|key|values|\n|:---|:---|\n']
for key, val in dct.items():
if key == 'data':
out_text.append(f"| {key} | Ignored |\n")
elif not isinstance(val, dict):
if isinstance(val, str) and len(val) > 30:
out_text.append(f"| {key} | Ignored |\n")
else:
out_text.append(f"| {key} | {val} |\n")
else:
out_text.append(f"| {key} | {'Ignored'} |\n")
return out_text
def json_block_gen(dct=None):
    """Render the recon config dict (or its JSON string) as a markdown block.

    Note: mutates ``dct`` in place, rounding the image/emap voxel sizes to
    three decimals so the dump stays readable.

    Args:
        dct: recon configuration dict or its JSON string encoding.
            (Default changed from a shared mutable ``{}`` to ``None``.)

    Returns:
        str: a '## RECON JSON' section with a fenced JSON body.
    """
    if dct is None:
        dct = {}
    if isinstance(dct, str):
        dct = json.loads(dct)
    # .tolist() keeps the rounded sizes JSON-serializable plain floats.
    dct['image_config']['size'] = np.round(dct['image_config']['size'],
                                          decimals=3).tolist()
    if dct['emap'] is not None:
        dct['emap']['size'] = np.round(dct['emap']['size'], decimals=3).tolist()
    json_str = json.dumps(dct, indent=4)
    return "## RECON JSON\n```javascript\n" + json_str + "\n```\n"
def image_block_gen(img: Image, path: str):
    """Plot mid-plane slices and center-line profiles of *img* and save the
    figure as a timestamped PNG under *path*.

    Returns the markdown text for the image section.
    """
    print('Generating text blocks...')
    from matplotlib import pyplot as plt
    # Clip the display range at the 99.99th percentile to suppress hot voxels.
    vmax = np.percentile(img.data, 99.99)
    # Center index along each of the three axes; assumes img.data is a 3-D
    # volume — TODO confirm against the Image type.
    midind = [int(img.shape[i] / 2) for i in range(3)]
    plt.figure(figsize = (30, 10))
    # Top row: the three orthogonal mid-plane slices.
    plt.subplot(231)
    plt.imshow(img.data[midind[0], :, :], vmax = vmax)
    plt.subplot(232)
    plt.imshow(img.data[:, midind[1], :].transpose(), vmax = vmax)
    plt.subplot(233)
    plt.imshow(img.data[:, :, midind[2]].transpose(), vmax = vmax)
    # Bottom row: 1-D intensity profiles through the volume center.
    plt.subplot(234)
    plt.plot(img.data[midind[0], midind[1], :])
    plt.subplot(235)
    plt.plot(img.data[midind[0], :, midind[2]])
    plt.subplot(236)
    plt.plot(img.data[:, midind[1], midind[2]])
    timestamp = time.time()
    datetime_str = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(int(timestamp)))
    plt.savefig(path + f'/out_img{datetime_str}.png')
    # NOTE(review): only a bare newline is returned — the markdown image
    # reference to the saved PNG appears to be missing; confirm intent.
    out_text = f'\n'
    return out_text
def statistic_block_gen(dct: dict = {}):
    """Build a markdown table of per-entry statistics.

    Columns are the union of every short (<30 chars) string-valued key
    found across the sub-dicts; entries missing a column render as '-'.

    Returns a list of markdown lines: header, separator, then one row per
    top-level key.
    """
    lines = []
    # Collect the column set from all sub-dicts.
    columns = set()
    for sub in dct.values():
        for k, v in sub.items():
            if isinstance(v, str) and len(v) < 30:
                columns.add(k)
    header = '|name '
    separator = '|:---'
    for col in columns:
        header += '|' + col
    header += '|\n'
    for _ in columns:
        separator += '|:---'
    separator += '|\n'
    lines += [header, separator]
    for row_name, sub in dct.items():
        row = '| ' + row_name + ' '
        for col in columns:
            if col in sub:
                row += '|' + str(sub[col])
            else:
                row += '|-'
        row += '|\n'
        lines.append(row)
    return lines
def metric_block_gen(mask: np.ndarray, img: Image):
    """Compute the srfnef image-quality metrics of *img* against *mask*
    and render them as a '## IMAGE METRIC JSON' markdown block.
    """
    from srfnef import image_metric as metric
    dct = {}
    # contrast hot
    # Per-region metrics come back as (index, value) pairs; key them by the
    # stringified index so the dict is JSON-serializable.
    dct.update(
        contrast_hot = {str(ind_): float(val_) for ind_, val_ in metric.contrast_hot(mask, img)})
    dct.update(
        contrast_cold = {str(ind_): float(val_) for ind_, val_ in metric.contrast_cold(mask, img)})
    dct.update(contrast_noise_ratio1 = metric.cnr1(mask, img))
    dct.update(contrast_noise_ratio2 = metric.cnr2(mask, img))
    dct.update(contrast_recovery_coefficiency1 = metric.crc1(mask, img))
    dct.update(contrast_recovery_coefficiency2 = metric.crc2(mask, img))
    dct.update(standard_error = metric.standard_error(mask, img))
    dct.update(normalized_standard_error = metric.nsd(mask, img))
    dct.update(standard_deviation = metric.sd(mask, img))
    dct.update(background_visibility = metric.bg_visibility(mask, img))
    dct.update(noise1 = metric.noise1(mask, img))
    dct.update(noise2 = metric.noise2(mask, img))
    dct.update(signal_noise_ratio1 = metric.snr1(mask, img))
    dct.update(signal_noise_ratio2 = metric.snr2(mask, img))
    dct.update(positive_deviation = metric.pos_dev(mask, img))
    # Scalar metrics may be numpy types; force plain floats for json.dumps.
    for ind, val in dct.items():
        if not isinstance(val, dict):
            dct[ind] = float(val)
    json_str = json.dumps(dct, indent = 4)
    out_text = "## IMAGE METRIC JSON\n"
    out_text += "```javascript\n"
    out_text += json_str + '\n'
    out_text += "```\n"
    return out_text
def doc_gen(mlem_obj: MlemFull, img: Image, path: str, filename: str = None,
            mask: np.ndarray = None):
    """Assemble and write the full auto-generated markdown report.

    Sections: title block, image slices, recon JSON, and — when *mask* is
    supplied — the image metrics.

    Args:
        mlem_obj: reconstruction object; dumped via asdict(recurse=True).
        img: reconstructed image to render.
        path: directory where the slice PNG is saved.
        filename: report path; defaults to a timestamped 'doc_gen-...md'.
        mask: metric mask array, or a path string to a saved numpy array.

    Returns:
        The report filename that was written.
    """
    timestamp = time.time()
    datetime_str = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(int(timestamp)))
    if filename is None:
        filename = 'doc_gen-' + datetime_str + '.md'
    out_text = title_block_gen()
    out_text += image_block_gen(img, path)
    out_text += json_block_gen(mlem_obj.asdict(recurse = True))
    if mask is not None:
        # A string mask is treated as a path to a saved numpy array.
        if isinstance(mask, str):
            mask = np.load(mask)
        out_text += metric_block_gen(mask, img)
    # out_text += statistic_block_gen(dct)
    with open(filename, 'w') as fout:
        fout.writelines(out_text)
    # print('Converting MD to PDF...')
    # import pypandoc
    # print(filename)
    # pypandoc.convert_file(filename, 'pdf', outputfile = filename + '.pdf')
    return filename
| [
"srfnef.image_metric.nsd",
"getpass.getuser",
"matplotlib.pyplot.imshow",
"srfnef.image_metric.noise2",
"json.dumps",
"matplotlib.pyplot.plot",
"srfnef.image_metric.pos_dev",
"srfnef.image_metric.bg_visibility",
"srfnef.image_metric.crc2",
"numpy.round",
"srfnef.image_metric.standard_error",
"... | [((274, 295), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (288, 295), False, 'import matplotlib\n'), ((306, 315), 'getpass.getuser', 'getuser', ([], {}), '()\n', (313, 315), False, 'from getpass import getuser\n'), ((357, 368), 'time.time', 'time.time', ([], {}), '()\n', (366, 368), False, 'import time\n'), ((1567, 1592), 'json.dumps', 'json.dumps', (['dct'], {'indent': '(4)'}), '(dct, indent=4)\n', (1577, 1592), False, 'import json\n'), ((1875, 1905), 'numpy.percentile', 'np.percentile', (['img.data', '(99.99)'], {}), '(img.data, 99.99)\n', (1888, 1905), True, 'import numpy as np\n'), ((1965, 1993), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(30, 10)'}), '(figsize=(30, 10))\n', (1975, 1993), True, 'from matplotlib import pyplot as plt\n'), ((2000, 2016), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(231)'], {}), '(231)\n', (2011, 2016), True, 'from matplotlib import pyplot as plt\n'), ((2021, 2069), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img.data[midind[0], :, :]'], {'vmax': 'vmax'}), '(img.data[midind[0], :, :], vmax=vmax)\n', (2031, 2069), True, 'from matplotlib import pyplot as plt\n'), ((2076, 2092), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(232)'], {}), '(232)\n', (2087, 2092), True, 'from matplotlib import pyplot as plt\n'), ((2164, 2180), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(233)'], {}), '(233)\n', (2175, 2180), True, 'from matplotlib import pyplot as plt\n'), ((2253, 2269), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(234)'], {}), '(234)\n', (2264, 2269), True, 'from matplotlib import pyplot as plt\n'), ((2274, 2317), 'matplotlib.pyplot.plot', 'plt.plot', (['img.data[midind[0], midind[1], :]'], {}), '(img.data[midind[0], midind[1], :])\n', (2282, 2317), True, 'from matplotlib import pyplot as plt\n'), ((2322, 2338), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(235)'], {}), '(235)\n', (2333, 2338), True, 'from matplotlib import pyplot as plt\n'), ((2343, 2386), 
'matplotlib.pyplot.plot', 'plt.plot', (['img.data[midind[0], :, midind[2]]'], {}), '(img.data[midind[0], :, midind[2]])\n', (2351, 2386), True, 'from matplotlib import pyplot as plt\n'), ((2391, 2407), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(236)'], {}), '(236)\n', (2402, 2407), True, 'from matplotlib import pyplot as plt\n'), ((2412, 2455), 'matplotlib.pyplot.plot', 'plt.plot', (['img.data[:, midind[1], midind[2]]'], {}), '(img.data[:, midind[1], midind[2]])\n', (2420, 2455), True, 'from matplotlib import pyplot as plt\n'), ((2472, 2483), 'time.time', 'time.time', ([], {}), '()\n', (2481, 2483), False, 'import time\n'), ((2574, 2623), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path + f'/out_img{datetime_str}.png')"], {}), "(path + f'/out_img{datetime_str}.png')\n", (2585, 2623), True, 'from matplotlib import pyplot as plt\n'), ((4830, 4855), 'json.dumps', 'json.dumps', (['dct'], {'indent': '(4)'}), '(dct, indent=4)\n', (4840, 4855), False, 'import json\n'), ((5141, 5152), 'time.time', 'time.time', ([], {}), '()\n', (5150, 5152), False, 'import time\n'), ((1324, 1339), 'json.loads', 'json.loads', (['dct'], {}), '(dct)\n', (1334, 1339), False, 'import json\n'), ((1375, 1424), 'numpy.round', 'np.round', (["dct['image_config']['size']"], {'decimals': '(3)'}), "(dct['image_config']['size'], decimals=3)\n", (1383, 1424), True, 'import numpy as np\n'), ((3930, 3952), 'srfnef.image_metric.cnr1', 'metric.cnr1', (['mask', 'img'], {}), '(mask, img)\n', (3941, 3952), True, 'from srfnef import image_metric as metric\n'), ((3993, 4015), 'srfnef.image_metric.cnr2', 'metric.cnr2', (['mask', 'img'], {}), '(mask, img)\n', (4004, 4015), True, 'from srfnef import image_metric as metric\n'), ((4066, 4088), 'srfnef.image_metric.crc1', 'metric.crc1', (['mask', 'img'], {}), '(mask, img)\n', (4077, 4088), True, 'from srfnef import image_metric as metric\n'), ((4139, 4161), 'srfnef.image_metric.crc2', 'metric.crc2', (['mask', 'img'], {}), '(mask, img)\n', (4150, 4161), True, 
'from srfnef import image_metric as metric\n'), ((4195, 4227), 'srfnef.image_metric.standard_error', 'metric.standard_error', (['mask', 'img'], {}), '(mask, img)\n', (4216, 4227), True, 'from srfnef import image_metric as metric\n'), ((4272, 4293), 'srfnef.image_metric.nsd', 'metric.nsd', (['mask', 'img'], {}), '(mask, img)\n', (4282, 4293), True, 'from srfnef import image_metric as metric\n'), ((4331, 4351), 'srfnef.image_metric.sd', 'metric.sd', (['mask', 'img'], {}), '(mask, img)\n', (4340, 4351), True, 'from srfnef import image_metric as metric\n'), ((4392, 4423), 'srfnef.image_metric.bg_visibility', 'metric.bg_visibility', (['mask', 'img'], {}), '(mask, img)\n', (4412, 4423), True, 'from srfnef import image_metric as metric\n'), ((4449, 4473), 'srfnef.image_metric.noise1', 'metric.noise1', (['mask', 'img'], {}), '(mask, img)\n', (4462, 4473), True, 'from srfnef import image_metric as metric\n'), ((4499, 4523), 'srfnef.image_metric.noise2', 'metric.noise2', (['mask', 'img'], {}), '(mask, img)\n', (4512, 4523), True, 'from srfnef import image_metric as metric\n'), ((4562, 4584), 'srfnef.image_metric.snr1', 'metric.snr1', (['mask', 'img'], {}), '(mask, img)\n', (4573, 4584), True, 'from srfnef import image_metric as metric\n'), ((4623, 4645), 'srfnef.image_metric.snr2', 'metric.snr2', (['mask', 'img'], {}), '(mask, img)\n', (4634, 4645), True, 'from srfnef import image_metric as metric\n'), ((4683, 4708), 'srfnef.image_metric.pos_dev', 'metric.pos_dev', (['mask', 'img'], {}), '(mask, img)\n', (4697, 4708), True, 'from srfnef import image_metric as metric\n'), ((5536, 5549), 'numpy.load', 'np.load', (['mask'], {}), '(mask)\n', (5543, 5549), True, 'import numpy as np\n'), ((1498, 1539), 'numpy.round', 'np.round', (["dct['emap']['size']"], {'decimals': '(3)'}), "(dct['emap']['size'], decimals=3)\n", (1506, 1539), True, 'import numpy as np\n'), ((3742, 3772), 'srfnef.image_metric.contrast_hot', 'metric.contrast_hot', (['mask', 'img'], {}), '(mask, img)\n', (3761, 
3772), True, 'from srfnef import image_metric as metric\n'), ((3857, 3888), 'srfnef.image_metric.contrast_cold', 'metric.contrast_cold', (['mask', 'img'], {}), '(mask, img)\n', (3877, 3888), True, 'from srfnef import image_metric as metric\n')] |
import requests
import json


class WordDefinition:
    """Look up word definitions for *word* from the kateglo.com API."""

    def __init__(self, word):
        self.word = word
        self.definisi = None   # cached formatted definition text
        self.json_data = None  # raw decoded API response

    def url_data(self):
        """Fetch the raw payload for ``self.word``.

        Returns:
            The decoded JSON dict, or an error message string when the
            response body is not valid JSON (e.g. an unknown word).
        """
        api_url = 'http://kateglo.com/api.php'
        r = requests.get(api_url, params={
            'format': 'json', 'phrase': self.word})
        try:
            self.json_data = r.json()
            return self.json_data
        except json.decoder.JSONDecodeError:
            return 'Oooopss, It looks like you type the wrong word!'

    @staticmethod
    def format_def(data):
        """Number and join definition entries, e.g. '(1)first\\n(2)second'."""
        # enumerate(..., 1) replaces the index-based range(len(...)) loop.
        def_texts = ['({}){}'.format(num, entry['def_text'])
                     for num, entry in enumerate(data, 1)]
        return '\n'.join(def_texts)

    def definition(self):
        """Return the formatted definitions, or the API error message.

        Subscripting the error *string* returned by url_data raises
        TypeError, which is caught to pass that message through.
        """
        try:
            all_definisi = self.url_data()["kateglo"]["definition"]
            self.definisi = self.format_def(all_definisi)
            return self.definisi
        except TypeError:
            return self.url_data()
| [
"requests.get"
] | [((251, 320), 'requests.get', 'requests.get', (['api_url'], {'params': "{'format': 'json', 'phrase': self.word}"}), "(api_url, params={'format': 'json', 'phrase': self.word})\n", (263, 320), False, 'import requests\n')] |
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import time
import logging
from oslo_config import cfg
from networking_vsphere.agent import ovsvapp_agent
from networking_vsphere.common import constants as ovsvapp_const
from networking_vsphere.common import error
from networking_vsphere.tests import base
from networking_vsphere.tests.unit.drivers import fake_manager
from networking_vsphere.utils import resource_util
from neutron.agent.common import ovs_lib
from neutron.common import utils as n_utils
from neutron.plugins.common import constants as p_const
from neutron.plugins.common import utils as p_utils
from neutron.plugins.ml2.drivers.openvswitch.agent import ovs_neutron_agent as ovs_agent # noqa
from neutron.plugins.ml2.drivers.openvswitch.agent import vlanmanager
# --- Shared fixture constants for the OVSvApp agent tests -------------------
NETWORK_ID = 'fake_net_id'
VNIC_ADDED = 'VNIC_ADDED'
FAKE_DEVICE_ID = 'fake_device_id'
FAKE_VM = 'fake_vm'
FAKE_HOST_1 = 'fake_host_1'
FAKE_HOST_2 = 'fake_host_2'
FAKE_CLUSTER_MOID = 'fake_cluster_moid'
FAKE_CLUSTER_1 = 'fake_cluster_1'
FAKE_CLUSTER_2 = 'fake_cluster_2'
FAKE_VCENTER = 'fake_vcenter'
FAKE_PORT_1 = 'fake_port_1'
FAKE_PORT_2 = 'fake_port_2'
FAKE_PORT_3 = 'fake_port_3'
FAKE_PORT_4 = 'fake_port_4'
MAC_ADDRESS = '01:02:03:04:05:06'
FAKE_CONTEXT = 'fake_context'
FAKE_SG = {'fake_sg': 'fake_sg_rule'}
# A complete rule set: source groups, one egress rule, and the provider
# (DHCP 67/68) rule.
FAKE_SG_RULE = {'security_group_source_groups': ['fake_rule_1',
                                                 'fake_rule_2',
                                                 'fake_rule_3'],
                'security_group_rules': [
                    {'ethertype': 'IPv4',
                     'direction': 'egress',
                     'security_group_id': 'fake_id'
                     }],
                'sg_provider_rules': [
                    {'ethertype': 'IPv4',
                     'direction': 'egress',
                     'source_port_range_min': 67,
                     'source_port_range_max': 67,
                     'port_range_min': 68,
                     'port_range_max': 68
                     }]
                }
FAKE_SG_RULES = {FAKE_PORT_1: FAKE_SG_RULE}
FAKE_SG_RULES_MULTI_PORTS = {FAKE_PORT_1: FAKE_SG_RULE,
                             FAKE_PORT_2: FAKE_SG_RULE
                             }
# Variant with no provider rules and a rule missing its port range.
FAKE_SG_RULES_MISSING = {FAKE_PORT_1: {'security_group_source_groups': [
                                           'fake_rule_1',
                                           'fake_rule_2',
                                           'fake_rule_3'],
                                       'sg_provider_rules': [],
                                       'security_group_rules': [
                                           {'ethertype': 'IPv4',
                                            'direction': 'egress'
                                            }]
                                       }
                         }
# Variant with no provider rules and a rule restricted to port 22.
FAKE_SG_RULES_PARTIAL = {FAKE_PORT_1: {'security_group_source_groups': [
                                           'fake_rule_1',
                                           'fake_rule_2',
                                           'fake_rule_3'],
                                       'sg_provider_rules': [],
                                       'security_group_rules': [
                                           {'ethertype': 'IPv4',
                                            'direction': 'egress',
                                            'port_range_min': 22,
                                            'port_range_max': 22
                                            }]
                                       }
                         }
# A device record as delivered to the agent from the vCenter side.
DEVICE = {'id': FAKE_DEVICE_ID,
          'cluster_id': FAKE_CLUSTER_1,
          'host': FAKE_HOST_1,
          'vcenter': FAKE_VCENTER}
class SampleEvent(object):
    """Lightweight stand-in for a vCenter event consumed by the agent."""

    def __init__(self, type, host, cluster, srcobj, host_changed=False):
        # Map constructor arguments onto the attribute names the agent reads.
        self.__dict__.update({
            'event_type': type,
            'host_name': host,
            'cluster_id': cluster,
            'src_obj': srcobj,
            'host_changed': host_changed,
        })
class VM(object):
    """Minimal virtual-machine fixture: a uuid plus its vnic list."""

    def __init__(self, uuid, vnics):
        self.uuid, self.vnics = uuid, vnics
class SamplePort(object):
    """Port fixture carrying a uuid and optional mac address / portgroup id."""

    def __init__(self, port_uuid, mac_address=None, pg_id=None):
        self.port_uuid, self.mac_address, self.pg_id = (
            port_uuid, mac_address, pg_id)
class SamplePortUIDMac(object):
    """Pared-down port fixture holding just the uuid/mac pair."""

    def __init__(self, port_uuid, mac_address):
        self.port_uuid, self.mac_address = port_uuid, mac_address
class TestOVSvAppAgentRestart(base.TestCase):
    """Covers OVSvAppAgent.check_ovsvapp_agent_restart."""

    @mock.patch('neutron.common.config.init')
    @mock.patch('neutron.common.config.setup_logging')
    @mock.patch('neutron.agent.ovsdb.api.'
                'API.get')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
    @mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
    @mock.patch('neutron.agent.rpc.PluginReportStateAPI')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
    @mock.patch('neutron.context.get_admin_context_without_session')
    @mock.patch('neutron.agent.rpc.create_consumers')
    @mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
                'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.setup_ovs_bridges')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.setup_security_br')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent._init_ovs_flows')
    @mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
                'check_ovs_firewall_restart')
    @mock.patch('networking_vsphere.drivers.ovs_firewall.'
                'OVSFirewallDriver.setup_base_flows')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.__init__')
    def setUp(self, mock_ovs_init, mock_get_port_ofport,
              mock_set_secure_mode, mock_create_ovs_bridge,
              mock_setup_base_flows, mock_check_ovs_firewall_restart,
              mock_init_ovs_flows, mock_setup_security_br,
              mock_setup_ovs_bridges,
              mock_setup_integration_br, mock_create_consumers,
              mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
              mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
              mock_rpc_pluginapi, mock_ovsdb_api, mock_setup_logging,
              mock_init):
        """Build an agent with every external dependency mocked out.

        Note: OVSvAppAgent.__init__ itself is patched to a no-op (the
        innermost decorator), so the instance only carries the attributes
        assigned below.
        """
        super(TestOVSvAppAgentRestart, self).setUp()
        cfg.CONF.set_override('security_bridge_mapping',
                              "fake_sec_br:fake_if", 'SECURITYGROUP')
        mock_get_port_ofport.return_value = 5
        mock_ovs_init.return_value = None
        self.agent = ovsvapp_agent.OVSvAppAgent()
        self.agent.run_refresh_firewall_loop = False
        self.LOG = ovsvapp_agent.LOG
        self.agent.monitor_log = logging.getLogger('monitor')

    def test_check_ovsvapp_agent_restart(self):
        """Restart is detected iff the canary flow (its cookie) is still
        present in the integration bridge's flow table.
        """
        self.agent.int_br = mock.Mock()
        with mock.patch.object(self.agent.int_br, 'bridge_exists',
                               return_value=True) as mock_br_exists, \
                mock.patch.object(self.agent.int_br, 'dump_flows_for_table',
                                  return_value='') as mock_dump_flows:
            # No flows dumped -> fresh start, not a restart.
            self.assertFalse(self.agent.check_ovsvapp_agent_restart())
            self.assertTrue(mock_br_exists.called)
            self.assertTrue(mock_dump_flows.called)
            # A dumped cookie means the bridge survived -> agent restart.
            mock_dump_flows.return_value = 'cookie = 0x0'
            self.assertTrue(self.agent.check_ovsvapp_agent_restart())
            self.assertTrue(mock_br_exists.called)
            self.assertTrue(mock_dump_flows.called)
class TestOVSvAppAgent(base.TestCase):
@mock.patch('neutron.common.config.init')
@mock.patch('neutron.common.config.setup_logging')
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
@mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
@mock.patch('neutron.agent.rpc.PluginReportStateAPI')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
@mock.patch('neutron.context.get_admin_context_without_session')
@mock.patch('neutron.agent.rpc.create_consumers')
@mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.check_ovsvapp_agent_restart')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_ovs_bridges')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_security_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent._init_ovs_flows')
@mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
'check_ovs_firewall_restart')
@mock.patch('networking_vsphere.drivers.ovs_firewall.'
'OVSFirewallDriver.setup_base_flows')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
def setUp(self, mock_get_port_ofport,
mock_set_secure_mode, mock_create_ovs_bridge,
mock_setup_base_flows, mock_check_ovs_firewall_restart,
mock_init_ovs_flows, mock_setup_security_br,
mock_setup_ovs_bridges, mock_check_ovsvapp_agent_restart,
mock_setup_integration_br, mock_create_consumers,
mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
mock_rpc_pluginapi, mock_ovsdb_api, mock_setup_logging,
mock_init):
super(TestOVSvAppAgent, self).setUp()
cfg.CONF.set_override('security_bridge_mapping',
"fake_sec_br:fake_if", 'SECURITYGROUP')
mock_check_ovsvapp_agent_restart.return_value = False
mock_get_port_ofport.return_value = 5
self.agent = ovsvapp_agent.OVSvAppAgent()
self.agent.run_refresh_firewall_loop = False
self.LOG = ovsvapp_agent.LOG
self.agent.monitor_log = logging.getLogger('monitor')
def _build_port(self, port):
port = {'admin_state_up': False,
'id': port,
'device': DEVICE,
'network_id': NETWORK_ID,
'physical_network': 'physnet1',
'segmentation_id': '1001',
'lvid': 1,
'network_type': 'vlan',
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
def _build_update_port(self, port):
port = {'admin_state_up': False,
'id': port,
'network_id': NETWORK_ID,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
    def test_setup_security_br_none(self):
        """setup_security_br raises SystemExit when no mapping is configured."""
        cfg.CONF.set_override('security_bridge_mapping',
                              None, 'SECURITYGROUP')
        self.agent.sec_br = mock.Mock()
        with mock.patch.object(self.LOG, 'warning') as mock_logger_warn,\
                mock.patch.object(self.agent.sec_br, 'bridge_exists'
                                  ) as mock_ovs_bridge:
            self.assertRaises(SystemExit,
                              self.agent.setup_security_br)
            # Bails out (with a warning) before ever touching the bridge.
            self.assertTrue(mock_logger_warn.called)
            self.assertFalse(mock_ovs_bridge.called)
    def test_setup_security_br(self):
        """With a valid mapping, setup_security_br wires patch ports on both bridges."""
        cfg.CONF.set_override('security_bridge_mapping',
                              "br-fake:fake_if", 'SECURITYGROUP')
        self.agent.sec_br = mock.Mock()
        self.agent.int_br = mock.Mock()
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(ovs_lib, "OVSBridge") as mock_ovs_br, \
                mock.patch.object(self.agent.sec_br,
                                  "add_patch_port",
                                  return_value=5), \
                mock.patch.object(self.agent.int_br,
                                  "add_patch_port",
                                  return_value=6):
            self.agent.setup_security_br()
            self.assertTrue(mock_ovs_br.called)
            self.assertTrue(self.agent.sec_br.add_patch_port.called)
            self.assertTrue(mock_logger_info.called)
    def test_recover_security_br_none(self):
        """recover_security_br raises SystemExit when no mapping is configured."""
        cfg.CONF.set_override('security_bridge_mapping',
                              None, 'SECURITYGROUP')
        self.agent.sec_br = mock.Mock()
        with mock.patch.object(self.LOG, 'warning') as mock_logger_warn, \
                mock.patch.object(self.agent.sec_br, 'bridge_exists'
                                  ) as mock_ovs_bridge:
            self.assertRaises(SystemExit,
                              self.agent.recover_security_br)
            # Warns and exits without inspecting the bridge.
            self.assertTrue(mock_logger_warn.called)
            self.assertFalse(mock_ovs_bridge.called)
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge')
    def test_recover_security_br(self, mock_ovs_bridge):
        """recover_security_br only re-plumbs patch ports when the iface is on the wrong bridge."""
        cfg.CONF.set_override('security_bridge_mapping',
                              "br-sec:physnet1", 'SECURITYGROUP')
        self.agent.int_br = mock.Mock()
        self.agent.sec_br = mock.Mock()
        mock_br = mock_ovs_bridge.return_value
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
            mock.patch.object(mock_br, 'bridge_exists'), \
            mock.patch.object(mock_br, 'add_patch_port') as mock_add_patch_port, \
                mock.patch.object(self.agent.int_br,
                                  "get_port_ofport",
                                  return_value=6), \
                mock.patch.object(mock_br,
                                  "get_port_ofport",
                                  return_value=6), \
                mock.patch.object(mock_br,
                                  "delete_port") as mock_delete_port:
            # Case 1: iface already on the expected bridge -> nothing rebuilt.
            mock_br.get_bridge_for_iface.return_value = 'br-sec'
            self.agent.recover_security_br()
            self.assertTrue(mock_logger_info.called)
            self.assertFalse(mock_delete_port.called)
            self.assertFalse(mock_add_patch_port.called)
            # Case 2: iface on a different bridge -> delete and re-add.
            mock_br.get_bridge_for_iface.return_value = 'br-fake'
            self.agent.recover_security_br()
            self.assertTrue(mock_logger_info.called)
            self.assertTrue(mock_delete_port.called)
            self.assertTrue(mock_add_patch_port.called)
    @mock.patch('neutron.agent.ovsdb.api.'
                'API.get')
    def test_recover_physical_bridges(self, mock_ovsdb_api):
        """recover_physical_bridges rebuilds phys bridges and records int_ofports."""
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        self.agent.bridge_mappings = n_utils.parse_mappings(
            cfg.CONF.OVSVAPP.bridge_mappings)
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.LOG, 'error') as mock_logger_error, \
                mock.patch.object(self.agent, "br_phys_cls") as mock_ovs_br, \
                mock.patch.object(ovs_lib.BaseOVS,
                                  "get_bridges",
                                  return_value=['br-eth1']
                                  ), \
                mock.patch.object(p_utils, 'get_interface_name'
                                  ) as mock_int_name, \
                mock.patch.object(self.agent.int_br,
                                  "get_port_ofport",
                                  return_value=6) as mock_get_ofport:
            self.agent.recover_physical_bridges(self.agent.bridge_mappings)
            self.assertTrue(mock_logger_info.called)
            self.assertFalse(mock_logger_error.called)
            self.assertTrue(mock_ovs_br.called)
            self.assertTrue(mock_get_ofport.called)
            self.assertTrue(mock_int_name.called)
            # The mocked ofport (6) must be cached for the physical network.
            self.assertEqual(self.agent.int_ofports['physnet1'], 6)
    def test_init_ovs_flows(self):
        """_init_ovs_flows resets int-bridge flows and programs phys bridges."""
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        self.agent.bridge_mappings = n_utils.parse_mappings(
            cfg.CONF.OVSVAPP.bridge_mappings)
        self.agent.patch_sec_ofport = 5
        self.agent.int_ofports = {'physnet1': 'br-eth1'}
        self.agent.phys_ofports = {"physnet1": "br-eth1"}
        port = self._build_port(FAKE_PORT_1)
        br = self._build_phys_brs(port)
        self.agent.br = mock.Mock()
        with mock.patch.object(self.agent.int_br,
                               "delete_flows"
                               ) as mock_int_br_delete_flows, \
                mock.patch.object(self.agent,
                                  "br_phys_cls") as mock_ovs_br, \
                mock.patch.object(self.agent.int_br,
                                  "add_flow") as mock_int_br_add_flow:
            self.agent._init_ovs_flows(self.agent.bridge_mappings)
            self.assertTrue(mock_int_br_delete_flows.called)
            self.assertTrue(mock_ovs_br.called)
            # NOTE(review): _build_phys_brs itself already invoked
            # br.delete_flows/br.add_flows, so these two asserts hold even if
            # _init_ovs_flows never touches the phys bridge — weak assertions.
            self.assertTrue(br.delete_flows.called)
            self.assertTrue(br.add_flows.called)
            self.assertTrue(mock_int_br_add_flow.called)
    def test_update_port_bindings(self):
        """_update_port_bindings clears ports_to_bind when the RPC binds all ports."""
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_ports_binding",
                               return_value=set(["fake_port"])
                               ) as mock_update_ports_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._update_port_bindings()
            self.assertTrue(mock_update_ports_binding.called)
            self.assertFalse(self.agent.ports_to_bind)
            self.assertFalse(mock_log_exception.called)
    def test_update_port_bindings_rpc_exception(self):
        """An RPC failure is wrapped in OVSvAppNeutronAgentError and ports are kept."""
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_ports_binding",
                               side_effect=Exception()
                               ) as mock_update_port_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._update_port_bindings)
            self.assertTrue(mock_update_port_binding.called)
            self.assertTrue(mock_log_exception.called)
            # The unbound port must remain queued for a retry.
            self.assertEqual(set(['fake_port']),
                             self.agent.ports_to_bind)
    def test_update_port_bindings_partial(self):
        """Only ports the RPC reports as bound are removed from ports_to_bind."""
        self.agent.ports_to_bind.add("fake_port1")
        self.agent.ports_to_bind.add("fake_port2")
        self.agent.ports_to_bind.add("fake_port3")
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_ports_binding",
                               return_value=set(["fake_port1",
                                                 "fake_port2"])
                               ) as mock_update_port_binding, \
                mock.patch.object(self.LOG, 'exception'):
            self.agent._update_port_bindings()
            self.assertTrue(mock_update_port_binding.called)
            # fake_port3 was not bound, so it stays queued.
            self.assertEqual(set(["fake_port3"]),
                             self.agent.ports_to_bind)
    def test_setup_ovs_bridges_vlan(self):
        """VLAN tenant networks: setup_ovs_bridges builds phys bridges and flows."""
        cfg.CONF.set_override('tenant_network_types',
                              "vlan", 'OVSVAPP')
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        with mock.patch.object(self.agent, 'setup_physical_bridges'
                               ) as mock_phys_brs, \
                mock.patch.object(self.agent, '_init_ovs_flows'
                                  ) as mock_init_ovs_flows:
            self.agent.setup_ovs_bridges()
            mock_phys_brs.assert_called_with(self.agent.bridge_mappings)
            mock_init_ovs_flows.assert_called_with(self.agent.bridge_mappings)
    @mock.patch('neutron.agent.ovsdb.api.'
                'API.get')
    def test_setup_ovs_bridges_vxlan(self, mock_ovsdb_api):
        """VXLAN tenant networks: setup_ovs_bridges builds br-tun and its flows."""
        self.agent.local_ip = "10.10.10.10"
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent, 'setup_tunnel_br'
                               ) as mock_setup_tunnel_br, \
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows:
            self.agent.setup_ovs_bridges()
            mock_setup_tunnel_br.assert_called_with("br-tun")
            self.assertTrue(mock_setup_tunnel_br_flows.called)
    def test_setup_ovs_bridges_vxlan_ofport(self):
        """VXLAN setup caches the tun/int patch-port ofports returned by OVS."""
        cfg.CONF.set_override('tenant_network_types',
                              "vxlan", 'OVSVAPP')
        cfg.CONF.set_override('local_ip',
                              "10.10.10.10", 'OVSVAPP')
        cfg.CONF.set_override('tunnel_bridge',
                              "br-tun", 'OVSVAPP')
        self.agent.tun_br = mock.Mock()
        self.agent.int_br = mock.Mock()
        self.agent.local_ip = "10.10.10.10"
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.tun_br,
                               "add_patch_port",
                               return_value=5), \
                mock.patch.object(self.agent.int_br,
                                  "add_patch_port",
                                  return_value=6), \
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows:
            self.agent.setup_ovs_bridges()
            self.assertTrue(self.agent.tun_br.add_patch_port.called)
            # int_br side returns 6, tun_br side returns 5 (see mocks above).
            self.assertEqual(self.agent.patch_tun_ofport, 6)
            self.assertEqual(self.agent.patch_int_ofport, 5)
            self.assertTrue(mock_setup_tunnel_br_flows.called)
    def test_mitigate_ovs_restart_vlan(self):
        """OVS restart (VLAN): bridges/firewall rebuilt, no tunnel work, refresh flagged."""
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set(['1111'])
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br"
                                  ) as mock_int_br, \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_security_br"
                                  ) as mock_sec_br, \
                mock.patch.object(self.agent.sg_agent, "init_firewall"
                                  ) as mock_init_fw, \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.agent, "_init_ovs_flows"
                                  ) as mock_init_flows, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_int_br.called)
            self.assertTrue(mock_phys_brs.called)
            self.assertTrue(mock_sec_br.called)
            # Tunneling is disabled in this scenario.
            self.assertFalse(mock_setup_tunnel_br.called)
            self.assertFalse(mock_setup_tunnel_br_flows.called)
            self.assertTrue(mock_init_fw.called)
            self.assertTrue(mock_init_flows.called)
            self.assertTrue(self.agent.refresh_firewall_required)
            # Host + other cluster ports are queued for re-filtering.
            self.assertEqual(2, len(self.agent.devices_to_filter))
            monitor_warning.assert_called_with("ovs: broken")
            monitor_info.assert_called_with("ovs: ok")
            self.assertTrue(mock_logger_info.called)
    def test_mitigate_ovs_restart_vxlan(self):
        """OVS restart (VXLAN): tunnel bridge rebuilt and synced, no phys bridges."""
        self.agent.enable_tunneling = True
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set(['1111'])
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br"), \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_security_br"), \
                mock.patch.object(self.agent.sg_agent, "init_firewall"
                                  ), \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.agent, "tunnel_sync"
                                  ) as mock_tun_sync, \
                mock.patch.object(self.agent, "_init_ovs_flows"), \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_setup_tunnel_br.called)
            self.assertTrue(mock_setup_tunnel_br_flows.called)
            self.assertFalse(mock_phys_brs.called)
            self.assertTrue(mock_tun_sync.called)
            self.assertTrue(self.agent.refresh_firewall_required)
            self.assertEqual(len(self.agent.devices_to_filter), 2)
            monitor_warning.assert_called_with("ovs: broken")
            monitor_info.assert_called_with("ovs: ok")
            self.assertTrue(mock_logger_info.called)
    def test_mitigate_ovs_restart_exception(self):
        """A failure during recovery logs an exception and leaves agent state untouched."""
        self.agent.enable_tunneling = False
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set()
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, "info") as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br",
                                  side_effect=Exception()) as mock_int_br, \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.LOG, "exception"
                                  ) as mock_exception_log, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_int_br.called)
            # Recovery aborts at the first failing step.
            self.assertFalse(mock_phys_brs.called)
            self.assertFalse(mock_setup_tunnel_br.called)
            self.assertFalse(mock_setup_tunnel_br_flows.called)
            self.assertFalse(mock_logger_info.called)
            self.assertTrue(mock_exception_log.called)
            self.assertFalse(self.agent.refresh_firewall_required)
            self.assertEqual(0, len(self.agent.devices_to_filter))
            # "ovs: broken" is reported but never followed by "ovs: ok".
            monitor_warning.assert_called_with("ovs: broken")
            self.assertFalse(monitor_info.called)
def _get_fake_port(self, port_id):
return {'id': port_id,
'port_id': port_id,
'mac_address': MAC_ADDRESS,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'security_groups': FAKE_SG,
'segmentation_id': 1232,
'lvid': 1,
'network_id': 'fake_network',
'device_id': FAKE_DEVICE_ID,
'admin_state_up': True,
'physical_network': 'physnet1',
'network_type': 'vlan'}
def _build_phys_brs(self, port):
phys_net = port['physical_network']
self.agent.phys_brs[phys_net] = {}
self.agent.phys_brs[phys_net]['eth_ofport'] = 5
br = self.agent.phys_brs[phys_net]['br'] = mock.Mock()
br.add_flows(port['segmentation_id'],
port['mac_address'],
5)
br.delete_flows(port['mac_address'],
port['segmentation_id'])
return br
    def test_process_port(self):
        """_process_port caches the port, provisions a local VLAN and drops vnic_info."""
        fakeport = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        br = self._build_phys_brs(fakeport)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan:
            status = self.agent._process_port(fakeport)
            self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
            self.assertTrue(status)
            mock_add_devices.assert_called_with([fakeport])
            mock_prov_local_vlan.assert_called_with(fakeport)
            self.assertTrue(br.add_flows.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
    def test_process_port_existing_network(self):
        """_process_port skips VLAN provisioning when the network's lvm already exists."""
        fakeport = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        br = self._build_phys_brs(fakeport)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = {}
        # Pre-create the local VLAN mapping for the port's network.
        self._build_lvm(fakeport)
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan:
            status = self.agent._process_port(fakeport)
            self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
            self.assertTrue(status)
            mock_add_devices.assert_called_with([fakeport])
            self.assertFalse(mock_prov_local_vlan.called)
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_with_few_devices(self):
        """12 devices are handled by a single green-thread batch."""
        devices = set(['123', '234', '345', '456', '567', '678',
                       '1123', '1234', '1345', '1456', '1567', '1678'])
        with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices(devices)
            self.assertTrue(mock_spawn_thread.called)
            self.assertEqual(1, mock_spawn_thread.call_count)
            self.assertFalse(mock_log_exception.called)
    def test_process_uncached_devices_with_more_devices(self):
        """42 devices are split across two green-thread batches."""
        devices = set(['123', '234', '345', '456', '567', '678',
                       '1123', '1234', '1345', '1456', '1567', '1678',
                       '2123', '2234', '2345', '2456', '2567', '2678',
                       '3123', '3234', '3345', '3456', '3567', '3678',
                       '4123', '4234', '4345', '4456', '4567', '4678',
                       '5123', '5234', '5345', '5456', '5567', '5678',
                       '6123', '6234', '6345', '6456', '6567', '6678'])
        with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices(devices)
            self.assertTrue(mock_spawn_thread.called)
            self.assertEqual(2, mock_spawn_thread.call_count)
            self.assertFalse(mock_log_exception.called)
    def test_process_uncached_devices_sublist_single_port_vlan(self):
        """Sublist processing of one VLAN port: fetch, filter, provision, refresh."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        br = self._build_phys_brs(fakeport_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        devices = [FAKE_PORT_1]
        self.agent.vlan_manager.mapping = {}
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_provision_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(1, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_provision_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_sublist_multiple_port_vlan(self):
        """Sublist processing of two VLAN ports filters each and clears vnic_info."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        br = self._build_phys_brs(fakeport_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        devices = [FAKE_PORT_1, FAKE_PORT_2]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_sublist_single_port_vxlan(self):
        """Sublist processing of one VXLAN port provisions a local VLAN too."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_1["network_type"] = p_const.TYPE_VXLAN
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        devices = [FAKE_PORT_1]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_populate_lvm'), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertEqual(1, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
    def test_process_uncached_devices_sublist_multiple_port_vxlan(self):
        """Sublist processing of two VXLAN ports filters each and clears vnic_info."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        fakeport_1["network_type"] = p_const.TYPE_VXLAN
        fakeport_2["network_type"] = p_const.TYPE_VXLAN
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        devices = [FAKE_PORT_1, FAKE_PORT_2]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_populate_lvm'), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
    def test_process_uncached_devices_sublist_stale_vm_port(self):
        """A port the server no longer reports is treated as stale and unfiltered."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        fakeport_3 = self._get_fake_port(FAKE_PORT_3)
        self.agent.ports_dict = {}
        self.agent.vlan_manager.mapping = {}
        self._build_phys_brs(fakeport_1)
        self._build_phys_brs(fakeport_2)
        self._build_phys_brs(fakeport_3)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.ports_to_bind = set([FAKE_PORT_3, FAKE_PORT_4])
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        self.agent.vnic_info[FAKE_PORT_3] = fakeport_3
        devices = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
        # The RPC below only returns ports 1 and 2 -> port 3 is stale.
        self.agent.sg_agent.remove_devices_filter = mock.Mock()
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'remove_devices_filter'
                                  )as mock_remove_device_filter, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
                mock.patch.object(self.agent, '_block_stale_ports'), \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            # Stale port 3 is dropped everywhere; unrelated port 4 survives.
            self.assertNotIn(FAKE_PORT_3, self.agent.ports_to_bind)
            self.assertIn(FAKE_PORT_4, self.agent.ports_to_bind)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_3, self.agent.vnic_info)
            mock_remove_device_filter.assert_called_with(FAKE_PORT_3)
    def test_update_firewall(self):
        """_update_firewall fetches only uncached ports and refreshes all queued ones."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        self._build_phys_brs(fakeport_1)
        self._build_phys_brs(fakeport_2)
        self.agent.devices_to_filter = set([FAKE_PORT_1,
                                            FAKE_PORT_2])
        # Port 1 is already cached; only port 2 should be fetched via RPC.
        self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
        self.agent.vnic_info[FAKE_PORT_1] = {}
        self.agent.vnic_info[FAKE_PORT_2] = {}
        self.agent.refresh_firewall_required = True
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ), \
                mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
                mock.patch.object(self.agent, '_block_stale_ports'), \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent._update_firewall()
            self.assertFalse(self.agent.refresh_firewall_required)
            self.assertFalse(self.agent.devices_to_filter)
            self.assertIn(FAKE_PORT_2, self.agent.ports_dict)
            mock_get_ports_details_list.assert_called_with(
                self.agent.context,
                [FAKE_PORT_2],
                self.agent.agent_id,
                self.agent.vcenter_id,
                self.agent.cluster_id)
            mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1,
                                                          FAKE_PORT_2]))
            self.assertEqual(2, monitor_warning.call_count)
            self.assertEqual(2, monitor_info.call_count)
    def test_update_firewall_get_ports_exception(self):
        """If the ports RPC fails, unfetched ports stay queued and refresh is retried."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        self.agent.devices_to_filter = set([FAKE_PORT_1,
                                            FAKE_PORT_2])
        self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
        self.agent.refresh_firewall_required = True
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               side_effect=Exception()
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent._update_firewall()
            # Refresh stays pending, and the un-fetched port is re-queued.
            self.assertTrue(self.agent.refresh_firewall_required)
            self.assertEqual(set([FAKE_PORT_2]), self.agent.devices_to_filter)
            self.assertNotIn(FAKE_PORT_2, self.agent.ports_dict)
            mock_get_ports_details_list.assert_called_with(
                self.agent.context,
                [FAKE_PORT_2],
                self.agent.agent_id,
                self.agent.vcenter_id,
                self.agent.cluster_id)
            # Only the already-cached port gets its firewall refreshed.
            mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1]))
            self.assertEqual(2, monitor_warning.call_count)
            self.assertEqual(1, monitor_info.call_count)
    def test_check_for_updates_no_updates(self):
        """With OVS healthy and nothing pending, _check_for_updates does no work."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
                                  ) as mock_refresh_port_filters, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_refresh_port_filters.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_ovs_restarted(self):
        """check_ovs_status == 0 (restarted) triggers mitigate_ovs_restart."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=0) as mock_check_ovs, \
                mock.patch.object(self.agent, 'mitigate_ovs_restart'
                                  ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertTrue(mock_mitigate.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
    @mock.patch.object(ovsvapp_agent.OVSvAppAgent, 'check_ovs_status')
    def test_check_for_updates_ovs_dead(self, check_ovs_status):
        """OVS dead (status 2) only flags mitigation; status 1 then performs it."""
        check_ovs_status.return_value = 2
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'mitigate_ovs_restart'
                               ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            # Dead OVS: remember that mitigation is needed, but don't run it.
            self.assertTrue(self.agent.ovsvapp_mitigation_required)
            self.assertTrue(check_ovs_status.called)
            self.assertFalse(mock_mitigate.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
            # OVS back (status 1): pending mitigation runs and the flag clears.
            check_ovs_status.return_value = 1
            self.agent._check_for_updates()
            self.assertTrue(check_ovs_status.called)
            self.assertTrue(mock_mitigate.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
            self.assertFalse(self.agent.ovsvapp_mitigation_required)
    def test_check_for_updates_devices_to_filter(self):
        """A pending firewall refresh causes _update_firewall to run."""
        self.agent.refresh_firewall_required = True
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, 'mitigate_ovs_restart'
                                  ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall,\
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_mitigate.called)
            self.assertTrue(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_firewall_refresh(self):
        """firewall_refresh_needed triggers refresh_port_filters on the sg_agent."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=True
                                  ) as mock_firewall_refresh,\
                mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
                                  ) as mock_refresh_port_filters, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertTrue(mock_refresh_port_filters.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_port_bindings(self):
        """Queued ports_to_bind cause _update_port_bindings to run."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertTrue(mock_update_port_bindings.called)
    def test_update_devices_up(self):
        """A fully successful update_devices_up RPC must drain
        devices_up_list and log no exception.
        """
        self.agent.devices_up_list.append(FAKE_PORT_1)
        ret_value = {'devices_up': [FAKE_PORT_1],
                     'failed_devices_up': []}
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_devices_up",
                               return_value=ret_value
                               ) as update_devices_up, \
            mock.patch.object(self.LOG, 'exception'
                              ) as log_exception:
            self.agent._update_devices_up()
            self.assertTrue(update_devices_up.called)
            self.assertFalse(self.agent.devices_up_list)
            self.assertFalse(log_exception.called)
    def test_update_devices_up_rpc_exception(self):
        """If the update_devices_up RPC raises, the device must stay in
        devices_up_list (to be retried) and the failure must be logged.
        """
        self.agent.devices_up_list.append(FAKE_PORT_1)
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_devices_up",
                               side_effect=Exception()
                               ) as update_devices_up, \
            mock.patch.object(self.LOG, 'exception'
                              ) as log_exception:
            self.agent._update_devices_up()
            self.assertTrue(update_devices_up.called)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            self.assertTrue(log_exception.called)
    def test_update_devices_up_partial(self):
        """A partially successful RPC must keep only the failed devices
        in devices_up_list for retry; no exception is logged.
        """
        self.agent.devices_up_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
        ret_value = {'devices_up': [FAKE_PORT_1, FAKE_PORT_2],
                     'failed_devices_up': [FAKE_PORT_3]}
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_devices_up",
                               return_value=ret_value
                               ) as update_devices_up, \
            mock.patch.object(self.LOG, 'exception'
                              ) as log_exception:
            self.agent._update_devices_up()
            self.assertTrue(update_devices_up.called)
            self.assertEqual([FAKE_PORT_3], self.agent.devices_up_list)
            self.assertFalse(log_exception.called)
    def test_update_devices_down(self):
        """A fully successful update_devices_down RPC must drain
        devices_down_list and log no exception.
        """
        self.agent.devices_down_list.append(FAKE_PORT_1)
        ret_value = {'devices_down': [FAKE_PORT_1],
                     'failed_devices_down': []}
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_devices_down",
                               return_value=ret_value
                               ) as update_devices_down, \
            mock.patch.object(self.LOG, 'exception'
                              ) as log_exception:
            self.agent._update_devices_down()
            self.assertTrue(update_devices_down.called)
            self.assertFalse(self.agent.devices_down_list)
            self.assertFalse(log_exception.called)
    def test_update_devices_down_rpc_exception(self):
        """If the update_devices_down RPC raises, the device must stay in
        devices_down_list (to be retried) and the failure must be logged.
        """
        self.agent.devices_down_list.append(FAKE_PORT_1)
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_devices_down",
                               side_effect=Exception()
                               ) as update_devices_down, \
            mock.patch.object(self.LOG, 'exception'
                              ) as log_exception:
            self.agent._update_devices_down()
            self.assertTrue(update_devices_down.called)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_down_list)
            self.assertTrue(log_exception.called)
    def test_update_devices_down_partial(self):
        """A partially successful RPC must keep only the failed devices
        in devices_down_list for retry; no exception is logged.
        """
        self.agent.devices_down_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
        ret_value = {'devices_down': [FAKE_PORT_1, FAKE_PORT_2],
                     'failed_devices_down': [FAKE_PORT_3]}
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_devices_down",
                               return_value=ret_value
                               ) as update_devices_down, \
            mock.patch.object(self.LOG, 'exception'
                              ) as log_exception:
            self.agent._update_devices_down()
            self.assertTrue(update_devices_down.called)
            self.assertEqual([FAKE_PORT_3], self.agent.devices_down_list)
            self.assertFalse(log_exception.called)
    def test_report_state(self):
        """_report_state must send the agent state via the state RPC,
        drop the start_flag after the first report, clear use_call, and
        report under the configured host name.
        """
        with mock.patch.object(self.agent.state_rpc,
                               "report_state") as report_st:
            self.agent._report_state()
            report_st.assert_called_with(self.agent.context,
                                         self.agent.agent_state,
                                         True)
            self.assertNotIn("start_flag", self.agent.agent_state)
            self.assertFalse(self.agent.use_call)
            self.assertEqual(cfg.CONF.host,
                             self.agent.agent_state["host"])
    def test_report_state_fail(self):
        """A failing state-report RPC must be caught and logged, not
        propagated out of _report_state.
        """
        with mock.patch.object(self.agent.state_rpc,
                               "report_state",
                               side_effect=Exception()) as mock_report_st, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._report_state()
            mock_report_st.assert_called_with(self.agent.context,
                                              self.agent.agent_state,
                                              True)
            self.assertTrue(mock_log_exception.called)
    def test_process_event_ignore_event(self):
        """An event type the agent does not handle (VNIC_ADDED) must be
        ignored: none of the device notification paths fire, only debug
        logging happens.
        """
        vm = VM(FAKE_VM, [])
        event = SampleEvent(VNIC_ADDED, FAKE_HOST_1,
                            FAKE_CLUSTER_MOID, vm)
        with mock.patch.object(self.agent,
                               "_notify_device_added") as mock_add_vm, \
                mock.patch.object(self.agent,
                                  "_notify_device_updated") as mock_update_vm, \
                mock.patch.object(self.agent,
                                  "_notify_device_deleted") as mock_del_vm, \
                mock.patch.object(self.LOG, 'debug') as mock_log_debug:
            self.agent.process_event(event)
            self.assertFalse(mock_add_vm.called)
            self.assertFalse(mock_update_vm.called)
            self.assertFalse(mock_del_vm.called)
            self.assertTrue(mock_log_debug.called)
    def test_process_event_exception(self):
        """An exception raised while handling a VM_CREATED event must be
        caught inside process_event and logged as error + exception.
        """
        vm = VM(FAKE_VM, [])
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        with mock.patch.object(self.agent,
                               "_notify_device_added",
                               side_effect=Exception()) as mock_add_vm, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception, \
                mock.patch.object(self.LOG, 'error') as mock_log_error:
            self.agent.process_event(event)
            self.assertTrue(mock_add_vm.called)
            self.assertTrue(mock_log_error.called)
            self.assertTrue(mock_log_exception.called)
    def test_process_event_vm_create_nonics_non_host_non_cluster(self):
        """VM_CREATED for a NIC-less VM on another host must still reach
        _notify_device_added.
        """
        self.agent.esx_hostname = FAKE_HOST_2
        vm = VM(FAKE_VM, [])
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent,
                               "_notify_device_added") as device_added:
            self.agent.process_event(event)
            self.assertTrue(device_added.called)
    def test_process_event_vm_create_nonics_non_host(self):
        """VM_CREATED for a NIC-less VM must notify device_added and
        record the event's cluster moid on the agent.
        """
        self.agent.esx_hostname = FAKE_HOST_2
        vm = VM(FAKE_VM, [])
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent,
                               "_notify_device_added") as device_added:
            self.agent.process_event(event)
            self.assertTrue(device_added.called)
            self.assertEqual(FAKE_CLUSTER_MOID, self.agent.cluster_moid)
    def test_process_event_vm_create_nics_non_host(self):
        """VM_CREATED on a different ESX host: each vnic's port goes into
        devices_to_filter and cluster_other_ports, never into
        cluster_host_ports.
        """
        self.agent.esx_hostname = FAKE_HOST_2
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm_port2 = SamplePort(FAKE_PORT_2)
        vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.sec_br = mock.Mock()
        with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
                               return_value='mock_flow') as mock_dump_flows:
            self.agent.process_event(event)
            self.assertTrue(mock_dump_flows.called)
            for vnic in vm.vnics:
                self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
                self.assertIn(vnic.port_uuid, self.agent.cluster_other_ports)
                self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
    def test_process_event_vm_create_nics_host(self):
        """VM_CREATED on this agent's ESX host: vnic ports land in
        cluster_host_ports (not cluster_other_ports); when the security
        bridge has no flows for the port, ports are fetched via RPC.
        """
        self.agent.esx_hostname = FAKE_HOST_1
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm_port2 = SamplePort(FAKE_PORT_2)
        vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.sec_br = mock.Mock()
        # First pass: existing flows found on the security bridge.
        with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
                               return_value='mock_flow') as mock_dump_flows:
            self.agent.process_event(event)
            self.assertTrue(mock_dump_flows.called)
            for vnic in vm.vnics:
                self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
                self.assertIn(vnic.port_uuid, self.agent.cluster_host_ports)
                self.assertNotIn(vnic.port_uuid, self.agent.cluster_other_ports)
        # Second pass: no flows -> the agent asks the server for ports.
        with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
                               return_value='') as mock_dump_flows, \
                mock.patch.object(self.agent.ovsvapp_rpc,
                                  "get_ports_for_device",
                                  return_value=True) as mock_get_ports:
            self.agent.process_event(event)
            self.assertTrue(mock_dump_flows.called)
            self.assertTrue(mock_get_ports.called)
    def test_process_event_vm_updated_nonhost(self):
        """VM_UPDATED for a VM hosted elsewhere must classify its port as
        a cluster_other_port.
        """
        self.agent.esx_hostname = FAKE_HOST_2
        vm_port1 = SamplePort(FAKE_PORT_1)
        port = self._build_port(FAKE_PORT_1)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm = VM(FAKE_VM, [vm_port1])
        event = SampleEvent(ovsvapp_const.VM_UPDATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm, True)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.process_event(event)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
    def test_process_event_vm_delete_hosted_vm_vlan(self):
        """VM_DELETED for a VLAN VM on this host: ports are dropped from
        cluster_host_ports, the driver's post_delete_vm runs, the network
        itself is not deleted, and flows are removed from the phys bridge.
        """
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        port = self._build_port(FAKE_PORT_1)
        br = self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
        vm = VM(FAKE_VM, ([vm_port]))
        event = SampleEvent(ovsvapp_const.VM_DELETED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self._build_lvm(port)
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.net_mgr.get_driver(),
                               "post_delete_vm",
                               ) as mock_post_del_vm, \
                mock.patch.object(self.LOG, 'debug'), \
                mock.patch.object(self.agent.net_mgr.get_driver(),
                                  "delete_network") as mock_del_net:
            self.agent.process_event(event)
            for vnic in vm.vnics:
                self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
            self.assertTrue(mock_post_del_vm.called)
            self.assertFalse(mock_del_net.called)
            self.assertTrue(br.delete_flows.called)
    def test_process_event_vm_delete_hosted_vm_vxlan(self):
        """VM_DELETED for a VXLAN VM on this host: ports are removed from
        cluster_host_ports and the driver's post_delete_vm runs.
        """
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        port = self._build_port(FAKE_PORT_1)
        port['network_type'] = p_const.TYPE_VXLAN
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
        vm = VM(FAKE_VM, ([vm_port]))
        event = SampleEvent(ovsvapp_const.VM_DELETED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.net_mgr.get_driver(),
                               "post_delete_vm",
                               return_value=True) as (post_del_vm):
            self.agent.process_event(event)
            for vnic in vm.vnics:
                self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
            self.assertTrue(post_del_vm.called)
    def test_process_event_vm_delete_non_hosted_vm(self):
        """VM_DELETED for a VM on another host: ports are removed from
        cluster_other_ports, post_delete_vm runs, but the network is not
        deleted by this agent.
        """
        self.agent.esx_hostname = FAKE_HOST_2
        self.agent.cluster_other_ports.add(FAKE_PORT_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        port = self._build_port(FAKE_PORT_1)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
        vm = VM(FAKE_VM, ([vm_port]))
        event = SampleEvent(ovsvapp_const.VM_DELETED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent.net_mgr.get_driver(),
                               "post_delete_vm",
                               return_value=True) as mock_post_del_vm, \
                mock.patch.object(self.agent.net_mgr.get_driver(),
                                  "delete_network") as mock_del_net:
            self.agent.process_event(event)
            for vnic in vm.vnics:
                self.assertNotIn(vnic.port_uuid,
                                 self.agent.cluster_other_ports)
            self.assertTrue(mock_post_del_vm.called)
            self.assertFalse(mock_del_net.called)
    def test_notify_device_added_with_hosted_vm(self):
        """When get_ports_for_device succeeds on the first try,
        _notify_device_added must not sleep/retry or log an exception.
        """
        vm = VM(FAKE_VM, [])
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "get_ports_for_device",
                               return_value=True) as mock_get_ports, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception, \
                mock.patch.object(time, "sleep") as mock_time_sleep:
            self.agent._notify_device_added(vm, host)
            self.assertTrue(mock_get_ports.called)
            self.assertFalse(mock_time_sleep.called)
            self.assertFalse(mock_log_exception.called)
    def test_notify_device_added_rpc_exception(self):
        """An RPC failure in get_ports_for_device must be logged and
        re-raised as OVSvAppNeutronAgentError without any retry sleep.
        """
        vm = VM(FAKE_VM, [])
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "get_ports_for_device",
                               side_effect=Exception()) as mock_get_ports, \
                mock.patch.object(self.LOG, 'exception'
                                  )as mock_log_exception, \
                mock.patch.object(time, "sleep") as mock_time_sleep:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._notify_device_added, vm, host)
            self.assertTrue(mock_log_exception.called)
            self.assertTrue(mock_get_ports.called)
            self.assertFalse(mock_time_sleep.called)
    def test_notify_device_added_with_retry(self):
        """When get_ports_for_device returns False, _notify_device_added
        must sleep and retry rather than raising or logging an exception.
        """
        vm = VM(FAKE_VM, [])
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "get_ports_for_device",
                               return_value=False) as mock_get_ports, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception, \
                mock.patch.object(time, "sleep") as mock_time_sleep:
            self.agent._notify_device_added(vm, host)
            self.assertTrue(mock_get_ports.called)
            self.assertTrue(mock_time_sleep.called)
            self.assertFalse(mock_log_exception.called)
    def test_notify_device_updated_migration_vlan(self):
        """A VM migrating away (update targeting another host) must drop
        its port from cluster_host_ports without re-binding the device.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        self._build_lvm(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent._add_ports_to_host_ports([FAKE_PORT_1])
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._notify_device_updated(vm, FAKE_HOST_2, True)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertFalse(mock_update_device_binding.called)
            self.assertFalse(mock_log_exception.called)
    def test_notify_device_update_not_found(self):
        """Drop flows are only installed for ports present in ports_dict:
        the first update (port unknown) adds none, the second (port now
        cached) does.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        self._build_phys_brs(port)
        self._build_lvm(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        br = self.agent.phys_brs[port['physical_network']]['br']
        # Port not yet in ports_dict -> no drop flows expected.
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ):
            self.agent._notify_device_updated(vm, host, True)
            self.assertFalse(br.add_drop_flows.called)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        # Port known now -> drop flows must be installed.
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ):
            self.agent._notify_device_updated(vm, host, True)
            self.assertTrue(br.add_drop_flows.called)
    def test_notify_device_updated_host_vlan(self):
        """An update for a VLAN VM arriving on this host must add the
        port to cluster_host_ports, re-bind the device, and add flows on
        the physical bridge.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        self._build_lvm(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        br = self.agent.phys_brs[port['physical_network']]['br']
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding:
            self.agent._notify_device_updated(vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertTrue(br.add_flows.called)
    def test_notify_device_updated_vlan_rpc_exception(self):
        """An RPC failure in update_device_binding must be logged and
        re-raised as OVSvAppNeutronAgentError; the port is still tracked
        and flows are still added before the failure surfaces.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        br = self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding",
                               side_effect=Exception()
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._notify_device_updated, vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(br.add_flows.called)
            self.assertTrue(mock_update_device_binding.called)
            self.assertTrue(mock_log_exception.called)
    def test_notify_device_updated_host_vlan_multiple_nic(self):
        """A multi-NIC VM update must re-bind the device exactly once
        while adding flows on each port's physical bridge.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm_port2 = SamplePort(FAKE_PORT_2)
        vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
        port1 = self._build_port(FAKE_PORT_1)
        port2 = self._build_port(FAKE_PORT_2)
        br1 = self._build_phys_brs(port1)
        br2 = self._build_phys_brs(port2)
        self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
        self.agent.ports_dict[port2['id']] = self.agent._build_port_info(port2)
        self._build_lvm(port1)
        self._build_lvm(port2)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._notify_device_updated(vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertFalse(mock_log_exception.called)
            # One binding RPC for the whole VM, not one per NIC.
            self.assertEqual(1, mock_update_device_binding.call_count)
            self.assertTrue(br1.add_flows.called)
            self.assertTrue(br2.add_flows.called)
def _build_lvm(self, port):
try:
self.agent.vlan_manager.add(port['network_id'], port['lvid'],
port['network_type'],
port['physical_network'], '1234')
except vlanmanager.MappingAlreadyExists:
return None
    def test_notify_device_updated_host_vxlan(self):
        """An update for a VXLAN VM on this host must add the port to
        cluster_host_ports and re-bind the device without errors.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        port1 = self._build_port(FAKE_PORT_1)
        port1['network_type'] = p_const.TYPE_VXLAN
        self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
        vm = VM(FAKE_VM, [vm_port1])
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._notify_device_updated(vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertFalse(mock_log_exception.called)
    def test_notify_device_updated_vxlan_rpc_exception(self):
        """For VXLAN, a failing update_device_binding RPC must be logged
        and re-raised as OVSvAppNeutronAgentError; the port remains in
        cluster_host_ports.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding",
                               side_effect=Exception()
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._notify_device_updated, vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertTrue(mock_log_exception.called)
    def test_map_port_to_common_model_vlan(self):
        """_map_port_to_common_model (VLAN) must name the network as
        '<network_id>-<cluster_moid>' and keep the port's uuid.
        """
        expected_port = self._build_port(FAKE_PORT_1)
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        network, port = self.agent._map_port_to_common_model(expected_port)
        expected_name = expected_port['network_id'] + "-" + FAKE_CLUSTER_MOID
        self.assertEqual(expected_name, network.name)
        self.assertEqual(expected_port['id'], port.uuid)
    def test_map_port_to_common_model_vxlan(self):
        """_map_port_to_common_model (VXLAN, with local vlan id) must use
        the same '<network_id>-<cluster_moid>' naming and keep the uuid.
        """
        expected_port = self._build_port(FAKE_PORT_1)
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        network, port = self.agent._map_port_to_common_model(expected_port, 1)
        expected_name = expected_port['network_id'] + "-" + FAKE_CLUSTER_MOID
        self.assertEqual(expected_name, network.name)
        self.assertEqual(expected_port['id'], port.uuid)
    def test_device_create_cluster_mismatch(self):
        """device_create for a device belonging to a different cluster
        must be ignored (debug-logged, no port processing).
        """
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_2
        with mock.patch.object(self.agent,
                               '_process_create_ports',
                               return_value=True) as mock_create_ports, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE)
            self.assertTrue(mock_logger_debug.called)
            self.assertFalse(mock_create_ports.called)
    def test_device_create_non_hosted_vm(self):
        """device_create for a VM on another ESX host: the port is filed
        under cluster_other_ports, SG rules are expanded/updated and the
        local VLAN is provisioned, but the device is not queued as up.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.esx_hostname = FAKE_HOST_2
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.vlan_manager.mapping = {}
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertFalse(self.agent.devices_up_list)
            self.assertTrue(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vlan(self):
        """device_create for a VLAN VM on this host: the port joins
        cluster_host_ports, the device is queued as up, SG rules are
        applied and the local VLAN is provisioned.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.vlan_manager.mapping = {}
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertTrue(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vlan_sg_rule_missing(self):
        """When SG rule expansion returns no rules for the device, the
        port must be deferred to devices_to_filter and no SG update is
        pushed; everything else proceeds as for a hosted VLAN VM.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.vlan_manager.mapping = {}
        self.agent.devices_to_filter = set()
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_MISSING
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vlan_sg_rule_partial_missing(self):
        """Partially missing SG rules must also defer the port to
        devices_to_filter and skip the SG update push.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.devices_to_filter = set()
        self.agent.vlan_manager.mapping = {}
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_PARTIAL
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vxlan(self):
port = self._build_port(FAKE_PORT_1)
port['network_type'] = p_const.TYPE_VXLAN
ports = [port]
self.agent.vlan_manager.mapping = {}
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
self.agent.vlan_manager.mapping = {}
self.agent.devices_to_filter = set()
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.agent.sg_agent,
'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES
) as mock_expand_sg_rules, \
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
) as mock_update_device_up, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_prov_local_vlan.called)
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertNotIn(FAKE_PORT_1, self.agent.devices_to_filter)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
mock_add_devices_fn.assert_called_with(ports)
self.assertTrue(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_update_device_up.called)
    def test_device_create_hosted_vm_vxlan_sg_rule_missing(self):
        """VXLAN hosted VM with missing SG rules: the port is deferred to
        devices_to_filter and no SG update is pushed, but the device is
        still reported up.
        """
        port = self._build_port(FAKE_PORT_1)
        port['network_type'] = p_const.TYPE_VXLAN
        ports = [port]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.vlan_manager.mapping = {}
        self.agent.devices_to_filter = set()
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent, '_provision_local_vlan'
                               ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent.sg_agent,
                                  'add_devices_to_filter'
                                  ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_MISSING
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
                                  ) as mock_update_device_up, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_update_device_up.called)
    def test_device_create_hosted_vm_create_port_exception(self):
        """If the driver's create_port raises during device_create, the
        agent must log the exception and raise OVSvAppNeutronAgentError;
        the SG update must not be pushed.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.net_mgr.get_driver().create_port = mock.Mock(
            side_effect=Exception())
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ), \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug, \
                mock.patch.object(self.LOG, 'exception') as mock_log_excep:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent.device_create,
                FAKE_CONTEXT, device=DEVICE,
                ports=ports, sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_log_excep.called)
def test_port_update_admin_state_up(self):
port = self._build_port(FAKE_PORT_1)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.cluster_host_ports = set([port['id']])
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
updated_port = self._build_update_port(FAKE_PORT_1)
updated_port['admin_state_up'] = True
self.devices_up_list = []
neutron_port = {'port': updated_port,
'segmentation_id': port['segmentation_id']}
with mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.port_update(FAKE_CONTEXT, **neutron_port)
self.assertEqual(neutron_port['port']['admin_state_up'],
self.agent.ports_dict[port['id']].
admin_state_up)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertFalse(mock_log_exception.called)
self.assertTrue(mock_logger_debug.called)
def test_device_update_maintenance_mode(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = True
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff") as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode"
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep'):
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertTrue(power_off.called)
self.assertTrue(maintenance_mode.called)
self.assertFalse(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=True)
self.assertFalse(log_exception.called)
def test_device_update_shutdown_mode(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = False
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff") as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode"
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep'):
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertFalse(power_off.called)
self.assertFalse(maintenance_mode.called)
self.assertTrue(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=True)
self.assertFalse(log_exception.called)
def test_device_update_ovsvapp_alreadly_powered_off(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = True
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff",
side_effect=Exception()) as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode"
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep'):
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertTrue(power_off.called)
self.assertTrue(maintenance_mode.called)
self.assertFalse(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=True)
self.assertTrue(log_exception.called)
def test_device_update_maintenance_mode_exception(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = True
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff",
side_effect=Exception()) as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode",
side_effect=Exception()
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep') as time_sleep:
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertTrue(power_off.called)
self.assertTrue(maintenance_mode.called)
self.assertFalse(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=False)
self.assertTrue(log_exception.called)
self.assertTrue(time_sleep.called)
def test_enhanced_sg_provider_updated(self):
kwargs = {'network_id': NETWORK_ID}
with mock.patch.object(self.LOG, 'info') as log_info, \
mock.patch.object(self.agent.sg_agent, "sg_provider_updated"
) as mock_sg_provider_updated:
self.agent.enhanced_sg_provider_updated(FAKE_CONTEXT, **kwargs)
self.assertTrue(log_info.called)
mock_sg_provider_updated.assert_called_with(NETWORK_ID)
def test_device_create_hosted_vm_vlan_multiple_physnet(self):
port1 = self._build_port(FAKE_PORT_1)
port2 = self._build_port(FAKE_PORT_2)
port2['physical_network'] = "physnet2"
port2['segmentation_id'] = "2005"
port2['network_id'] = "fake_net2"
ports = [port1, port2]
self._build_phys_brs(port1)
self._build_phys_brs(port2)
self.agent.phys_ofports = {}
self.agent.phys_ofports[port1['physical_network']] = 4
self.agent.phys_ofports[port2['physical_network']] = 5
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.devices_up_list = []
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.int_br = mock.Mock()
self.agent.vlan_manager.mapping = {}
self.agent.patch_sec_ofport = 1
self.agent.int_ofports = {'physnet1': 2, 'physnet2': 3}
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
), \
mock.patch.object(self.agent.int_br, 'provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES_MULTI_PORTS
), \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
self.assertEqual([FAKE_PORT_1, FAKE_PORT_2],
self.agent.devices_up_list)
mock_add_devices_fn.assert_called_with(ports)
self.assertTrue(mock_prov_local_vlan.called)
mock_prov_local_vlan.assert_any_call(
port1['network_type'],
port1['lvid'],
port1['segmentation_id'],
self.agent.patch_sec_ofport,
self.agent.int_ofports['physnet1'], None)
mock_prov_local_vlan.assert_any_call(
port2['network_type'],
port2['lvid'],
port2['segmentation_id'],
self.agent.patch_sec_ofport,
self.agent.int_ofports['physnet2'], None)
| [
"logging.getLogger",
"mock.patch",
"mock.Mock",
"mock.patch.object",
"oslo_config.cfg.CONF.set_override",
"neutron.common.utils.parse_mappings",
"networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager",
"mock.MagicMock",
"networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent"
] | [((5066, 5106), 'mock.patch', 'mock.patch', (['"""neutron.common.config.init"""'], {}), "('neutron.common.config.init')\n", (5076, 5106), False, 'import mock\n'), ((5112, 5161), 'mock.patch', 'mock.patch', (['"""neutron.common.config.setup_logging"""'], {}), "('neutron.common.config.setup_logging')\n", (5122, 5161), False, 'import mock\n'), ((5167, 5212), 'mock.patch', 'mock.patch', (['"""neutron.agent.ovsdb.api.API.get"""'], {}), "('neutron.agent.ovsdb.api.API.get')\n", (5177, 5212), False, 'import mock\n'), ((5237, 5302), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.RpcPluginApi"""'], {}), "('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')\n", (5247, 5302), False, 'import mock\n'), ((5308, 5380), 'mock.patch', 'mock.patch', (['"""neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi"""'], {}), "('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')\n", (5318, 5380), False, 'import mock\n'), ((5386, 5438), 'mock.patch', 'mock.patch', (['"""neutron.agent.rpc.PluginReportStateAPI"""'], {}), "('neutron.agent.rpc.PluginReportStateAPI')\n", (5396, 5438), False, 'import mock\n'), ((5444, 5513), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi"""'], {}), "('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')\n", (5454, 5513), False, 'import mock\n'), ((5519, 5582), 'mock.patch', 'mock.patch', (['"""neutron.context.get_admin_context_without_session"""'], {}), "('neutron.context.get_admin_context_without_session')\n", (5529, 5582), False, 'import mock\n'), ((5588, 5636), 'mock.patch', 'mock.patch', (['"""neutron.agent.rpc.create_consumers"""'], {}), "('neutron.agent.rpc.create_consumers')\n", (5598, 5636), False, 'import mock\n'), ((5642, 5766), 'mock.patch', 'mock.patch', (['"""neutron.plugins.ml2.drivers.openvswitch.agent.ovs_neutron_agent.OVSNeutronAgent.setup_integration_br"""'], {}), "(\n 
'neutron.plugins.ml2.drivers.openvswitch.agent.ovs_neutron_agent.OVSNeutronAgent.setup_integration_br'\n )\n", (5652, 5766), False, 'import mock\n'), ((5781, 5869), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_ovs_bridges"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_ovs_bridges')\n", (5791, 5869), False, 'import mock\n'), ((5889, 5977), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_security_br"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_security_br')\n", (5899, 5977), False, 'import mock\n'), ((5997, 6083), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent._init_ovs_flows"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent._init_ovs_flows')\n", (6007, 6083), False, 'import mock\n'), ((6103, 6211), 'mock.patch', 'mock.patch', (['"""networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.check_ovs_firewall_restart"""'], {}), "(\n 'networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.check_ovs_firewall_restart'\n )\n", (6113, 6211), False, 'import mock\n'), ((6226, 6324), 'mock.patch', 'mock.patch', (['"""networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.setup_base_flows"""'], {}), "(\n 'networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.setup_base_flows'\n )\n", (6236, 6324), False, 'import mock\n'), ((6339, 6398), 'mock.patch', 'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge.create"""'], {}), "('neutron.agent.common.ovs_lib.OVSBridge.create')\n", (6349, 6398), False, 'import mock\n'), ((6404, 6472), 'mock.patch', 'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode"""'], {}), "('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')\n", (6414, 6472), False, 'import mock\n'), ((6478, 6546), 'mock.patch', 'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport"""'], {}), 
"('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')\n", (6488, 6546), False, 'import mock\n'), ((6552, 6626), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.__init__"""'], {}), "('networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.__init__')\n", (6562, 6626), False, 'import mock\n'), ((8523, 8563), 'mock.patch', 'mock.patch', (['"""neutron.common.config.init"""'], {}), "('neutron.common.config.init')\n", (8533, 8563), False, 'import mock\n'), ((8569, 8618), 'mock.patch', 'mock.patch', (['"""neutron.common.config.setup_logging"""'], {}), "('neutron.common.config.setup_logging')\n", (8579, 8618), False, 'import mock\n'), ((8624, 8669), 'mock.patch', 'mock.patch', (['"""neutron.agent.ovsdb.api.API.get"""'], {}), "('neutron.agent.ovsdb.api.API.get')\n", (8634, 8669), False, 'import mock\n'), ((8694, 8759), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.RpcPluginApi"""'], {}), "('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')\n", (8704, 8759), False, 'import mock\n'), ((8765, 8837), 'mock.patch', 'mock.patch', (['"""neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi"""'], {}), "('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')\n", (8775, 8837), False, 'import mock\n'), ((8843, 8895), 'mock.patch', 'mock.patch', (['"""neutron.agent.rpc.PluginReportStateAPI"""'], {}), "('neutron.agent.rpc.PluginReportStateAPI')\n", (8853, 8895), False, 'import mock\n'), ((8901, 8970), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi"""'], {}), "('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')\n", (8911, 8970), False, 'import mock\n'), ((8976, 9039), 'mock.patch', 'mock.patch', (['"""neutron.context.get_admin_context_without_session"""'], {}), "('neutron.context.get_admin_context_without_session')\n", (8986, 9039), False, 'import mock\n'), ((9045, 9093), 'mock.patch', 'mock.patch', (['"""neutron.agent.rpc.create_consumers"""'], {}), 
"('neutron.agent.rpc.create_consumers')\n", (9055, 9093), False, 'import mock\n'), ((9099, 9223), 'mock.patch', 'mock.patch', (['"""neutron.plugins.ml2.drivers.openvswitch.agent.ovs_neutron_agent.OVSNeutronAgent.setup_integration_br"""'], {}), "(\n 'neutron.plugins.ml2.drivers.openvswitch.agent.ovs_neutron_agent.OVSNeutronAgent.setup_integration_br'\n )\n", (9109, 9223), False, 'import mock\n'), ((9238, 9341), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.check_ovsvapp_agent_restart"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.check_ovsvapp_agent_restart'\n )\n", (9248, 9341), False, 'import mock\n'), ((9356, 9444), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_ovs_bridges"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_ovs_bridges')\n", (9366, 9444), False, 'import mock\n'), ((9464, 9552), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_security_br"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.setup_security_br')\n", (9474, 9552), False, 'import mock\n'), ((9572, 9658), 'mock.patch', 'mock.patch', (['"""networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent._init_ovs_flows"""'], {}), "(\n 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent._init_ovs_flows')\n", (9582, 9658), False, 'import mock\n'), ((9678, 9786), 'mock.patch', 'mock.patch', (['"""networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.check_ovs_firewall_restart"""'], {}), "(\n 'networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.check_ovs_firewall_restart'\n )\n", (9688, 9786), False, 'import mock\n'), ((9801, 9899), 'mock.patch', 'mock.patch', (['"""networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.setup_base_flows"""'], {}), "(\n 'networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.setup_base_flows'\n )\n", (9811, 9899), False, 'import mock\n'), ((9914, 9973), 'mock.patch', 
'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge.create"""'], {}), "('neutron.agent.common.ovs_lib.OVSBridge.create')\n", (9924, 9973), False, 'import mock\n'), ((9979, 10047), 'mock.patch', 'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode"""'], {}), "('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')\n", (9989, 10047), False, 'import mock\n'), ((10053, 10121), 'mock.patch', 'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport"""'], {}), "('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')\n", (10063, 10121), False, 'import mock\n'), ((14510, 14562), 'mock.patch', 'mock.patch', (['"""neutron.agent.common.ovs_lib.OVSBridge"""'], {}), "('neutron.agent.common.ovs_lib.OVSBridge')\n", (14520, 14562), False, 'import mock\n'), ((16066, 16111), 'mock.patch', 'mock.patch', (['"""neutron.agent.ovsdb.api.API.get"""'], {}), "('neutron.agent.ovsdb.api.API.get')\n", (16076, 16111), False, 'import mock\n'), ((21724, 21769), 'mock.patch', 'mock.patch', (['"""neutron.agent.ovsdb.api.API.get"""'], {}), "('neutron.agent.ovsdb.api.API.get')\n", (21734, 21769), False, 'import mock\n'), ((51063, 51128), 'mock.patch.object', 'mock.patch.object', (['ovsvapp_agent.OVSvAppAgent', '"""check_ovs_status"""'], {}), "(ovsvapp_agent.OVSvAppAgent, 'check_ovs_status')\n", (51080, 51128), False, 'import mock\n'), ((7287, 7379), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""security_bridge_mapping"""', '"""fake_sec_br:fake_if"""', '"""SECURITYGROUP"""'], {}), "('security_bridge_mapping', 'fake_sec_br:fake_if',\n 'SECURITYGROUP')\n", (7308, 7379), False, 'from oslo_config import cfg\n'), ((7515, 7543), 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent', 'ovsvapp_agent.OVSvAppAgent', ([], {}), '()\n', (7541, 7543), False, 'from networking_vsphere.agent import ovsvapp_agent\n'), ((7667, 7695), 'logging.getLogger', 'logging.getLogger', (['"""monitor"""'], {}), "('monitor')\n", (7684, 7695), False, 'import 
logging\n'), ((7773, 7784), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (7782, 7784), False, 'import mock\n'), ((10794, 10886), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""security_bridge_mapping"""', '"""fake_sec_br:fake_if"""', '"""SECURITYGROUP"""'], {}), "('security_bridge_mapping', 'fake_sec_br:fake_if',\n 'SECURITYGROUP')\n", (10815, 10886), False, 'from oslo_config import cfg\n'), ((11042, 11070), 'networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent', 'ovsvapp_agent.OVSvAppAgent', ([], {}), '()\n', (11068, 11070), False, 'from networking_vsphere.agent import ovsvapp_agent\n'), ((11194, 11222), 'logging.getLogger', 'logging.getLogger', (['"""monitor"""'], {}), "('monitor')\n", (11211, 11222), False, 'import logging\n'), ((12434, 12505), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""security_bridge_mapping"""', 'None', '"""SECURITYGROUP"""'], {}), "('security_bridge_mapping', None, 'SECURITYGROUP')\n", (12455, 12505), False, 'from oslo_config import cfg\n'), ((12564, 12575), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (12573, 12575), False, 'import mock\n'), ((13030, 13118), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""security_bridge_mapping"""', '"""br-fake:fake_if"""', '"""SECURITYGROUP"""'], {}), "('security_bridge_mapping', 'br-fake:fake_if',\n 'SECURITYGROUP')\n", (13051, 13118), False, 'from oslo_config import cfg\n'), ((13173, 13184), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (13182, 13184), False, 'import mock\n'), ((13213, 13224), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (13222, 13224), False, 'import mock\n'), ((13952, 14023), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""security_bridge_mapping"""', 'None', '"""SECURITYGROUP"""'], {}), "('security_bridge_mapping', None, 'SECURITYGROUP')\n", (13973, 14023), False, 'from oslo_config import cfg\n'), ((14082, 14093), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (14091, 14093), False, 'import mock\n'), 
((14628, 14716), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""security_bridge_mapping"""', '"""br-sec:physnet1"""', '"""SECURITYGROUP"""'], {}), "('security_bridge_mapping', 'br-sec:physnet1',\n 'SECURITYGROUP')\n", (14649, 14716), False, 'from oslo_config import cfg\n'), ((14771, 14782), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (14780, 14782), False, 'import mock\n'), ((14811, 14822), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (14820, 14822), False, 'import mock\n'), ((16200, 16273), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""bridge_mappings"""', "['physnet1:br-eth1']", '"""OVSVAPP"""'], {}), "('bridge_mappings', ['physnet1:br-eth1'], 'OVSVAPP')\n", (16221, 16273), False, 'from oslo_config import cfg\n'), ((16341, 16397), 'neutron.common.utils.parse_mappings', 'n_utils.parse_mappings', (['cfg.CONF.OVSVAPP.bridge_mappings'], {}), '(cfg.CONF.OVSVAPP.bridge_mappings)\n', (16363, 16397), True, 'from neutron.common import utils as n_utils\n'), ((17579, 17652), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""bridge_mappings"""', "['physnet1:br-eth1']", '"""OVSVAPP"""'], {}), "('bridge_mappings', ['physnet1:br-eth1'], 'OVSVAPP')\n", (17600, 17652), False, 'from oslo_config import cfg\n'), ((17720, 17776), 'neutron.common.utils.parse_mappings', 'n_utils.parse_mappings', (['cfg.CONF.OVSVAPP.bridge_mappings'], {}), '(cfg.CONF.OVSVAPP.bridge_mappings)\n', (17742, 17776), True, 'from neutron.common import utils as n_utils\n'), ((18054, 18065), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (18063, 18065), False, 'import mock\n'), ((21071, 21135), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""tenant_network_types"""', '"""vlan"""', '"""OVSVAPP"""'], {}), "('tenant_network_types', 'vlan', 'OVSVAPP')\n", (21092, 21135), False, 'from oslo_config import cfg\n'), ((21174, 21247), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""bridge_mappings"""', "['physnet1:br-eth1']", 
'"""OVSVAPP"""'], {}), "('bridge_mappings', ['physnet1:br-eth1'], 'OVSVAPP')\n", (21195, 21247), False, 'from oslo_config import cfg\n'), ((22442, 22507), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""tenant_network_types"""', '"""vxlan"""', '"""OVSVAPP"""'], {}), "('tenant_network_types', 'vxlan', 'OVSVAPP')\n", (22463, 22507), False, 'from oslo_config import cfg\n'), ((22546, 22605), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""local_ip"""', '"""10.10.10.10"""', '"""OVSVAPP"""'], {}), "('local_ip', '10.10.10.10', 'OVSVAPP')\n", (22567, 22605), False, 'from oslo_config import cfg\n'), ((22644, 22703), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""tunnel_bridge"""', '"""br-tun"""', '"""OVSVAPP"""'], {}), "('tunnel_bridge', 'br-tun', 'OVSVAPP')\n", (22665, 22703), False, 'from oslo_config import cfg\n'), ((22762, 22773), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (22771, 22773), False, 'import mock\n'), ((22802, 22813), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (22811, 22813), False, 'import mock\n'), ((30591, 30602), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (30600, 30602), False, 'import mock\n'), ((42677, 42688), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (42686, 42688), False, 'import mock\n'), ((65031, 65042), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (65040, 65042), False, 'import mock\n'), ((65973, 65984), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (65982, 65984), False, 'import mock\n'), ((68367, 68410), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (68398, 68410), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((69874, 69917), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (69905, 69917), False, 'from networking_vsphere.tests.unit.drivers import 
fake_manager\n'), ((70957, 71000), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (70988, 71000), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((86315, 86358), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (86346, 86358), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((88359, 88402), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (88390, 88402), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((90489, 90532), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (90520, 90532), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((92633, 92676), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (92664, 92676), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((94902, 94945), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (94933, 94945), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((97016, 97059), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (97047, 97059), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((98884, 98927), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), 
"('callback')\n", (98915, 98927), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((100217, 100260), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (100248, 100260), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((102654, 102697), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (102685, 102697), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((105106, 105149), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (105137, 105149), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((107613, 107656), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (107644, 107656), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((111220, 111263), 'networking_vsphere.tests.unit.drivers.fake_manager.MockNetworkManager', 'fake_manager.MockNetworkManager', (['"""callback"""'], {}), "('callback')\n", (111251, 111263), False, 'from networking_vsphere.tests.unit.drivers import fake_manager\n'), ((111339, 111350), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (111348, 111350), False, 'import mock\n'), ((7798, 7870), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""bridge_exists"""'], {'return_value': '(True)'}), "(self.agent.int_br, 'bridge_exists', return_value=True)\n", (7815, 7870), False, 'import mock\n'), ((7939, 8016), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""dump_flows_for_table"""'], {'return_value': '""""""'}), "(self.agent.int_br, 'dump_flows_for_table', return_value='')\n", (7956, 8016), 
False, 'import mock\n'), ((12589, 12627), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""warning"""'], {}), "(self.LOG, 'warning')\n", (12606, 12627), False, 'import mock\n'), ((12666, 12719), 'mock.patch.object', 'mock.patch.object', (['self.agent.sec_br', '"""bridge_exists"""'], {}), "(self.agent.sec_br, 'bridge_exists')\n", (12683, 12719), False, 'import mock\n'), ((13238, 13273), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (13255, 13273), False, 'import mock\n'), ((13313, 13352), 'mock.patch.object', 'mock.patch.object', (['ovs_lib', '"""OVSBridge"""'], {}), "(ovs_lib, 'OVSBridge')\n", (13330, 13352), False, 'import mock\n'), ((13387, 13457), 'mock.patch.object', 'mock.patch.object', (['self.agent.sec_br', '"""add_patch_port"""'], {'return_value': '(5)'}), "(self.agent.sec_br, 'add_patch_port', return_value=5)\n", (13404, 13457), False, 'import mock\n'), ((13545, 13615), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""add_patch_port"""'], {'return_value': '(6)'}), "(self.agent.int_br, 'add_patch_port', return_value=6)\n", (13562, 13615), False, 'import mock\n'), ((14107, 14145), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""warning"""'], {}), "(self.LOG, 'warning')\n", (14124, 14145), False, 'import mock\n'), ((14185, 14238), 'mock.patch.object', 'mock.patch.object', (['self.agent.sec_br', '"""bridge_exists"""'], {}), "(self.agent.sec_br, 'bridge_exists')\n", (14202, 14238), False, 'import mock\n'), ((14883, 14918), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (14900, 14918), False, 'import mock\n'), ((14958, 15001), 'mock.patch.object', 'mock.patch.object', (['mock_br', '"""bridge_exists"""'], {}), "(mock_br, 'bridge_exists')\n", (14975, 15001), False, 'import mock\n'), ((15021, 15065), 'mock.patch.object', 'mock.patch.object', (['mock_br', '"""add_patch_port"""'], {}), "(mock_br, 'add_patch_port')\n", 
(15038, 15065), False, 'import mock\n'), ((15108, 15179), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""get_port_ofport"""'], {'return_value': '(6)'}), "(self.agent.int_br, 'get_port_ofport', return_value=6)\n", (15125, 15179), False, 'import mock\n'), ((15267, 15328), 'mock.patch.object', 'mock.patch.object', (['mock_br', '"""get_port_ofport"""'], {'return_value': '(6)'}), "(mock_br, 'get_port_ofport', return_value=6)\n", (15284, 15328), False, 'import mock\n'), ((15416, 15457), 'mock.patch.object', 'mock.patch.object', (['mock_br', '"""delete_port"""'], {}), "(mock_br, 'delete_port')\n", (15433, 15457), False, 'import mock\n'), ((16424, 16459), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (16441, 16459), False, 'import mock\n'), ((16499, 16535), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""error"""'], {}), "(self.LOG, 'error')\n", (16516, 16535), False, 'import mock\n'), ((16576, 16620), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""br_phys_cls"""'], {}), "(self.agent, 'br_phys_cls')\n", (16593, 16620), False, 'import mock\n'), ((16655, 16730), 'mock.patch.object', 'mock.patch.object', (['ovs_lib.BaseOVS', '"""get_bridges"""'], {'return_value': "['br-eth1']"}), "(ovs_lib.BaseOVS, 'get_bridges', return_value=['br-eth1'])\n", (16672, 16730), False, 'import mock\n'), ((16853, 16901), 'mock.patch.object', 'mock.patch.object', (['p_utils', '"""get_interface_name"""'], {}), "(p_utils, 'get_interface_name')\n", (16870, 16901), False, 'import mock\n'), ((16973, 17044), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""get_port_ofport"""'], {'return_value': '(6)'}), "(self.agent.int_br, 'get_port_ofport', return_value=6)\n", (16990, 17044), False, 'import mock\n'), ((18079, 18131), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""delete_flows"""'], {}), "(self.agent.int_br, 'delete_flows')\n", (18096, 18131), False, 'import 
mock\n'), ((18238, 18282), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""br_phys_cls"""'], {}), "(self.agent, 'br_phys_cls')\n", (18255, 18282), False, 'import mock\n'), ((18343, 18391), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""add_flow"""'], {}), "(self.agent.int_br, 'add_flow')\n", (18360, 18391), False, 'import mock\n'), ((19127, 19167), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (19144, 19167), False, 'import mock\n'), ((19797, 19837), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (19814, 19837), False, 'import mock\n'), ((20764, 20804), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (20781, 20804), False, 'import mock\n'), ((21291, 21346), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_physical_bridges"""'], {}), "(self.agent, 'setup_physical_bridges')\n", (21308, 21346), False, 'import mock\n'), ((21415, 21463), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_init_ovs_flows"""'], {}), "(self.agent, '_init_ovs_flows')\n", (21432, 21463), False, 'import mock\n'), ((21969, 22017), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br"""'], {}), "(self.agent, 'setup_tunnel_br')\n", (21986, 22017), False, 'import mock\n'), ((22093, 22147), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br_flows"""'], {}), "(self.agent, 'setup_tunnel_br_flows')\n", (22110, 22147), False, 'import mock\n'), ((22934, 23004), 'mock.patch.object', 'mock.patch.object', (['self.agent.tun_br', '"""add_patch_port"""'], {'return_value': '(5)'}), "(self.agent.tun_br, 'add_patch_port', return_value=5)\n", (22951, 23004), False, 'import mock\n'), ((23086, 23156), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""add_patch_port"""'], {'return_value': 
'(6)'}), "(self.agent.int_br, 'add_patch_port', return_value=6)\n", (23103, 23156), False, 'import mock\n'), ((23244, 23298), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br_flows"""'], {}), "(self.agent, 'setup_tunnel_br_flows')\n", (23261, 23298), False, 'import mock\n'), ((23937, 23972), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (23954, 23972), False, 'import mock\n'), ((24012, 24065), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_integration_br"""'], {}), "(self.agent, 'setup_integration_br')\n", (24029, 24065), False, 'import mock\n'), ((24135, 24190), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_physical_bridges"""'], {}), "(self.agent, 'setup_physical_bridges')\n", (24152, 24190), False, 'import mock\n'), ((24262, 24312), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_security_br"""'], {}), "(self.agent, 'setup_security_br')\n", (24279, 24312), False, 'import mock\n'), ((24382, 24437), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""init_firewall"""'], {}), "(self.agent.sg_agent, 'init_firewall')\n", (24399, 24437), False, 'import mock\n'), ((24508, 24556), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br"""'], {}), "(self.agent, 'setup_tunnel_br')\n", (24525, 24556), False, 'import mock\n'), ((24634, 24688), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br_flows"""'], {}), "(self.agent, 'setup_tunnel_br_flows')\n", (24651, 24688), False, 'import mock\n'), ((24773, 24821), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_init_ovs_flows"""'], {}), "(self.agent, '_init_ovs_flows')\n", (24790, 24821), False, 'import mock\n'), ((24895, 24947), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""warning"""'], {}), "(self.agent.monitor_log, 'warning')\n", (24912, 24947), False, 'import mock\n'), 
((25021, 25070), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""info"""'], {}), "(self.agent.monitor_log, 'info')\n", (25038, 25070), False, 'import mock\n'), ((26160, 26195), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (26177, 26195), False, 'import mock\n'), ((26235, 26288), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_integration_br"""'], {}), "(self.agent, 'setup_integration_br')\n", (26252, 26288), False, 'import mock\n'), ((26308, 26363), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_physical_bridges"""'], {}), "(self.agent, 'setup_physical_bridges')\n", (26325, 26363), False, 'import mock\n'), ((26435, 26485), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_security_br"""'], {}), "(self.agent, 'setup_security_br')\n", (26452, 26485), False, 'import mock\n'), ((26505, 26560), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""init_firewall"""'], {}), "(self.agent.sg_agent, 'init_firewall')\n", (26522, 26560), False, 'import mock\n'), ((26615, 26663), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br"""'], {}), "(self.agent, 'setup_tunnel_br')\n", (26632, 26663), False, 'import mock\n'), ((26741, 26795), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br_flows"""'], {}), "(self.agent, 'setup_tunnel_br_flows')\n", (26758, 26795), False, 'import mock\n'), ((26880, 26924), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""tunnel_sync"""'], {}), "(self.agent, 'tunnel_sync')\n", (26897, 26924), False, 'import mock\n'), ((26996, 27044), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_init_ovs_flows"""'], {}), "(self.agent, '_init_ovs_flows')\n", (27013, 27044), False, 'import mock\n'), ((27064, 27116), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""warning"""'], {}), "(self.agent.monitor_log, 
'warning')\n", (27081, 27116), False, 'import mock\n'), ((27190, 27239), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""info"""'], {}), "(self.agent.monitor_log, 'info')\n", (27207, 27239), False, 'import mock\n'), ((28179, 28214), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (28196, 28214), False, 'import mock\n'), ((28401, 28456), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_physical_bridges"""'], {}), "(self.agent, 'setup_physical_bridges')\n", (28418, 28456), False, 'import mock\n'), ((28528, 28576), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br"""'], {}), "(self.agent, 'setup_tunnel_br')\n", (28545, 28576), False, 'import mock\n'), ((28654, 28708), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""setup_tunnel_br_flows"""'], {}), "(self.agent, 'setup_tunnel_br_flows')\n", (28671, 28708), False, 'import mock\n'), ((28793, 28833), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (28810, 28833), False, 'import mock\n'), ((28910, 28962), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""warning"""'], {}), "(self.agent.monitor_log, 'warning')\n", (28927, 28962), False, 'import mock\n'), ((29036, 29085), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""info"""'], {}), "(self.agent.monitor_log, 'info')\n", (29053, 29085), False, 'import mock\n'), ((31220, 31283), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (31237, 31283), False, 'import mock\n'), ((31355, 31409), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (31372, 31409), False, 'import mock\n'), ((32297, 32360), 'mock.patch.object', 'mock.patch.object', 
(['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (32314, 32360), False, 'import mock\n'), ((32432, 32486), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (32449, 32486), False, 'import mock\n'), ((33081, 33121), 'mock.patch', 'mock.patch', (['"""eventlet.GreenPool.spawn_n"""'], {}), "('eventlet.GreenPool.spawn_n')\n", (33091, 33121), False, 'import mock\n'), ((33162, 33202), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (33179, 33202), False, 'import mock\n'), ((34025, 34065), 'mock.patch', 'mock.patch', (['"""eventlet.GreenPool.spawn_n"""'], {}), "('eventlet.GreenPool.spawn_n')\n", (34035, 34065), False, 'import mock\n'), ((34106, 34146), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (34123, 34146), False, 'import mock\n'), ((34868, 34966), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_details_list"""'], {'return_value': '[fakeport_1]'}), "(self.agent.ovsvapp_rpc, 'get_ports_details_list',\n return_value=[fakeport_1])\n", (34885, 34966), False, 'import mock\n'), ((35107, 35170), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (35124, 35170), False, 'import mock\n'), ((35255, 35313), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (35272, 35313), False, 'import mock\n'), ((35392, 35446), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (35409, 35446), False, 'import mock\n'), ((35530, 35570), 'mock.patch.object', 'mock.patch.object', (['self.LOG', 
'"""exception"""'], {}), "(self.LOG, 'exception')\n", (35547, 35570), False, 'import mock\n'), ((36731, 36841), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_details_list"""'], {'return_value': '[fakeport_1, fakeport_2]'}), "(self.agent.ovsvapp_rpc, 'get_ports_details_list',\n return_value=[fakeport_1, fakeport_2])\n", (36748, 36841), False, 'import mock\n'), ((36982, 37045), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (36999, 37045), False, 'import mock\n'), ((37130, 37188), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (37147, 37188), False, 'import mock\n'), ((37267, 37321), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (37284, 37321), False, 'import mock\n'), ((37400, 37440), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (37417, 37440), False, 'import mock\n'), ((38493, 38591), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_details_list"""'], {'return_value': '[fakeport_1]'}), "(self.agent.ovsvapp_rpc, 'get_ports_details_list',\n return_value=[fakeport_1])\n", (38510, 38591), False, 'import mock\n'), ((38732, 38795), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (38749, 38795), False, 'import mock\n'), ((38880, 38938), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (38897, 38938), False, 'import mock\n'), ((39017, 39063), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_populate_lvm"""'], {}), "(self.agent, 
'_populate_lvm')\n", (39034, 39063), False, 'import mock\n'), ((39083, 39137), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (39100, 39137), False, 'import mock\n'), ((39216, 39256), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (39233, 39256), False, 'import mock\n'), ((40431, 40541), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_details_list"""'], {'return_value': '[fakeport_1, fakeport_2]'}), "(self.agent.ovsvapp_rpc, 'get_ports_details_list',\n return_value=[fakeport_1, fakeport_2])\n", (40448, 40541), False, 'import mock\n'), ((40682, 40745), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (40699, 40745), False, 'import mock\n'), ((40830, 40888), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (40847, 40888), False, 'import mock\n'), ((40967, 41013), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_populate_lvm"""'], {}), "(self.agent, '_populate_lvm')\n", (40984, 41013), False, 'import mock\n'), ((41033, 41087), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (41050, 41087), False, 'import mock\n'), ((41166, 41206), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (41183, 41206), False, 'import mock\n'), ((42702, 42812), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_details_list"""'], {'return_value': '[fakeport_1, fakeport_2]'}), "(self.agent.ovsvapp_rpc, 'get_ports_details_list',\n return_value=[fakeport_1, fakeport_2])\n", (42719, 42812), False, 'import mock\n'), 
((42953, 43016), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (42970, 43016), False, 'import mock\n'), ((43101, 43159), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (43118, 43159), False, 'import mock\n'), ((43238, 43301), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""remove_devices_filter"""'], {}), "(self.agent.sg_agent, 'remove_devices_filter')\n", (43255, 43301), False, 'import mock\n'), ((43418, 43472), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (43435, 43472), False, 'import mock\n'), ((43551, 43609), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_remove_stale_ports_flows"""'], {}), "(self.agent, '_remove_stale_ports_flows')\n", (43568, 43609), False, 'import mock\n'), ((43629, 43680), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_block_stale_ports"""'], {}), "(self.agent, '_block_stale_ports')\n", (43646, 43680), False, 'import mock\n'), ((43700, 43740), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (43717, 43740), False, 'import mock\n'), ((45244, 45354), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_details_list"""'], {'return_value': '[fakeport_1, fakeport_2]'}), "(self.agent.ovsvapp_rpc, 'get_ports_details_list',\n return_value=[fakeport_1, fakeport_2])\n", (45261, 45354), False, 'import mock\n'), ((45495, 45553), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (45512, 45553), False, 'import mock\n'), ((45633, 45687), 'mock.patch.object', 'mock.patch.object', (['self.agent', 
'"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (45650, 45687), False, 'import mock\n'), ((45742, 45800), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_remove_stale_ports_flows"""'], {}), "(self.agent, '_remove_stale_ports_flows')\n", (45759, 45800), False, 'import mock\n'), ((45820, 45871), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_block_stale_ports"""'], {}), "(self.agent, '_block_stale_ports')\n", (45837, 45871), False, 'import mock\n'), ((45891, 45943), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""warning"""'], {}), "(self.agent.monitor_log, 'warning')\n", (45908, 45943), False, 'import mock\n'), ((46017, 46066), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""info"""'], {}), "(self.agent.monitor_log, 'info')\n", (46034, 46066), False, 'import mock\n'), ((47530, 47588), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_firewall"""'], {}), "(self.agent.sg_agent, 'refresh_firewall')\n", (47547, 47588), False, 'import mock\n'), ((47668, 47720), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""warning"""'], {}), "(self.agent.monitor_log, 'warning')\n", (47685, 47720), False, 'import mock\n'), ((47794, 47843), 'mock.patch.object', 'mock.patch.object', (['self.agent.monitor_log', '"""info"""'], {}), "(self.agent.monitor_log, 'info')\n", (47811, 47843), False, 'import mock\n'), ((48736, 48801), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""check_ovs_status"""'], {'return_value': '(4)'}), "(self.agent, 'check_ovs_status', return_value=4)\n", (48753, 48801), False, 'import mock\n'), ((48870, 48919), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_firewall"""'], {}), "(self.agent, '_update_firewall')\n", (48887, 48919), False, 'import mock\n'), ((48998, 49087), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', 
'"""firewall_refresh_needed"""'], {'return_value': '(False)'}), "(self.agent.sg_agent, 'firewall_refresh_needed',\n return_value=False)\n", (49015, 49087), False, 'import mock\n'), ((49231, 49293), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_port_filters"""'], {}), "(self.agent.sg_agent, 'refresh_port_filters')\n", (49248, 49293), False, 'import mock\n'), ((49377, 49431), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_port_bindings"""'], {}), "(self.agent, '_update_port_bindings')\n", (49394, 49431), False, 'import mock\n'), ((49993, 50058), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""check_ovs_status"""'], {'return_value': '(0)'}), "(self.agent, 'check_ovs_status', return_value=0)\n", (50010, 50058), False, 'import mock\n'), ((50127, 50180), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""mitigate_ovs_restart"""'], {}), "(self.agent, 'mitigate_ovs_restart')\n", (50144, 50180), False, 'import mock\n'), ((50252, 50301), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_firewall"""'], {}), "(self.agent, '_update_firewall')\n", (50269, 50301), False, 'import mock\n'), ((50380, 50469), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""firewall_refresh_needed"""'], {'return_value': '(False)'}), "(self.agent.sg_agent, 'firewall_refresh_needed',\n return_value=False)\n", (50397, 50469), False, 'import mock\n'), ((50613, 50667), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_port_bindings"""'], {}), "(self.agent, '_update_port_bindings')\n", (50630, 50667), False, 'import mock\n'), ((51342, 51395), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""mitigate_ovs_restart"""'], {}), "(self.agent, 'mitigate_ovs_restart')\n", (51359, 51395), False, 'import mock\n'), ((51464, 51513), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_firewall"""'], {}), "(self.agent, '_update_firewall')\n", (51481, 
51513), False, 'import mock\n'), ((51592, 51681), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""firewall_refresh_needed"""'], {'return_value': '(False)'}), "(self.agent.sg_agent, 'firewall_refresh_needed',\n return_value=False)\n", (51609, 51681), False, 'import mock\n'), ((51825, 51879), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_port_bindings"""'], {}), "(self.agent, '_update_port_bindings')\n", (51842, 51879), False, 'import mock\n'), ((52885, 52950), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""check_ovs_status"""'], {'return_value': '(4)'}), "(self.agent, 'check_ovs_status', return_value=4)\n", (52902, 52950), False, 'import mock\n'), ((53019, 53072), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""mitigate_ovs_restart"""'], {}), "(self.agent, 'mitigate_ovs_restart')\n", (53036, 53072), False, 'import mock\n'), ((53144, 53193), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_firewall"""'], {}), "(self.agent, '_update_firewall')\n", (53161, 53193), False, 'import mock\n'), ((53271, 53360), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""firewall_refresh_needed"""'], {'return_value': '(False)'}), "(self.agent.sg_agent, 'firewall_refresh_needed',\n return_value=False)\n", (53288, 53360), False, 'import mock\n'), ((53504, 53558), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_port_bindings"""'], {}), "(self.agent, '_update_port_bindings')\n", (53521, 53558), False, 'import mock\n'), ((54110, 54175), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""check_ovs_status"""'], {'return_value': '(4)'}), "(self.agent, 'check_ovs_status', return_value=4)\n", (54127, 54175), False, 'import mock\n'), ((54244, 54293), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_firewall"""'], {}), "(self.agent, '_update_firewall')\n", (54261, 54293), False, 'import mock\n'), ((54372, 54460), 
'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""firewall_refresh_needed"""'], {'return_value': '(True)'}), "(self.agent.sg_agent, 'firewall_refresh_needed',\n return_value=True)\n", (54389, 54460), False, 'import mock\n'), ((54603, 54665), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""refresh_port_filters"""'], {}), "(self.agent.sg_agent, 'refresh_port_filters')\n", (54620, 54665), False, 'import mock\n'), ((54749, 54803), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_port_bindings"""'], {}), "(self.agent, '_update_port_bindings')\n", (54766, 54803), False, 'import mock\n'), ((55374, 55439), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""check_ovs_status"""'], {'return_value': '(4)'}), "(self.agent, 'check_ovs_status', return_value=4)\n", (55391, 55439), False, 'import mock\n'), ((55508, 55557), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_firewall"""'], {}), "(self.agent, '_update_firewall')\n", (55525, 55557), False, 'import mock\n'), ((55636, 55725), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""firewall_refresh_needed"""'], {'return_value': '(False)'}), "(self.agent.sg_agent, 'firewall_refresh_needed',\n return_value=False)\n", (55653, 55725), False, 'import mock\n'), ((55869, 55923), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_update_port_bindings"""'], {}), "(self.agent, '_update_port_bindings')\n", (55886, 55923), False, 'import mock\n'), ((56465, 56556), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_devices_up"""'], {'return_value': 'ret_value'}), "(self.agent.ovsvapp_rpc, 'update_devices_up', return_value\n =ret_value)\n", (56482, 56556), False, 'import mock\n'), ((56686, 56726), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (56703, 56726), False, 'import mock\n'), ((57329, 57369), 'mock.patch.object', 
'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (57346, 57369), False, 'import mock\n'), ((57900, 57991), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_devices_up"""'], {'return_value': 'ret_value'}), "(self.agent.ovsvapp_rpc, 'update_devices_up', return_value\n =ret_value)\n", (57917, 57991), False, 'import mock\n'), ((58121, 58161), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (58138, 58161), False, 'import mock\n'), ((58647, 58739), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_devices_down"""'], {'return_value': 'ret_value'}), "(self.agent.ovsvapp_rpc, 'update_devices_down',\n return_value=ret_value)\n", (58664, 58739), False, 'import mock\n'), ((58872, 58912), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (58889, 58912), False, 'import mock\n'), ((59529, 59569), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (59546, 59569), False, 'import mock\n'), ((60114, 60206), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_devices_down"""'], {'return_value': 'ret_value'}), "(self.agent.ovsvapp_rpc, 'update_devices_down',\n return_value=ret_value)\n", (60131, 60206), False, 'import mock\n'), ((60339, 60379), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (60356, 60379), False, 'import mock\n'), ((60707, 60762), 'mock.patch.object', 'mock.patch.object', (['self.agent.state_rpc', '"""report_state"""'], {}), "(self.agent.state_rpc, 'report_state')\n", (60724, 60762), False, 'import mock\n'), ((61474, 61514), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (61491, 61514), False, 'import mock\n'), ((62049, 62102), 
'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_notify_device_added"""'], {}), "(self.agent, '_notify_device_added')\n", (62066, 62102), False, 'import mock\n'), ((62168, 62223), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_notify_device_updated"""'], {}), "(self.agent, '_notify_device_updated')\n", (62185, 62223), False, 'import mock\n'), ((62295, 62350), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_notify_device_deleted"""'], {}), "(self.agent, '_notify_device_deleted')\n", (62312, 62350), False, 'import mock\n'), ((62419, 62455), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (62436, 62455), False, 'import mock\n'), ((63100, 63140), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (63117, 63140), False, 'import mock\n'), ((63217, 63253), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""error"""'], {}), "(self.LOG, 'error')\n", (63234, 63253), False, 'import mock\n'), ((63805, 63858), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_notify_device_added"""'], {}), "(self.agent, '_notify_device_added')\n", (63822, 63858), False, 'import mock\n'), ((64322, 64375), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_notify_device_added"""'], {}), "(self.agent, '_notify_device_added')\n", (64339, 64375), False, 'import mock\n'), ((65056, 65141), 'mock.patch.object', 'mock.patch.object', (['self.agent.sec_br', '"""dump_flows_for"""'], {'return_value': '"""mock_flow"""'}), "(self.agent.sec_br, 'dump_flows_for', return_value='mock_flow'\n )\n", (65073, 65141), False, 'import mock\n'), ((65998, 66083), 'mock.patch.object', 'mock.patch.object', (['self.agent.sec_br', '"""dump_flows_for"""'], {'return_value': '"""mock_flow"""'}), "(self.agent.sec_br, 'dump_flows_for', return_value='mock_flow'\n )\n", (66015, 66083), False, 'import mock\n'), ((66491, 66562), 
'mock.patch.object', 'mock.patch.object', (['self.agent.sec_br', '"""dump_flows_for"""'], {'return_value': '""""""'}), "(self.agent.sec_br, 'dump_flows_for', return_value='')\n", (66508, 66562), False, 'import mock\n'), ((66632, 66720), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_for_device"""'], {'return_value': '(True)'}), "(self.agent.ovsvapp_rpc, 'get_ports_for_device',\n return_value=True)\n", (66649, 66720), False, 'import mock\n'), ((68673, 68709), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (68690, 68709), False, 'import mock\n'), ((71939, 72027), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_for_device"""'], {'return_value': '(True)'}), "(self.agent.ovsvapp_rpc, 'get_ports_for_device',\n return_value=True)\n", (71956, 72027), False, 'import mock\n'), ((72123, 72163), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (72140, 72163), False, 'import mock\n'), ((72240, 72272), 'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (72257, 72272), False, 'import mock\n'), ((72915, 72955), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (72932, 72955), False, 'import mock\n'), ((73031, 73063), 'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (73048, 73063), False, 'import mock\n'), ((73596, 73685), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""get_ports_for_device"""'], {'return_value': '(False)'}), "(self.agent.ovsvapp_rpc, 'get_ports_for_device',\n return_value=False)\n", (73613, 73685), False, 'import mock\n'), ((73781, 73821), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (73798, 73821), False, 'import mock\n'), ((73898, 73930), 
'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (73915, 73930), False, 'import mock\n'), ((74745, 74811), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_device_binding"""'], {}), "(self.agent.ovsvapp_rpc, 'update_device_binding')\n", (74762, 74811), False, 'import mock\n'), ((74924, 74964), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (74941, 74964), False, 'import mock\n'), ((75788, 75854), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_device_binding"""'], {}), "(self.agent.ovsvapp_rpc, 'update_device_binding')\n", (75805, 75854), False, 'import mock\n'), ((76127, 76193), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_device_binding"""'], {}), "(self.agent.ovsvapp_rpc, 'update_device_binding')\n", (76144, 76193), False, 'import mock\n'), ((76956, 77022), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_device_binding"""'], {}), "(self.agent.ovsvapp_rpc, 'update_device_binding')\n", (76973, 77022), False, 'import mock\n'), ((78093, 78133), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (78110, 78133), False, 'import mock\n'), ((79366, 79432), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_device_binding"""'], {}), "(self.agent.ovsvapp_rpc, 'update_device_binding')\n", (79383, 79432), False, 'import mock\n'), ((79545, 79585), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (79562, 79585), False, 'import mock\n'), ((80903, 80969), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_device_binding"""'], {}), "(self.agent.ovsvapp_rpc, 'update_device_binding')\n", (80920, 80969), False, 'import mock\n'), ((81082, 81122), 'mock.patch.object', 
'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (81099, 81122), False, 'import mock\n'), ((82007, 82047), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (82024, 82047), False, 'import mock\n'), ((83570, 83643), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_process_create_ports"""'], {'return_value': '(True)'}), "(self.agent, '_process_create_ports', return_value=True)\n", (83587, 83643), False, 'import mock\n'), ((83746, 83782), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (83763, 83782), False, 'import mock\n'), ((84451, 84514), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (84468, 84514), False, 'import mock\n'), ((84589, 84648), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (84606, 84648), False, 'import mock\n'), ((84724, 84814), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES)\n", (84741, 84814), False, 'import mock\n'), ((84922, 84976), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (84939, 84976), False, 'import mock\n'), ((85055, 85091), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (85072, 85091), False, 'import mock\n'), ((86419, 86482), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (86436, 86482), False, 'import mock\n'), ((86557, 86616), 'mock.patch.object', 
'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (86574, 86616), False, 'import mock\n'), ((86692, 86782), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES)\n", (86709, 86782), False, 'import mock\n'), ((86890, 86944), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (86907, 86944), False, 'import mock\n'), ((87023, 87059), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (87040, 87059), False, 'import mock\n'), ((88463, 88526), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (88480, 88526), False, 'import mock\n'), ((88601, 88660), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (88618, 88660), False, 'import mock\n'), ((88736, 88834), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES_MISSING'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES_MISSING)\n", (88753, 88834), False, 'import mock\n'), ((88942, 88996), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (88959, 88996), False, 'import mock\n'), ((89075, 89111), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (89092, 89111), False, 'import mock\n'), ((90593, 90656), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 
'add_devices_to_filter')\n", (90610, 90656), False, 'import mock\n'), ((90731, 90790), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (90748, 90790), False, 'import mock\n'), ((90866, 90964), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES_PARTIAL'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES_PARTIAL)\n", (90883, 90964), False, 'import mock\n'), ((91072, 91126), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (91089, 91126), False, 'import mock\n'), ((91205, 91241), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (91222, 91241), False, 'import mock\n'), ((92737, 92791), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (92754, 92791), False, 'import mock\n'), ((92867, 92930), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (92884, 92930), False, 'import mock\n'), ((93042, 93101), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (93059, 93101), False, 'import mock\n'), ((93177, 93267), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES)\n", (93194, 93267), False, 'import mock\n'), ((93375, 93435), 'mock.patch.object', 'mock.patch.object', (['self.agent.plugin_rpc', '"""update_device_up"""'], {}), "(self.agent.plugin_rpc, 'update_device_up')\n", (93392, 93435), False, 'import mock\n'), 
((93515, 93551), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (93532, 93551), False, 'import mock\n'), ((95006, 95060), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (95023, 95060), False, 'import mock\n'), ((95136, 95199), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (95153, 95199), False, 'import mock\n'), ((95311, 95370), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (95328, 95370), False, 'import mock\n'), ((95446, 95544), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES_MISSING'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES_MISSING)\n", (95463, 95544), False, 'import mock\n'), ((95652, 95712), 'mock.patch.object', 'mock.patch.object', (['self.agent.plugin_rpc', '"""update_device_up"""'], {}), "(self.agent.plugin_rpc, 'update_device_up')\n", (95669, 95712), False, 'import mock\n'), ((95792, 95828), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (95809, 95828), False, 'import mock\n'), ((97222, 97285), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (97239, 97285), False, 'import mock\n'), ((97337, 97391), 'mock.patch.object', 'mock.patch.object', (['self.agent', '"""_provision_local_vlan"""'], {}), "(self.agent, '_provision_local_vlan')\n", (97354, 97391), False, 'import mock\n'), ((97446, 97505), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", 
(97463, 97505), False, 'import mock\n'), ((97581, 97671), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES)\n", (97598, 97671), False, 'import mock\n'), ((97779, 97815), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (97796, 97815), False, 'import mock\n'), ((97856, 97896), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (97873, 97896), False, 'import mock\n'), ((99242, 99282), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (99259, 99282), False, 'import mock\n'), ((99356, 99392), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (99373, 99392), False, 'import mock\n'), ((100478, 100555), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_vm_mor_by_name"""'], {'return_value': '"""vm_mor"""'}), "(resource_util, 'get_vm_mor_by_name', return_value='vm_mor')\n", (100495, 100555), False, 'import mock\n'), ((100655, 100741), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_host_mor_by_name"""'], {'return_value': '"""host_mor"""'}), "(resource_util, 'get_host_mor_by_name', return_value=\n 'host_mor')\n", (100672, 100741), False, 'import mock\n'), ((100879, 100930), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_vm_poweroff"""'], {}), "(resource_util, 'set_vm_poweroff')\n", (100896, 100930), False, 'import mock\n'), ((100997, 101063), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_host_into_maintenance_mode"""'], {}), "(resource_util, 'set_host_into_maintenance_mode')\n", (101014, 101063), False, 'import mock\n'), ((101172, 101235), 'mock.patch.object', 'mock.patch.object', (['resource_util', 
'"""set_host_into_shutdown_mode"""'], {}), "(resource_util, 'set_host_into_shutdown_mode')\n", (101189, 101235), False, 'import mock\n'), ((101341, 101405), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_cluster_lock"""'], {}), "(self.agent.ovsvapp_rpc, 'update_cluster_lock')\n", (101358, 101405), False, 'import mock\n'), ((101475, 101515), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (101492, 101515), False, 'import mock\n'), ((101552, 101584), 'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (101569, 101584), False, 'import mock\n'), ((102915, 102992), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_vm_mor_by_name"""'], {'return_value': '"""vm_mor"""'}), "(resource_util, 'get_vm_mor_by_name', return_value='vm_mor')\n", (102932, 102992), False, 'import mock\n'), ((103092, 103178), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_host_mor_by_name"""'], {'return_value': '"""host_mor"""'}), "(resource_util, 'get_host_mor_by_name', return_value=\n 'host_mor')\n", (103109, 103178), False, 'import mock\n'), ((103316, 103367), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_vm_poweroff"""'], {}), "(resource_util, 'set_vm_poweroff')\n", (103333, 103367), False, 'import mock\n'), ((103434, 103500), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_host_into_maintenance_mode"""'], {}), "(resource_util, 'set_host_into_maintenance_mode')\n", (103451, 103500), False, 'import mock\n'), ((103609, 103672), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_host_into_shutdown_mode"""'], {}), "(resource_util, 'set_host_into_shutdown_mode')\n", (103626, 103672), False, 'import mock\n'), ((103778, 103842), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_cluster_lock"""'], {}), "(self.agent.ovsvapp_rpc, 
'update_cluster_lock')\n", (103795, 103842), False, 'import mock\n'), ((103912, 103952), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (103929, 103952), False, 'import mock\n'), ((103989, 104021), 'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (104006, 104021), False, 'import mock\n'), ((105367, 105444), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_vm_mor_by_name"""'], {'return_value': '"""vm_mor"""'}), "(resource_util, 'get_vm_mor_by_name', return_value='vm_mor')\n", (105384, 105444), False, 'import mock\n'), ((105544, 105630), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_host_mor_by_name"""'], {'return_value': '"""host_mor"""'}), "(resource_util, 'get_host_mor_by_name', return_value=\n 'host_mor')\n", (105561, 105630), False, 'import mock\n'), ((105945, 106011), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_host_into_maintenance_mode"""'], {}), "(resource_util, 'set_host_into_maintenance_mode')\n", (105962, 106011), False, 'import mock\n'), ((106120, 106183), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_host_into_shutdown_mode"""'], {}), "(resource_util, 'set_host_into_shutdown_mode')\n", (106137, 106183), False, 'import mock\n'), ((106289, 106353), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_cluster_lock"""'], {}), "(self.agent.ovsvapp_rpc, 'update_cluster_lock')\n", (106306, 106353), False, 'import mock\n'), ((106423, 106463), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (106440, 106463), False, 'import mock\n'), ((106500, 106532), 'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (106517, 106532), False, 'import mock\n'), ((107874, 107951), 'mock.patch.object', 'mock.patch.object', (['resource_util', 
'"""get_vm_mor_by_name"""'], {'return_value': '"""vm_mor"""'}), "(resource_util, 'get_vm_mor_by_name', return_value='vm_mor')\n", (107891, 107951), False, 'import mock\n'), ((108051, 108137), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""get_host_mor_by_name"""'], {'return_value': '"""host_mor"""'}), "(resource_util, 'get_host_mor_by_name', return_value=\n 'host_mor')\n", (108068, 108137), False, 'import mock\n'), ((108686, 108749), 'mock.patch.object', 'mock.patch.object', (['resource_util', '"""set_host_into_shutdown_mode"""'], {}), "(resource_util, 'set_host_into_shutdown_mode')\n", (108703, 108749), False, 'import mock\n'), ((108855, 108919), 'mock.patch.object', 'mock.patch.object', (['self.agent.ovsvapp_rpc', '"""update_cluster_lock"""'], {}), "(self.agent.ovsvapp_rpc, 'update_cluster_lock')\n", (108872, 108919), False, 'import mock\n'), ((108989, 109029), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""exception"""'], {}), "(self.LOG, 'exception')\n", (109006, 109029), False, 'import mock\n'), ((109066, 109098), 'mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (109083, 109098), False, 'import mock\n'), ((109961, 109996), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""info"""'], {}), "(self.LOG, 'info')\n", (109978, 109996), False, 'import mock\n'), ((110028, 110089), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""sg_provider_updated"""'], {}), "(self.agent.sg_agent, 'sg_provider_updated')\n", (110045, 110089), False, 'import mock\n'), ((111513, 111576), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""add_devices_to_filter"""'], {}), "(self.agent.sg_agent, 'add_devices_to_filter')\n", (111530, 111576), False, 'import mock\n'), ((111651, 111710), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""ovsvapp_sg_update"""'], {}), "(self.agent.sg_agent, 'ovsvapp_sg_update')\n", (111668, 111710), False, 'import 
mock\n'), ((111765, 111825), 'mock.patch.object', 'mock.patch.object', (['self.agent.int_br', '"""provision_local_vlan"""'], {}), "(self.agent.int_br, 'provision_local_vlan')\n", (111782, 111825), False, 'import mock\n'), ((111904, 112006), 'mock.patch.object', 'mock.patch.object', (['self.agent.sg_agent', '"""expand_sg_rules"""'], {'return_value': 'FAKE_SG_RULES_MULTI_PORTS'}), "(self.agent.sg_agent, 'expand_sg_rules', return_value=\n FAKE_SG_RULES_MULTI_PORTS)\n", (111921, 112006), False, 'import mock\n'), ((112090, 112126), 'mock.patch.object', 'mock.patch.object', (['self.LOG', '"""debug"""'], {}), "(self.LOG, 'debug')\n", (112107, 112126), False, 'import mock\n'), ((85313, 85329), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (85327, 85329), False, 'import mock\n'), ((87281, 87297), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (87295, 87297), False, 'import mock\n'), ((89333, 89349), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (89347, 89349), False, 'import mock\n'), ((91463, 91479), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (91477, 91479), False, 'import mock\n'), ((93773, 93789), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (93787, 93789), False, 'import mock\n'), ((96050, 96066), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (96064, 96066), False, 'import mock\n'), ((98120, 98136), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (98134, 98136), False, 'import mock\n'), ((112348, 112364), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (112362, 112364), False, 'import mock\n')] |
"""
This module provides classes that support observers, smart value handling and debug functions
All changes to values nominate an agent, and observers nominate the agent making changes they
are interested in.
It supercedes the pvars module
"""
import logging, sys, threading, pathlib, math, json
from enum import Enum, auto as enumauto, Flag
class loglvls(Enum):
    """
    A class for logging levels so data is self identifying.

    Values mirror the stdlib logging numeric levels, plus VAST (one step more
    verbose than DEBUG) and NONE (used to mean "no logging").
    """
    VAST = logging.DEBUG-1
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARN = logging.WARN
    ERROR = logging.ERROR
    FATAL = logging.FATAL
    NONE = 0
class myagents(Flag):
    """
    Default set of agents that may change watchable values; an app can supply
    its own Flag class instead (see watchableApp's agentclass parameter).
    """
    NONE = 0
    app = enumauto()
    user = enumauto()
class wflags(Flag):
    """
    Behaviour flags that can be attached to a watchable (currently only
    DISABLED is defined; the flags are stored but not interpreted here).
    """
    NONE = 0
    DISABLED = enumauto()
class watchable():
    """
    provides a 'smart' object that provides basic observer functionality around an object.
    Changes to the value can be policed, and updates have to provide an agent that is
    performing the update. Observers can then request to be notified when the value is changed
    by specific agents.
    """
    def __init__(self, value, app, flags=wflags.NONE, loglevel=loglvls.INFO):
        """
        creates a new watchable. Initialises the internal value and sets an empty observers list.

        value   : the initial value for the object. Not validated!
        app     : the app instance for this. Used for logging and for validating agents
        flags   : wflags member(s) recorded on this watchable
        loglevel: log requests below this loglvls level are dropped
        """
        self._val=value
        self.app=app
        self.observers=None             # lazily becomes {agent: [callback, ...]}
        self.oblock=threading.Lock()    # guards all access to self.observers
        self.flags=flags
        self.loglevel=loglevel
        self.log(loglvls.DEBUG, 'watchable type %s setup with value %s' % (type(self).__name__, self._val))

    def setValue(self, value, agent):
        """
        Updates the value of a watchable, or (for loglvls / wflags values) its
        loglevel or flags.
        Otherwise this validates and converts (if relevant) the requested value.
        If the value is valid and different from the current value, checks for and
        calls any observers interested in changes by the given agent.

        returns True if the stored value changed, False otherwise.
        """
        if isinstance(value, loglvls):
            self.loglevel = value
            return False
        if isinstance(value, wflags):
            self.flags=value
            return False
        assert isinstance(agent, self.app.agentclass), 'unexpected value %s of type %s in setValue' % (value, type(value).__name__)
        newvalue=self.validValue(value, agent)
        if newvalue != self._val:
            self.notify(newvalue, agent)
            return True
        self.log(loglvls.DEBUG,'value unchanged (%s)' % self._val)
        return False

    def getValue(self):
        """returns the current (canonical) value"""
        return self._val

    def validValue(self, value, agent=None):
        """
        validates the given value and returns the canonical value which will be stored.
        Raise an exception if the value is invalid.
        'Real' classes must implement this.
        """
        raise NotImplementedError()

    def notify(self, newvalue, agent):
        """
        stores newvalue and invokes any callbacks registered for the given agent.

        The callback list is copied while holding the lock; the callbacks
        themselves run outside the lock.
        """
        if self.observers:
            clist=None
            with self.oblock:
                if agent in self.observers:
                    clist=self.observers[agent].copy()
            oldvalue=self._val
            self._val=newvalue
            if clist:
                for ob in clist:
                    ob(oldValue=oldvalue, newValue=newvalue, agent=agent, watched=self)
                self.log(loglvls.DEBUG,'value changed (%s)- observers called' % self._val)
        else:
            self._val=newvalue
            self.log(loglvls.DEBUG,'value changed (%s)- no observers' % self._val)

    def addNotify(self, callback, agent):
        """
        registers callback to be invoked when this value is changed by the given agent.

        callback is called with keyword args oldValue, newValue, agent and watched.
        """
        assert callable(callback)
        assert isinstance(agent, self.app.agentclass)
        self.log(loglvls.DEBUG,'added watcher %s' % callback.__name__)
        with self.oblock:
            if self.observers is None:
                self.observers={agent:[callback]}
            elif agent in self.observers:
                self.observers[agent].append(callback)
            else:
                self.observers[agent]=[callback]

    def dropNotify(self, callback, agent):
        """
        removes a callback previously registered with addNotify for this agent.

        raises ValueError if the callback is not registered for that agent.
        """
        # bug fix: previously this crashed with TypeError when no observer had
        # ever been added (self.observers still None) and raised an
        # uninformative KeyError for an unknown agent.
        with self.oblock:
            if not self.observers or agent not in self.observers:
                raise ValueError('no observers registered for agent %s' % agent)
            self.observers[agent].remove(callback)

    def log(self, loglevel, *args, **kwargs):
        """
        request a logging operation. This does nothing if the given loglevel is < the loglevel set in the object
        """
        if loglevel.value >= self.loglevel.value:
            self.app.log(loglevel, *args, **kwargs)
class textWatch(watchable):
    """
    A watchable whose canonical value is always a text string.
    """
    def validValue(self, value, agent):
        """
        value : the requested new value; anything str() accepts, except None.
        agent : the agent requesting the change (unused here)
        returns the str() conversion of value.
        raises ValueError for None, plus whatever str() itself may raise.
        """
        if value is not None:
            return str(value)
        raise ValueError('None is not a valid textVar value')
class floatWatch(watchable):
    """
    A watchable restricted to simple floating point numbers, optionally bounded.
    """
    def __init__(self, *, maxv=sys.float_info.max, minv=-sys.float_info.max, clamp=False, allowNaN=True, **kwargs):
        """
        Makes a float watchable with the given bounds.

        minv    : smallest permitted value - use 0 to allow only positive numbers
        maxv    : largest permitted value
        clamp   : when True, any value float() accepts is taken, pinned into [minv, maxv]
        allowNaN: when True, NaN passes validation unchanged
        """
        self.maxv = float(maxv)
        self.minv = float(minv)
        self.clamp = (clamp == True)
        self.allowNaN = allowNaN
        super().__init__(**kwargs)

    def validValue(self, value, agent):
        """
        value : anything float() handles that lies between minv and maxv
                - or, when clamp is set, any value at all
        agent : the agent requesting the change (unused here)
        returns the canonical float value.
        raises ValueError when the value is outside the permitted range.
        """
        candidate = float(value)
        if self.allowNaN and math.isnan(candidate):
            return candidate
        if self.clamp:
            # pin out-of-range values to the nearest bound
            return min(max(candidate, self.minv), self.maxv)
        if not (self.minv <= candidate <= self.maxv):
            raise ValueError('value {} is outside range {} to {}'.format(value, self.minv, self.maxv))
        return candidate
class intWatch(watchable):
    """
    A watchable restricted to integer values, optionally bounded in either direction.
    """
    def __init__(self, maxv=None, minv=None, clamp=False, **kwargs):
        """
        creates an integer watchable.

        maxv : None for no upper bound, otherwise anything int() accepts
        minv : None for no lower bound, otherwise anything int() accepts
        clamp: when True, out-of-range values are pinned to the nearest bound
        """
        self.maxv = None if maxv is None else int(maxv)
        self.minv = None if minv is None else int(minv)
        self.clamp = (clamp == True)
        super().__init__(**kwargs)

    def validValue(self, value, agent):
        """
        value : anything int() handles that lies between minv and maxv
                - or, when clamp is set, any value at all
        agent : the agent requesting the change (unused here)
        returns the canonical int value.
        raises ValueError when the value is outside the permitted range.
        """
        candidate = int(value)
        below = self.minv is not None and candidate < self.minv
        above = self.maxv is not None and candidate > self.maxv
        if self.clamp:
            if below:
                return self.minv
            if above:
                return self.maxv
            return candidate
        if below or above:
            raise ValueError('value {} is outside range {} to {} for watchable'.format(value, self.minv, self.maxv))
        return candidate

    def increment(self, agent, count=1):
        """adds count to the current value via setValue and returns the requested new value"""
        newval = self.getValue() + int(count)
        self.setValue(newval, agent)
        return newval
class enumWatch(watchable):
    """
    a watchable that can only take a specific set of values, and can wrap / clamp values.
    It also allows values to be cycled through.
    """
    def __init__(self, vlist, wrap=True, clamp=False, **kwargs):
        """
        vlist: the complete ordered list of permitted values
        wrap : when a step goes past either end, continue from the other end
        clamp: when a step goes past either end, stick at that end
               (wrap takes precedence when both are set)
        """
        self.wrap=wrap == True
        self.clamp=clamp == True
        self.vlist=vlist
        super().__init__(**kwargs)

    def validValue(self, value, agent):
        """accepts only members of vlist"""
        if not value in self.vlist:
            raise ValueError('value (%s) not valid' % value)
        return value

    def getIndex(self):
        """index of the current value within vlist"""
        return self.vlist.index(self._val)

    def increment(self, agent, inc=1):
        """
        steps inc places through vlist (negative inc steps backwards).
        Steps past either end wrap or clamp per the constructor flags, otherwise
        raise ValueError.

        returns setValue's result (True if the value actually changed).
        """
        newi=self.getIndex()+inc
        if 0 <= newi < len(self.vlist):
            useval = self.vlist[newi]
        elif self.wrap:
            useval = self.vlist[-1] if newi < 0 else self.vlist[0]
        elif self.clamp:
            useval = self.vlist[0] if newi < 0 else self.vlist[-1]
        else:
            raise ValueError('operation exceeds list boundary')
        # bug fix: the wrap / clamp paths previously fell through to setValue
        # without returning its result (so they returned None while the
        # in-range path returned True/False); all paths now return the same
        # changed indication.
        return self.setValue(useval, agent)

    def setIndex(self, ival, agent):
        """sets the value to vlist[ival]; raises ValueError for an out of range index"""
        if 0 <= ival < len(self.vlist):
            return self.setValue(self.vlist[ival], agent)
        raise ValueError('index out of range')
class btnWatch(watchable):
    """
    For simple click buttons that always notify: every setValue fires the
    observers, and the stored value itself never changes.
    """
    def setValue(self, value, agent):
        """
        loglvls / wflags values update this object's settings as usual (no
        notification); any other value just triggers the observers with the
        existing value.
        """
        for settingcls, attrname in ((loglvls, 'loglevel'), (wflags, 'flags')):
            if isinstance(value, settingcls):
                setattr(self, attrname, value)
                return False
        assert isinstance(agent, self.app.agentclass)
        self.notify(self._val, agent)
        return True
class folderWatch(watchable):
    """
    Internally, the value is a pathlib path to a folder (subfolders are created automatically).
    """
    def __init__(self, value, **kwargs):
        # validate (and if necessary create) the folder up front so _val is
        # always a pathlib.Path
        super().__init__(value=self.validValue(value, None), **kwargs)

    def validValue(self, value, agent):
        """
        value : anything pathlib.Path accepts; '~' is expanded.
        returns the pathlib.Path of the (possibly newly created) folder.
        raises ValueError if the path exists but is not a folder.
        """
        tp=pathlib.Path(value).expanduser()
        if tp.exists():
            if tp.is_dir():
                return tp
            else:
                raise ValueError('%s is not a folder' % str(tp))
        else:
            tp.mkdir(parents=True, exist_ok=True)
            return tp

    def getValue(self):
        """the external representation is the string form of the path"""
        return str(self._val)

    def getFolder(self):
        """the underlying pathlib.Path"""
        return self._val

    def currentfilenames(self, includes=None, excludes=None):
        """
        returns names of files currently in this folder

        includes: if given, only names ending with one of these suffixes are kept
        excludes: if given, names ending with any of these suffixes are dropped
        """
        # bug fixes: previously iterated self.getValue() - a str, which has no
        # iterdir() - and the excludes test only dropped a file when it matched
        # EVERY exclude suffix; use the Path and proper any() tests instead.
        return [pp.name for pp in self.getFolder().iterdir() if pp.is_file() and
                (includes is None or any(pp.name.endswith(x) for x in includes)) and
                (excludes is None or not any(pp.name.endswith(x) for x in excludes))]
class watchablegroup(object):
    """
    Builds a set of watchable attributes from a declarative definition list,
    and supports fetching / applying the persistent subset of them as a dict.
    """
    def __init__(self, value, wabledefs, loglevel=None):
        """
        value : dict of preferred values for watchables in this activity (e.g. from
                saved settings file), or None to use the defaults throughout
        wabledefs: a list of 4/5-tuples that define each watchable with the following entries:
            0: name of the watchable
            1: class of the watchable
            2: default value of the watchable
            3: True if the watchable is returned by fetchsettings (as a dict member)
            4: (optional) kwargs to use when setting up the watchable
        """
        self.perslist=[]
        self.loglevel=loglvls.INFO if loglevel is None else loglevel
        for awable in wabledefs:
            ch=self.makeChild(defn=awable, value=awable[2] if value is None else value.get(awable[0], awable[2]))
            if ch is None:
                raise ValueError('child construction failed - see log')
            setattr(self, awable[0], ch)
            if awable[3]:
                self.perslist.append(awable[0])

    def makeChild(self, value, defn):
        """
        returns a new watchable with this object as the app, or None if
        construction failed (the failure is logged and printed).

        value: value for the new watchable (None means use the defn default)
        defn : a 4/5-tuple as documented in __init__
        """
        deflen=len(defn)
        if deflen==4:
            params={}
        elif deflen==5:
            params=defn[4]
        else:
            raise ValueError('there are not 4 or 5 entries in this definition for class %s: %s' % (type(self).__name__, defn))
        vv=defn[2] if value is None else value
        try:
            return defn[1](app=self, value=vv, **params)
        except Exception:   # was a bare except: - don't swallow SystemExit / KeyboardInterrupt
            print('Exception in makeChild for class %s' % defn[1], ('using defn value (%s)' % defn[2]) if value is None else str(vv))
            print('extra keyword args', params)
            print('input values:', value)
            self.log(loglvls.ERROR,'class %s exception making variable %s' % (type(self).__name__, defn[0]), exc_info=True, stack_info=True)
            return None

    def fetchsettings(self):
        """returns a dict of the current values of the persistent watchables"""
        return {kv: getattr(self,kv).getValue() for kv in self.perslist}

    def applysettings(self, settings, agent):
        """
        applies values from settings to the matching persistent watchables.

        settings: a dict (e.g. as returned by fetchsettings) or an iterable of
                  (name, value) pairs
        agent   : the agent to use for the setValue calls
        """
        # bug fix: iterating a dict directly yields keys only, so the old
        # 'for k,v in settings' failed on the dict that fetchsettings returns
        pairs = settings.items() if hasattr(settings, 'items') else settings
        for k, v in pairs:
            if k in self.perslist:
                getattr(self, k).setValue(v, agent)
class watchablesmart(watchablegroup):
    """
    This class can act as a complete app, or as a part of an app.
    For a complete app:
        sets up logging for the app and loads the start settings
    for a component of an app:
        passes logging calls up to the app.
    value: for the top level (app is None), if a string, this is the file name for a json file
            which should yield a dict with the settings to be applied in construction,
            otherwise it should be a dict with the settings.
            lower levels always expect a dict
    app: If app is None, this node is the app, otherwise it should be the app object
            (which provides logging and save / restore settings)
    """
    def __init__(self, value, app=None, loglevel=loglvls.INFO, **kwargs):
        if app is None: # this is the real (top level) app
            # bug fix: agentclass and the start settings were previously only set
            # up when logging was enabled, so a loglevel of NONE crashed later
            # with AttributeError on self.startsettings / self.agentclass
            self.agentclass=myagents
            if loglevel is None or loglevel is loglvls.NONE:
                self.logger=None
                print('%s no logging' % type(self).__name__)
            else:
                self.logger=logging.getLogger(__loader__.name+'.'+type(self).__name__)
                chandler=logging.StreamHandler()
                chandler.setFormatter(logging.Formatter(fmt= '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s', datefmt= "%M:%S"))
                self.logger.addHandler(chandler)
                self.logger.setLevel(loglevel.value)
                self.log(loglvls.INFO,'logging level is %s' % loglevel)
            self.startsettings, lmsg, self.settingsfrom = loadsettings(value)
            self.log(loglvls.INFO, lmsg)
        else:
            self.app=app
            self.agentclass=app.agentclass
            self.startsettings=value
        super().__init__(value=self.startsettings, loglevel=loglevel, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """
        logs through the parent app if this is a component, otherwise through our
        own logger; with no logger, WARN and above still go to stdout.
        """
        if hasattr(self,'app'):
            if self.loglevel.value <= level.value:
                self.app.log(level, msg, *args, **kwargs)
        else:
            if self.logger:
                self.logger.log(level.value, msg, *args, **kwargs)
            elif level.value >= loglvls.WARN.value:
                # bug fix: previously compared an int to the loglvls enum member
                # itself, which raises TypeError
                print(msg)

    def savesettings(self, oldValue, newValue, agent, watched):
        """
        writes the current persistent settings to the json file they were loaded from.

        The oldValue / newValue / agent / watched parameters let this be hooked
        directly to a watchable (e.g. a save button) as an observer; they are ignored.
        Failures at any stage are logged and the save is abandoned.
        """
        if hasattr(self, 'app'):
            raise ValueError('only the app level can save settings')
        try:
            setts = self.fetchsettings()
        except Exception:   # was a bare except:
            self.log(loglvls.WARN,'fetchsettings failed', exc_info=True, stack_info=True)
            return
        try:
            settstr=json.dumps(setts, indent=4)
        except Exception:   # was a bare except:
            self.log(loglvls.WARN,'json conversion of these settings failed', exc_info=True, stack_info=True)
            self.log(loglvls.WARN,str(setts))
            return
        try:
            with self.settingsfrom.open('w') as sfo:
                sfo.write(settstr)
        except Exception:   # was a bare except:
            self.log(loglvls.WARN,'save settings failed to write file', exc_info=True, stack_info=True)
            return
        self.log(loglvls.INFO,'settings saved to file %s' % str(self.settingsfrom))
class watchablepigpio(watchablesmart):
    """
    a root class that adds in pigpio setup to watchablesmart
    """
    def __init__(self, app=None, pigp=None, **kwargs):
        """
        Picks up an existing pigpio.pi instance from the app if it has a 'pio'
        attribute, otherwise uses the one passed in, otherwise creates (and
        subsequently owns) a new one.
        """
        if app is not None and hasattr(app, 'pio'):
            self.pio = app.pio
            self.mypio = False
        elif pigp is None:
            import pigpio
            candidate = pigpio.pi()
            if not candidate.connected:
                raise ValueError('pigpio failed to initialise')
            self.pio = candidate
            self.mypio = True       # we created it, so close() stops it
        else:
            self.pio = pigp
            self.mypio = False
        if not self.pio.connected:
            raise ValueError('pigpio is not connected')
        super().__init__(app=app, **kwargs)

    def close(self):
        """stops and forgets the pigpio instance, but only if this object created it"""
        if self.mypio:
            self.pio.stop()
            self.mypio = False
            self.pio = None
class watchableAct(watchablegroup):
    """
    An app can have a number of optional activities (that can have their own threads,
    watched vars etc.). This class provides useful common bits for such activities:
        watchable variables set up from a definition list with passed-in values
        (for saved settings for example) and defaults,
        automatic retrieval of the persistent subset of those variables,
        and logging via the parent app using Python's standard logging module.
    """
    def __init__(self, app, **kwargs):
        self.app = app
        self.agentclass = app.agentclass
        super().__init__(**kwargs)

    def log(self, loglevel, *args, **kwargs):
        """
        request a logging operation; forwarded to the app unless the requested
        level is below this object's threshold.
        """
        if loglevel.value >= self.loglevel.value:
            self.app.log(loglevel, *args, **kwargs)
class watchableApp(object):
    """
    Minimal top-level app object: holds the agent class used to validate value
    changes and provides logging via the stdlib logging module (or none at all).
    """
    def __init__(self, agentclass=myagents, loglevel=None):
        self.agentclass = agentclass
        if loglevel is None or loglevel is loglvls.NONE:
            self.logger = None
            print('%s no logging' % type(self).__name__)
            return
        self.logger = logging.getLogger(__loader__.name + '.' + type(self).__name__)
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
                fmt='%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s',
                datefmt="%M:%S"))
        self.logger.addHandler(handler)
        self.logger.setLevel(loglevel.value)

    def log(self, level, msg, *args, **kwargs):
        """logs msg at the given loglvls level, if logging is enabled"""
        if self.logger is not None:
            self.logger.log(level.value, msg, *args, **kwargs)
def loadsettings(value):
    """Resolve application settings from *value*.

    *value* may be:
      * a str  - treated as a (possibly ``~``-prefixed) path to a JSON file;
      * a mapping (anything with ``keys``) - used directly;
      * None   - defaults are used.

    Returns a 3-tuple ``(settings, message, path)`` where *settings* is a
    mapping, *message* is a human-readable description of what happened and
    *path* is the pathlib.Path of the settings file (or None when no file
    was involved).  Never raises: failures fall back to an empty dict.
    """
    if isinstance(value, str):
        spath = pathlib.Path(value).expanduser()
        if spath.is_file():
            try:
                with spath.open('r') as spo:
                    startsettings = json.load(spo)
                return startsettings, 'app settings loaded from file %s' % spath, spath
            except Exception:
                # Malformed JSON or an unreadable file - fall back to defaults.
                return {}, 'failed to load settings from %s - default values used' % spath, spath
        else:
            return {}, 'app settings file %s not found - default values used' % str(spath), spath
    elif hasattr(value, 'keys'):
        return value, 'using settings from passed object', None
    elif value is None:
        return {}, 'settings not specified, default values used', None
    else:
        # Bug fix: previously referenced the undefined name ``values``
        # (NameError on this branch) and misspelt 'settings'.
        return {}, 'settings not processed from passed %s' % type(value).__name__, None
| [
"logging.StreamHandler",
"enum.auto",
"pathlib.Path",
"threading.Lock",
"logging.Formatter",
"json.dumps",
"pigpio.pi",
"json.load",
"math.isnan"
] | [((680, 690), 'enum.auto', 'enumauto', ([], {}), '()\n', (688, 690), True, 'from enum import Enum, auto as enumauto, Flag\n'), ((705, 715), 'enum.auto', 'enumauto', ([], {}), '()\n', (713, 715), True, 'from enum import Enum, auto as enumauto, Flag\n'), ((775, 785), 'enum.auto', 'enumauto', ([], {}), '()\n', (783, 785), True, 'from enum import Enum, auto as enumauto, Flag\n'), ((1583, 1599), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1597, 1599), False, 'import logging, sys, threading, pathlib, math, json\n'), ((6556, 6570), 'math.isnan', 'math.isnan', (['av'], {}), '(av)\n', (6566, 6570), False, 'import logging, sys, threading, pathlib, math, json\n'), ((20049, 20072), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (20070, 20072), False, 'import logging, sys, threading, pathlib, math, json\n'), ((10840, 10859), 'pathlib.Path', 'pathlib.Path', (['value'], {}), '(value)\n', (10852, 10859), False, 'import logging, sys, threading, pathlib, math, json\n'), ((15523, 15546), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (15544, 15546), False, 'import logging, sys, threading, pathlib, math, json\n'), ((17013, 17040), 'json.dumps', 'json.dumps', (['setts'], {'indent': '(4)'}), '(setts, indent=4)\n', (17023, 17040), False, 'import logging, sys, threading, pathlib, math, json\n'), ((18120, 18131), 'pigpio.pi', 'pigpio.pi', ([], {}), '()\n', (18129, 18131), False, 'import pigpio\n'), ((20107, 20253), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s"""', 'datefmt': '"""%M:%S"""'}), "(fmt=\n '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s'\n , datefmt='%M:%S')\n", (20124, 20253), False, 'import logging, sys, threading, pathlib, math, json\n'), ((20548, 20567), 'pathlib.Path', 'pathlib.Path', (['value'], {}), '(value)\n', (20560, 20567), False, 'import logging, sys, 
threading, pathlib, math, json\n'), ((15585, 15731), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s"""', 'datefmt': '"""%M:%S"""'}), "(fmt=\n '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s'\n , datefmt='%M:%S')\n", (15602, 15731), False, 'import logging, sys, threading, pathlib, math, json\n'), ((20732, 20746), 'json.load', 'json.load', (['spo'], {}), '(spo)\n', (20741, 20746), False, 'import logging, sys, threading, pathlib, math, json\n')] |
import sqlite3
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import tests.testsite.testapp.models as dm
from tests.sa_models import Base, Car, Child, Dog, Parent
@pytest.fixture(scope="session")
def engine():
print("NEW ENGINE")
engine = create_engine(
"sqlite://",
creator=lambda: sqlite3.connect(
"file:memorydb?mode=memory&cache=shared", uri=True
),
)
yield engine
engine.dispose()
@pytest.fixture(scope="session")
def session(engine):
print("CREATE TABLES")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
yield session
session.close()
@pytest.fixture(scope="session")
def mock_data_session(session):
parent = Parent(name="Peter")
parent2 = Parent(name="Hugo")
child1 = Child(name="Hans", age=3, parent=parent, boolfield=True)
child2 = Child(name="Franz", age=5, parent=parent, boolfield=False)
dog1 = Dog(name="Rex")
dog1.owners = [child2]
car1 = Car(horsepower=560)
car2 = Car(horsepower=32)
parent.cars = [car1, car2]
session.add_all([parent, parent2, child1, child2, dog1])
session.commit()
return session
def test_data(mock_data_session):
    """Both Parent rows and both Child rows should be queryable via SQLAlchemy."""
    parents = mock_data_session.query(Parent).all()
    children = mock_data_session.query(Child).all()
    assert len(parents) == 2
    assert len(children) == 2
@pytest.mark.django_db
def test_django_orm(mock_data_session):
    """Rows inserted through SQLAlchemy are visible via the Django ORM, ordered by pk."""
    parents = list(dm.Parent.objects.order_by("pk"))
    assert [p.name for p in parents] == ["Peter", "Hugo"]
def test_nullable(mock_data_session):
    """boolfield must be NOT NULL; citextfield must be nullable.

    Idiom fix: use truthiness instead of ``== False`` / ``== True``
    comparisons (PEP 8 / flake8 E712); behavior is unchanged since
    Django's ``Field.null`` is a plain bool.
    """
    assert not dm.Child._meta.get_field("boolfield").null
    assert dm.Child._meta.get_field("citextfield").null
@pytest.mark.django_db
def test_fk(mock_data_session):
    """The Child -> Parent foreign key works in both directions in the Django ORM."""
    peter = dm.Parent.objects.get(name="Peter")
    hans = dm.Child.objects.get(name="Hans")
    assert hans.parent_id == peter.id
    assert hans.parent == peter
    # Reverse accessor from the parent side.
    children = peter.children.all()
    assert len(children) == 2
    assert hans in children
@pytest.mark.django_db
def test_pk(mock_data_session):
    """The translated models keep their original primary-key column names."""
    for model, expected in ((dm.Child, "key"), (dm.Parent, "id")):
        assert model._meta.pk.name == expected
@pytest.mark.django_db
def test_many_to_many(mock_data_session):
    """Peter owns both cars, and each car lists Peter as a driver."""
    peter = dm.Parent.objects.get(name="Peter")
    assert len(peter.cars.all()) == 2
    all_cars = dm.Car.objects.all()
    assert all_cars[0].drivers.all()[0].name == "Peter"
    assert all_cars[1].drivers.all()[0].name == "Peter"
@pytest.mark.django_db
def test_relation_without_fk(mock_data_session):
    """The Dog <-> Child relation (no explicit FK field) resolves both ways."""
    franz = dm.Child.objects.get(name="Franz")
    rex = dm.Dog.objects.get(name="Rex")
    assert franz.dog == rex
    assert [*rex.owners.all()] == [franz]
| [
"tests.testsite.testapp.models.Child.objects.get",
"sqlalchemy.orm.sessionmaker",
"tests.sa_models.Base.metadata.create_all",
"sqlite3.connect",
"tests.sa_models.Car",
"tests.testsite.testapp.models.Parent.objects.get",
"tests.testsite.testapp.models.Car.objects.all",
"tests.sa_models.Parent",
"test... | [((212, 243), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (226, 243), False, 'import pytest\n'), ((493, 524), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (507, 524), False, 'import pytest\n'), ((693, 724), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (707, 724), False, 'import pytest\n'), ((577, 609), 'tests.sa_models.Base.metadata.create_all', 'Base.metadata.create_all', (['engine'], {}), '(engine)\n', (601, 609), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((770, 790), 'tests.sa_models.Parent', 'Parent', ([], {'name': '"""Peter"""'}), "(name='Peter')\n", (776, 790), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((805, 824), 'tests.sa_models.Parent', 'Parent', ([], {'name': '"""Hugo"""'}), "(name='Hugo')\n", (811, 824), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((838, 894), 'tests.sa_models.Child', 'Child', ([], {'name': '"""Hans"""', 'age': '(3)', 'parent': 'parent', 'boolfield': '(True)'}), "(name='Hans', age=3, parent=parent, boolfield=True)\n", (843, 894), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((908, 966), 'tests.sa_models.Child', 'Child', ([], {'name': '"""Franz"""', 'age': '(5)', 'parent': 'parent', 'boolfield': '(False)'}), "(name='Franz', age=5, parent=parent, boolfield=False)\n", (913, 966), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((978, 993), 'tests.sa_models.Dog', 'Dog', ([], {'name': '"""Rex"""'}), "(name='Rex')\n", (981, 993), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((1032, 1051), 'tests.sa_models.Car', 'Car', ([], {'horsepower': '(560)'}), '(horsepower=560)\n', (1035, 1051), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((1063, 1081), 'tests.sa_models.Car', 'Car', ([], {'horsepower': '(32)'}), 
'(horsepower=32)\n', (1066, 1081), False, 'from tests.sa_models import Base, Car, Child, Dog, Parent\n'), ((1446, 1478), 'tests.testsite.testapp.models.Parent.objects.order_by', 'dm.Parent.objects.order_by', (['"""pk"""'], {}), "('pk')\n", (1472, 1478), True, 'import tests.testsite.testapp.models as dm\n'), ((1820, 1855), 'tests.testsite.testapp.models.Parent.objects.get', 'dm.Parent.objects.get', ([], {'name': '"""Peter"""'}), "(name='Peter')\n", (1841, 1855), True, 'import tests.testsite.testapp.models as dm\n'), ((1871, 1904), 'tests.testsite.testapp.models.Child.objects.get', 'dm.Child.objects.get', ([], {'name': '"""Hans"""'}), "(name='Hans')\n", (1891, 1904), True, 'import tests.testsite.testapp.models as dm\n'), ((2322, 2357), 'tests.testsite.testapp.models.Parent.objects.get', 'dm.Parent.objects.get', ([], {'name': '"""Peter"""'}), "(name='Peter')\n", (2343, 2357), True, 'import tests.testsite.testapp.models as dm\n'), ((2652, 2686), 'tests.testsite.testapp.models.Child.objects.get', 'dm.Child.objects.get', ([], {'name': '"""Franz"""'}), "(name='Franz')\n", (2672, 2686), True, 'import tests.testsite.testapp.models as dm\n'), ((2697, 2727), 'tests.testsite.testapp.models.Dog.objects.get', 'dm.Dog.objects.get', ([], {'name': '"""Rex"""'}), "(name='Rex')\n", (2715, 2727), True, 'import tests.testsite.testapp.models as dm\n'), ((624, 649), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (636, 649), False, 'from sqlalchemy.orm import sessionmaker\n'), ((2408, 2428), 'tests.testsite.testapp.models.Car.objects.all', 'dm.Car.objects.all', ([], {}), '()\n', (2426, 2428), True, 'import tests.testsite.testapp.models as dm\n'), ((2493, 2513), 'tests.testsite.testapp.models.Car.objects.all', 'dm.Car.objects.all', ([], {}), '()\n', (2511, 2513), True, 'import tests.testsite.testapp.models as dm\n'), ((1634, 1671), 'tests.testsite.testapp.models.Child._meta.get_field', 'dm.Child._meta.get_field', (['"""boolfield"""'], {}), 
"('boolfield')\n", (1658, 1671), True, 'import tests.testsite.testapp.models as dm\n'), ((1697, 1736), 'tests.testsite.testapp.models.Child._meta.get_field', 'dm.Child._meta.get_field', (['"""citextfield"""'], {}), "('citextfield')\n", (1721, 1736), True, 'import tests.testsite.testapp.models as dm\n'), ((355, 422), 'sqlite3.connect', 'sqlite3.connect', (['"""file:memorydb?mode=memory&cache=shared"""'], {'uri': '(True)'}), "('file:memorydb?mode=memory&cache=shared', uri=True)\n", (370, 422), False, 'import sqlite3\n')] |
"""Module containing session and auth logic."""
import collections.abc as abc_collections
import datetime
from contextlib import contextmanager
from logging import getLogger
import dateutil.parser
import requests
from . import __version__
from . import exceptions as exc
# Cache of previously-built API URLs, keyed by the tuple of path parts
# (see GitHubSession.build_url).
__url_cache__ = {}
# Module-level logger named after the package.
__logs__ = getLogger(__package__)
def requires_2fa(response):
    """Determine whether a response requires us to prompt the user for 2FA.

    GitHub signals this with a 401 whose X-GitHub-OTP header contains the
    word "required".
    """
    if response.status_code != 401:
        return False
    headers = response.headers
    if "X-GitHub-OTP" not in headers:
        return False
    return "required" in headers["X-GitHub-OTP"]
class BasicAuth(requests.auth.HTTPBasicAuth):
    """Sub-class requests's class so we have a nice repr."""
    def __repr__(self):
        """Show which username this auth object carries."""
        return f"basic {self.username}"
class TokenAuth(requests.auth.AuthBase):
    """Auth class that handles simple tokens."""
    # Template used to build the Authorization header value.
    header_format_str = "token {}"
    def __init__(self, token):
        """Keep the raw token for later header construction."""
        self.token = token
    def __repr__(self):
        """Show only a short token prefix so secrets never leak into logs."""
        return "token {}...".format(self.token[:4])
    def __eq__(self, other):
        """Two token auths are equal exactly when their tokens match."""
        return self.token == getattr(other, "token", None)
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
    def __call__(self, request):
        """Attach the formatted Authorization header to *request*."""
        header_value = self.header_format_str.format(self.token)
        request.headers["Authorization"] = header_value
        return request
class GitHubSession(requests.Session):
    """Our slightly specialized Session object.
    Normally this is created automatically by
    :class:`~github4.github.GitHub`. To use alternate values for
    network timeouts, this class can be instantiated directly and
    passed to the GitHub object. For example:
    .. code-block:: python
        gh = github.GitHub(session=session.GitHubSession(
            default_connect_timeout=T, default_read_timeout=N))
    :param default_connect_timeout:
        the number of seconds to wait when establishing a connection to
        GitHub
    :type default_connect_timeout:
        float
    :param default_read_timeout:
        the number of seconds to wait for a response from GitHub
    :type default_read_timeout:
        float
    """
    # Class-level default: no authentication until one of the *_auth()
    # methods is called.
    auth = None
    # Extra attributes (beyond requests.Session's own) carried through
    # pickling/copying of the session.
    __attrs__ = requests.Session.__attrs__ + [
        "base_url",
        "two_factor_auth_cb",
        "default_connect_timeout",
        "default_read_timeout",
        "request_counter",
    ]
    def __init__(self, default_connect_timeout=4, default_read_timeout=10):
        """Slightly modify how we initialize our session."""
        super(GitHubSession, self).__init__()
        self.default_connect_timeout = default_connect_timeout
        self.default_read_timeout = default_read_timeout
        self.headers.update(
            {
                # Only accept JSON responses
                "Accept": "application/vnd.github.v3.full+json",
                # Only accept UTF-8 encoded data
                "Accept-Charset": "utf-8",
                # Always sending JSON
                "Content-Type": "application/json",
                # Set our own custom User-Agent string
                "User-Agent": f"github4.py/{__version__}",
            }
        )
        self.base_url = "https://api.github.com"
        # Callback used to obtain a one-time password when GitHub demands 2FA.
        self.two_factor_auth_cb = None
        # Running count of requests issued through this session.
        self.request_counter = 0
    @property
    def timeout(self):
        """Return the timeout tuple as expected by Requests"""
        return (self.default_connect_timeout, self.default_read_timeout)
    def basic_auth(self, username, password):
        """Set the Basic Auth credentials on this Session.
        :param str username: Your GitHub username
        :param str password: Your GitHub password
        """
        # Incomplete credentials are silently ignored.
        if not (username and password):
            return
        self.auth = BasicAuth(username, password)
    def build_url(self, *args, **kwargs):
        """Build a new API url from scratch.

        Positional args become path segments joined with '/'; ``base_url``
        may be overridden via keyword.  Results are memoized in the
        module-level ``__url_cache__``.
        """
        parts = [kwargs.get("base_url") or self.base_url]
        parts.extend(args)
        parts = [str(p) for p in parts]
        key = tuple(parts)
        __logs__.info("Building a url from %s", key)
        if key not in __url_cache__:
            __logs__.info("Missed the cache building the url")
            __url_cache__[key] = "/".join(parts)
        return __url_cache__[key]
    def handle_two_factor_auth(self, args, kwargs):
        """Handler for when the user has 2FA turned on.

        Re-issues the original request with an X-GitHub-OTP header whose
        value comes from the registered callback.
        """
        headers = kwargs.pop("headers", {})
        headers.update({"X-GitHub-OTP": str(self.two_factor_auth_cb())})
        kwargs.update(headers=headers)
        return super(GitHubSession, self).request(*args, **kwargs)
    def has_auth(self):
        """Check for whether or not the user has authentication configured."""
        return self.auth or self.headers.get("Authorization")
    def oauth2_auth(self, client_id, client_secret):
        """Use OAuth2 for authentication.
        It is suggested you install requests-oauthlib to use this.
        :param str client_id: Client ID retrieved from GitHub
        :param str client_secret: Client secret retrieved from GitHub
        """
        raise NotImplementedError("These features are not implemented yet")
    def request(self, *args, **kwargs):
        """Make a request, count it, and handle 2FA if necessary."""
        kwargs.setdefault("timeout", self.timeout)
        response = super(GitHubSession, self).request(*args, **kwargs)
        self.request_counter += 1
        # Retry once with a one-time password when GitHub asks for 2FA and
        # the user registered a callback; keep the original response in the
        # retry's history chain.
        if requires_2fa(response) and self.two_factor_auth_cb:
            # No need to flatten and re-collect the args in
            # handle_two_factor_auth
            new_response = self.handle_two_factor_auth(args, kwargs)
            new_response.history.append(response)
            response = new_response
        return response
    def retrieve_client_credentials(self):
        """Return the client credentials.
        :returns: tuple(client_id, client_secret)
        """
        client_id = self.params.get("client_id")
        client_secret = self.params.get("client_secret")
        return (client_id, client_secret)
    def two_factor_auth_callback(self, callback):
        """Register our 2FA callback specified by the user.

        :param callback: callable returning the one-time password
        :raises ValueError: if *callback* is truthy but not callable
        """
        if not callback:
            return
        if not isinstance(callback, abc_collections.Callable):
            raise ValueError("Your callback should be callable")
        self.two_factor_auth_cb = callback
    def token_auth(self, token):
        """Use an application token for authentication.
        :param str token: Application token retrieved from GitHub's
            /authorizations endpoint
        """
        # An empty token is silently ignored.
        if not token:
            return
        self.auth = TokenAuth(token)
    def app_bearer_token_auth(self, headers, expire_in):
        """Authenticate as an App to be able to view its metadata.

        :param headers: the App's JWT
        :param int expire_in: seconds until the JWT expires
        """
        if not headers:
            return
        self.auth = AppBearerTokenAuth(headers, expire_in)
    def app_installation_token_auth(self, json):
        """Use an access token generated by an App's installation.

        :param dict json: payload with at least "token" and "expires_at" keys
        """
        if not json:
            return
        self.auth = AppInstallationTokenAuth(json["token"], json["expires_at"])
    @contextmanager
    def temporary_basic_auth(self, *auth):
        """Allow us to temporarily swap out basic auth credentials.

        On exit the previous auth object and any token Authorization
        header are restored.
        """
        old_basic_auth = self.auth
        old_token_auth = self.headers.get("Authorization")
        self.basic_auth(*auth)
        yield
        self.auth = old_basic_auth
        if old_token_auth:
            self.headers["Authorization"] = old_token_auth
    @contextmanager
    def no_auth(self):
        """Unset authentication temporarily as a context manager."""
        old_basic_auth, self.auth = self.auth, None
        old_token_auth = self.headers.pop("Authorization", None)
        yield
        self.auth = old_basic_auth
        if old_token_auth:
            self.headers["Authorization"] = old_token_auth
def _utcnow():
return datetime.datetime.now(dateutil.tz.UTC)
class AppInstallationTokenAuth(TokenAuth):
    """Use token authentication but throw an exception on expiration."""
    def __init__(self, token, expires_at):
        """Store the token plus both the raw and the parsed expiry timestamp."""
        super(AppInstallationTokenAuth, self).__init__(token)
        self.expires_at_str = expires_at
        self.expires_at = dateutil.parser.parse(expires_at)
    def __repr__(self):
        """Show a token prefix together with its expiry time."""
        return "app installation token {}... expiring at {}".format(
            self.token[:4], self.expires_at_str
        )
    @property
    def expired(self):
        """True once the current UTC time has passed the expiry time."""
        return _utcnow() > self.expires_at
    def __call__(self, request):
        """Attach the header, refusing with an exception if the token expired."""
        if self.expired:
            raise exc.AppInstallationTokenExpired(
                "Your app installation token expired at {}".format(self.expires_at_str)
            )
        return super(AppInstallationTokenAuth, self).__call__(request)
class AppBearerTokenAuth(TokenAuth):
    """Use JWT authentication but throw an exception on expiration."""
    # JWTs use the Bearer scheme rather than the plain token scheme.
    header_format_str = "Bearer {}"
    def __init__(self, token, expire_in):
        """Store the token and compute its absolute expiry from a TTL in seconds."""
        super(AppBearerTokenAuth, self).__init__(token)
        ttl = datetime.timedelta(seconds=expire_in)
        self.expires_at = _utcnow() + ttl
    def __repr__(self):
        """Show a token prefix together with its expiry time."""
        return "app bearer token {} expiring at {}".format(
            self.token[:4], str(self.expires_at)
        )
    @property
    def expired(self):
        """True once the current UTC time has passed the expiry time."""
        return _utcnow() > self.expires_at
    def __call__(self, request):
        """Attach the header, refusing with an exception if the token expired."""
        if self.expired:
            raise exc.AppTokenExpired(
                "Your app token expired at {}".format(str(self.expires_at))
            )
        return super(AppBearerTokenAuth, self).__call__(request)
| [
"logging.getLogger",
"datetime.datetime.now",
"datetime.timedelta"
] | [((305, 327), 'logging.getLogger', 'getLogger', (['__package__'], {}), '(__package__)\n', (314, 327), False, 'from logging import getLogger\n'), ((8225, 8263), 'datetime.datetime.now', 'datetime.datetime.now', (['dateutil.tz.UTC'], {}), '(dateutil.tz.UTC)\n', (8246, 8263), False, 'import datetime\n'), ((9659, 9696), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'expire_in'}), '(seconds=expire_in)\n', (9677, 9696), False, 'import datetime\n')] |
import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.tensor.nnet.conv import conv2d
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.shared_randomstreams import RandomStreams
import numpy as np
from toolbox import *
from modelbase import *
import itertools
class FFN_ace(ModelSLBase):
    """
    Auto-classifier-encoder (Georgiev, 2015)

    Feed-forward classifier whose training cost combines cross-entropy on
    the labels with a "dual" input-reconstruction term (see ``model``).
    """
    def save(self):
        # Persist the parameter set to self.filename, creating the output
        # directory first.
        # NOTE(review): 'savedmodels\\' is a Windows-style path; presumably
        # this code targets Windows — confirm before running elsewhere.
        # ``os`` is not imported here; presumably it arrives via the star
        # imports from toolbox/modelbase — TODO confirm.
        if not os.path.exists('savedmodels\\'):
            os.makedirs('savedmodels\\')
        self.params.save(self.filename)
    def __init__(self, data, hp):
        # data: dataset description dict (uses 'n_x' input size, 'n_y'
        # number of classes); hp: hyper-parameter object (init_scale,
        # load_model, batch_size used below).
        super(FFN_ace, self).__init__(self.__class__.__name__, data, hp)
        # batch_size: 10000; learning_rate = 0.0015; lr_halflife = 200, 500
        # Small constant added to the variance for numerical stability in
        # batch_norm below.
        self.epsilon = 0.0001
        self.params = Parameters()
        self.shared_vars = Parameters()
        n_x = self.data['n_x']
        n_y = self.data['n_y']
        # Hidden-layer widths of the four-layer network.
        n_h1 = 1200
        n_h2 = 1000
        n_h3 = 800
        n_h4 = 800
        scale = hp.init_scale
        # Either reload previously-saved weights or initialise fresh ones.
        if hp.load_model and os.path.isfile(self.filename):
            self.params.load(self.filename)
        else:
            with self.params:
                w_h = shared_normal((n_x, n_h1), scale=scale)
                b_h = shared_zeros((n_h1,))
                w_h2 = shared_normal((n_h1, n_h2), scale=scale)
                b_h2 = shared_zeros((n_h2,))
                w_h3 = shared_normal((n_h2, n_h3), scale=scale)
                b_h3 = shared_zeros((n_h3,))
                w_h4 = shared_normal((n_h3, n_h4), scale=scale)
                b_h4 = shared_zeros((n_h4,))
                w_o = shared_normal((n_h4, n_y), scale=scale)
        def batch_norm(h):
            # Normalise each feature to zero mean / unit std over the batch
            # (axis 0); no learned scale/shift parameters are used.
            m = T.mean(h, axis=0, keepdims=True)
            std = T.sqrt(T.var(h, axis=0, keepdims=True) + self.epsilon)
            h = (h - m) / std
            return h
        def model(X, params, p_drop_input, p_drop_hidden):
            # Build the symbolic forward pass; returns class probabilities
            # and the reconstruction cost term.
            X_noise = X + gaussian(X.shape, p_drop_input)
            h = batch_norm(dropout(rectify(T.dot(X_noise, params.w_h) + params.b_h), p_drop_hidden))
            # Dual reconstruction error
            phx = T.nnet.sigmoid(T.dot(h, T.dot(h.T, X_noise)) / self.hp.batch_size)
            log_phx = T.nnet.binary_crossentropy(phx, X_noise).sum()
            h2 = dropout(rectify(T.dot(h, params.w_h2) + params.b_h2), p_drop_hidden)
            h3 = batch_norm(dropout(rectify(T.dot(h2, params.w_h3) + params.b_h3), p_drop_hidden))
            h4 = dropout(rectify(T.dot(h3, params.w_h4) + params.b_h4), p_drop_hidden)
            py_x = softmax(T.dot(h4, params.w_o))
            return [py_x, log_phx]
        # Training graph: input noise 0.2, hidden dropout 0.5.
        noise_py_x, cost_recon = model(self.X, self.params, 0.2, 0.5)
        # Cross-entropy between targets and predicted class probabilities.
        cost_y2 = -T.sum(self.Y * T.log(noise_py_x))
        cost = cost_y2 + cost_recon
        # Evaluation graph: no noise, no dropout.
        pyx, _ = model(self.X, self.params, 0., 0.)
        map_pyx = T.argmax(pyx, axis=1)
        # Number of misclassified examples (argmax prediction vs. one-hot Y).
        error_map_pyx = T.sum(T.neq(map_pyx, T.argmax(self.Y, axis=1)))
        self.compile(cost, error_map_pyx)
| [
"theano.tensor.log",
"theano.tensor.mean",
"theano.tensor.argmax",
"theano.tensor.nnet.binary_crossentropy",
"theano.tensor.var",
"theano.tensor.dot"
] | [((2908, 2929), 'theano.tensor.argmax', 'T.argmax', (['pyx'], {'axis': '(1)'}), '(pyx, axis=1)\n', (2916, 2929), True, 'import theano.tensor as T\n'), ((1741, 1773), 'theano.tensor.mean', 'T.mean', (['h'], {'axis': '(0)', 'keepdims': '(True)'}), '(h, axis=0, keepdims=True)\n', (1747, 1773), True, 'import theano.tensor as T\n'), ((2606, 2627), 'theano.tensor.dot', 'T.dot', (['h4', 'params.w_o'], {}), '(h4, params.w_o)\n', (2611, 2627), True, 'import theano.tensor as T\n'), ((2975, 2999), 'theano.tensor.argmax', 'T.argmax', (['self.Y'], {'axis': '(1)'}), '(self.Y, axis=1)\n', (2983, 2999), True, 'import theano.tensor as T\n'), ((1796, 1827), 'theano.tensor.var', 'T.var', (['h'], {'axis': '(0)', 'keepdims': '(True)'}), '(h, axis=0, keepdims=True)\n', (1801, 1827), True, 'import theano.tensor as T\n'), ((2242, 2282), 'theano.tensor.nnet.binary_crossentropy', 'T.nnet.binary_crossentropy', (['phx', 'X_noise'], {}), '(phx, X_noise)\n', (2268, 2282), True, 'import theano.tensor as T\n'), ((2774, 2791), 'theano.tensor.log', 'T.log', (['noise_py_x'], {}), '(noise_py_x)\n', (2779, 2791), True, 'import theano.tensor as T\n'), ((2180, 2199), 'theano.tensor.dot', 'T.dot', (['h.T', 'X_noise'], {}), '(h.T, X_noise)\n', (2185, 2199), True, 'import theano.tensor as T\n'), ((2332, 2353), 'theano.tensor.dot', 'T.dot', (['h', 'params.w_h2'], {}), '(h, params.w_h2)\n', (2337, 2353), True, 'import theano.tensor as T\n'), ((2522, 2544), 'theano.tensor.dot', 'T.dot', (['h3', 'params.w_h4'], {}), '(h3, params.w_h4)\n', (2527, 2544), True, 'import theano.tensor as T\n'), ((2044, 2070), 'theano.tensor.dot', 'T.dot', (['X_noise', 'params.w_h'], {}), '(X_noise, params.w_h)\n', (2049, 2070), True, 'import theano.tensor as T\n'), ((2437, 2459), 'theano.tensor.dot', 'T.dot', (['h2', 'params.w_h3'], {}), '(h2, params.w_h3)\n', (2442, 2459), True, 'import theano.tensor as T\n')] |
from typing import List
class Solution:
"""
BFS
"""
def canReach_1(self, arr: List[int], start: int) -> bool:
"""
Recursively.
"""
seen = set()
def helper(pos):
if not 0 <= pos < len(arr) or pos in seen:
return False
if not arr[pos]:
return True
seen.add(pos)
return helper(pos + arr[pos]) or helper(pos - arr[pos])
return helper(start)
def canReach_2(self, arr: List[int], start: int) -> bool:
"""
Iteratively
"""
from collections import deque
queue, seen = deque([start]), {start}
while queue:
curr = queue.popleft()
if not arr[curr]:
return True
for nxt in [curr + arr[curr], curr - arr[curr]]:
if 0 <= nxt < len(arr) and nxt not in seen:
seen.add(nxt)
queue.append(nxt)
return False
| [
"collections.deque"
] | [((667, 681), 'collections.deque', 'deque', (['[start]'], {}), '([start])\n', (672, 681), False, 'from collections import deque\n')] |
import pygame
# Target frames per second for the main loop.
FPS = 60
# Size of one grid block in pixels — presumably the tile size of the game
# board; TODO confirm against the renderer.
BLOCK_SIZE = 48
# Background fill colour (opaque black).
COLOR_BACKGROUND = pygame.Color(0, 0, 0)
| [
"pygame.Color"
] | [((59, 80), 'pygame.Color', 'pygame.Color', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (71, 80), False, 'import pygame\n')] |
"""
Installs and configures neutron
"""
import logging
import os
import re
import uuid
from packstack.installer import utils
from packstack.installer import validators
from packstack.installer.utils import split_hosts
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
# Controller object; assigned by the main installer flow via initConfig().
controller = None
# Name under which this plugin registers with the installer.
PLUGIN_NAME = "OS-NEUTRON"
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding OpenStack Neutron configuration")
conf_params = {
"NEUTRON" : [
{"CMD_OPTION" : "neutron-server-host",
"USAGE" : "The IP addresses of the server on which to install the Neutron server",
"PROMPT" : "Enter the IP address of the Neutron server",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_ip, validators.validate_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_SERVER_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ks-password",
"USAGE" : "The password to use for Neutron to authenticate with Keystone",
"PROMPT" : "Enter the password for Neutron Keystone access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_KS_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-db-password",
"USAGE" : "The password to use for Neutron to access DB",
"PROMPT" : "Enter the password for Neutron DB access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_DB_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l3-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron L3 agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install the Neutron L3 agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_L3_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l3-ext-bridge",
"USAGE" : "The name of the bridge that the Neutron L3 agent will use for external traffic, or 'provider' if using provider networks",
"PROMPT" : "Enter the bridge the Neutron L3 agent will use for external traffic, or 'provider' if using provider networks",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : "br-ex",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_L3_EXT_BRIDGE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-dhcp-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron DHCP agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install Neutron DHCP agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_DHCP_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-lbaas-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron LBaaS agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install Neutron LBaaS agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_LBAAS_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l2-plugin",
"USAGE" : "The name of the L2 plugin to be used with Neutron",
"PROMPT" : "Enter the name of the L2 plugin to be used with Neutron",
"OPTION_LIST" : ["linuxbridge", "openvswitch", "ml2"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "openvswitch",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_L2_PLUGIN",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-metadata-hosts",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron metadata agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install the Neutron metadata agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_multi_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_METADATA_HOSTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-metadata-pw",
"USAGE" : "A comma separated list of IP addresses on which to install Neutron metadata agent",
"PROMPT" : "Enter a comma separated list of IP addresses on which to install the Neutron metadata agent",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_METADATA_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
],
"NEUTRON_LB_PLUGIN" : [
{"CMD_OPTION" : "neutron-lb-tenant-network-type",
"USAGE" : "The type of network to allocate for tenant networks (eg. vlan, local)",
"PROMPT" : "Enter the type of network to allocate for tenant networks",
"OPTION_LIST" : ["local", "vlan"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_LB_TENANT_NETWORK_TYPE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-lb-vlan-ranges",
"USAGE" : "A comma separated list of VLAN ranges for the Neutron linuxbridge plugin (eg. physnet1:1:4094,physnet2,physnet3:3000:3999)",
"PROMPT" : "Enter a comma separated list of VLAN ranges for the Neutron linuxbridge plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_LB_VLAN_RANGES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_LB_PLUGIN_AND_AGENT" : [
{"CMD_OPTION" : "neutron-lb-interface-mappings",
"USAGE" : "A comma separated list of interface mappings for the Neutron linuxbridge plugin (eg. physnet1:br-eth1,physnet2:br-eth2,physnet3:br-eth3)",
"PROMPT" : "Enter a comma separated list of interface mappings for the Neutron linuxbridge plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_LB_INTERFACE_MAPPINGS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN" : [
{"CMD_OPTION" : "neutron-ovs-tenant-network-type",
"USAGE" : "Type of network to allocate for tenant networks (eg. vlan, local, gre, vxlan)",
"PROMPT" : "Enter the type of network to allocate for tenant networks",
"OPTION_LIST" : ["local", "vlan", "gre", "vxlan"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ovs-vlan-ranges",
"USAGE" : "A comma separated list of VLAN ranges for the Neutron openvswitch plugin (eg. physnet1:1:4094,physnet2,physnet3:3000:3999)",
"PROMPT" : "Enter a comma separated list of VLAN ranges for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_VLAN_RANGES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_AND_AGENT" : [
{"CMD_OPTION" : "neutron-ovs-bridge-mappings",
"USAGE" : "A comma separated list of bridge mappings for the Neutron openvswitch plugin (eg. physnet1:br-eth1,physnet2:br-eth2,physnet3:br-eth3)",
"PROMPT" : "Enter a comma separated list of bridge mappings for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ovs-bridge-interfaces",
"USAGE" : "A comma separated list of colon-separated OVS bridge:interface pairs. The interface will be added to the associated bridge.",
"PROMPT" : "Enter a comma separated list of OVS bridge:interface pairs for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_BRIDGE_IFACES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_TUNNEL" : [
{"CMD_OPTION" : "neutron-ovs-tunnel-ranges",
"USAGE" : "A comma separated list of tunnel ranges for the Neutron openvswitch plugin (eg. 1:1000)",
"PROMPT" : "Enter a comma separated list of tunnel ranges for the Neutron openvswitch plugin",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_TUNNEL_RANGES",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_AND_AGENT_TUNNEL" : [
{"CMD_OPTION" : "neutron-ovs-tunnel-if",
"USAGE" : "The interface for the OVS tunnel. Packstack will override the IP address used for tunnels on this hypervisor to the IP found on the specified interface. (eg. eth1) ",
"PROMPT" : "Enter interface with IP to override the default tunnel local_ip",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_NEUTRON_OVS_TUNNEL_IF",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_OVS_PLUGIN_AND_AGENT_VXLAN" : [
{"CMD_OPTION" : "neutron-ovs-vxlan-udp-port",
"CONF_NAME" : "CONFIG_NEUTRON_OVS_VXLAN_UDP_PORT",
"USAGE" : "VXLAN UDP port",
"PROMPT" : "Enter VXLAN UDP port number",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_port],
"DEFAULT_VALUE" : 4789,
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
"NEUTRON_ML2_PLUGIN" : [
{"CMD_OPTION" : "neutron-ml2-type-drivers",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_TYPE_DRIVERS",
"USAGE" : ("A comma separated list of network type "
"driver entrypoints to be loaded from the "
"neutron.ml2.type_drivers namespace."),
"PROMPT" : ("Enter a comma separated list of network "
"type driver entrypoints"),
"OPTION_LIST" : ["local", "flat", "vlan", "gre", "vxlan"],
"VALIDATORS" : [validators.validate_multi_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-tenant-network-types",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES",
"USAGE" : ("A comma separated ordered list of "
"network_types to allocate as tenant "
"networks. The value 'local' is only useful "
"for single-box testing but provides no "
"connectivity between hosts."),
"PROMPT" : ("Enter a comma separated ordered list of "
"network_types to allocate as tenant "
"networks"),
"OPTION_LIST" : ["local", "vlan", "gre", "vxlan"],
"VALIDATORS" : [validators.validate_multi_options],
"DEFAULT_VALUE" : "local",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-mechanism-drivers",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_MECHANISM_DRIVERS",
"USAGE" : ("A comma separated ordered list of "
"networking mechanism driver entrypoints "
"to be loaded from the "
"neutron.ml2.mechanism_drivers namespace."),
"PROMPT" : ("Enter a comma separated ordered list of "
"networking mechanism driver entrypoints"),
"OPTION_LIST" : ["logger", "test", "linuxbridge",
"openvswitch", "hyperv", "ncs", "arista",
"cisco_nexus", "l2population"],
"VALIDATORS" : [validators.validate_multi_options],
"DEFAULT_VALUE" : "openvswitch",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-flat-networks",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_FLAT_NETWORKS",
"USAGE" : ("A comma separated list of physical_network"
" names with which flat networks can be "
"created. Use * to allow flat networks with "
"arbitrary physical_network names."),
"PROMPT" : ("Enter a comma separated list of "
"physical_network names with which flat "
"networks can be created"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "*",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-vlan-ranges",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_VLAN_RANGES",
"USAGE" : ("A comma separated list of "
"<physical_network>:<vlan_min>:<vlan_max> "
"or <physical_network> specifying "
"physical_network names usable for VLAN "
"provider and tenant networks, as well as "
"ranges of VLAN tags on each available for "
"allocation to tenant networks."),
"PROMPT" : ("Enter a comma separated list of "
"physical_network names usable for VLAN"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-tunnel-id-ranges",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_TUNNEL_ID_RANGES",
"USAGE" : ("A comma separated list of <tun_min>:"
"<tun_max> tuples enumerating ranges of GRE "
"tunnel IDs that are available for tenant "
"network allocation. Should be an array with"
" tun_max +1 - tun_min > 1000000"),
"PROMPT" : ("Enter a comma separated list of <tun_min>:"
"<tun_max> tuples enumerating ranges of GRE "
"tunnel IDs that are available for tenant "
"network allocation"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-vxlan-group",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_VXLAN_GROUP",
"USAGE" : ("Multicast group for VXLAN. If unset, "
"disables VXLAN enable sending allocate "
"broadcast traffic to this multicast group. "
"When left unconfigured, will disable "
"multicast VXLAN mode. Should be an "
"Multicast IP (v4 or v6) address."),
"PROMPT" : "Enter a multicast group for VXLAN",
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-ml2-vni-ranges",
"CONF_NAME" : "CONFIG_NEUTRON_ML2_VNI_RANGES",
"USAGE" : ("A comma separated list of <vni_min>:"
"<vni_max> tuples enumerating ranges of "
"VXLAN VNI IDs that are available for tenant"
" network allocation. Min value is 0 and Max"
" value is 16777215."),
"PROMPT" : ("Enter a comma separated list of <vni_min>:"
"<vni_max> tuples enumerating ranges of "
"VXLAN VNI IDs that are available for tenant"
" network allocation"),
"OPTION_LIST" : [],
"VALIDATORS" : [],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "neutron-l2-agent", # We need to ask for this only in case of ML2 plugins
"USAGE" : "The name of the L2 agent to be used with Neutron",
"PROMPT" : "Enter the name of the L2 agent to be used with Neutron",
"OPTION_LIST" : ["linuxbridge", "openvswitch"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "openvswitch",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_NEUTRON_L2_AGENT",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
],
}
conf_groups = [
{ "GROUP_NAME" : "NEUTRON",
"DESCRIPTION" : "Neutron config",
"PRE_CONDITION" : "CONFIG_NEUTRON_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_ML2_PLUGIN",
"DESCRIPTION" : "Neutron ML2 plugin config",
"PRE_CONDITION" : use_ml2_plugin,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_LB_PLUGIN",
"DESCRIPTION" : "Neutron LB plugin config",
"PRE_CONDITION" : use_linuxbridge_plugin,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_LB_PLUGIN_AND_AGENT",
"DESCRIPTION" : "Neutron LB agent config",
"PRE_CONDITION" : use_linuxbridge_agent,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN",
"DESCRIPTION" : "Neutron OVS plugin config",
"PRE_CONDITION" : use_openvswitch_plugin,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_AND_AGENT",
"DESCRIPTION" : "Neutron OVS agent config",
"PRE_CONDITION" : use_openvswitch_agent,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_TUNNEL",
"DESCRIPTION" : "Neutron OVS plugin config for tunnels",
"PRE_CONDITION" : use_openvswitch_plugin_tunnel,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_AND_AGENT_TUNNEL",
"DESCRIPTION" : "Neutron OVS agent config for tunnels",
"PRE_CONDITION" : use_openvswitch_agent_tunnel,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
{ "GROUP_NAME" : "NEUTRON_OVS_PLUGIN_AND_AGENT_VXLAN",
"DESCRIPTION" : "Neutron OVS agent config for VXLAN",
"PRE_CONDITION" : use_openvswitch_vxlan,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True },
]
for group in conf_groups:
paramList = conf_params[group["GROUP_NAME"]]
controller.addGroup(group, paramList)
def use_ml2_plugin(config):
    """Return True when Neutron is being installed with the ML2 core plugin."""
    if config['CONFIG_NEUTRON_INSTALL'] != 'y':
        return False
    return config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2'
def use_linuxbridge_plugin(config):
    """Return True when the monolithic linuxbridge plugin is selected.

    Side effect: pins CONFIG_NEUTRON_L2_AGENT to 'linuxbridge' when the
    plugin is in use, since the monolithic plugin implies its own agent.
    """
    selected = (config['CONFIG_NEUTRON_INSTALL'] == 'y'
                and config['CONFIG_NEUTRON_L2_PLUGIN'] == 'linuxbridge')
    if not selected:
        return False
    config["CONFIG_NEUTRON_L2_AGENT"] = 'linuxbridge'
    return True
def use_linuxbridge_agent(config):
    """Return True when the linuxbridge L2 agent should be deployed.

    True either for the monolithic linuxbridge plugin or for ML2 with
    linuxbridge selected as its L2 agent.
    """
    via_ml2 = (use_ml2_plugin(config)
               and config["CONFIG_NEUTRON_L2_AGENT"] == 'linuxbridge')
    via_plugin = use_linuxbridge_plugin(config)
    return via_plugin or via_ml2
def use_openvswitch_plugin(config):
    """Return True when the monolithic openvswitch plugin is selected.

    Side effect: pins CONFIG_NEUTRON_L2_AGENT to 'openvswitch' when the
    plugin is in use, since the monolithic plugin implies its own agent.
    """
    selected = (config['CONFIG_NEUTRON_INSTALL'] == 'y'
                and config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch')
    if not selected:
        return False
    config["CONFIG_NEUTRON_L2_AGENT"] = 'openvswitch'
    return True
def use_openvswitch_plugin_tunnel(config):
    """Return True when the OVS plugin is used with a tunnelled tenant network."""
    if not use_openvswitch_plugin(config):
        return False
    return config['CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'] in ('gre', 'vxlan')
def use_ml2_with_ovs(config):
    """Return True when ML2 is the core plugin with openvswitch as L2 agent."""
    if not use_ml2_plugin(config):
        return False
    return config["CONFIG_NEUTRON_L2_AGENT"] == 'openvswitch'
def use_openvswitch_agent(config):
    """Return True when the openvswitch L2 agent should be deployed."""
    if use_openvswitch_plugin(config):
        return True
    return use_ml2_with_ovs(config)
def use_openvswitch_agent_tunnel(config):
    """Return True when the OVS agent is deployed with tunnelling enabled."""
    if use_openvswitch_plugin_tunnel(config):
        return True
    return use_ml2_with_ovs(config)
def use_openvswitch_vxlan(config):
    """Return True when VXLAN tunnelling is in use with the OVS agent."""
    # Monolithic OVS plugin with a vxlan tenant network type.
    if (use_openvswitch_plugin_tunnel(config)
            and config['CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'] == 'vxlan'):
        return True
    # ML2 with the OVS agent and vxlan among the loaded type drivers.
    return (use_ml2_with_ovs(config)
            and 'vxlan' in config['CONFIG_NEUTRON_ML2_TYPE_DRIVERS'])
def use_openvswitch_gre(config):
    """Return True when GRE tunnelling is in use with the OVS agent."""
    # Monolithic OVS plugin with a gre tenant network type.  (The original
    # code called these locals *_vxlan, which was misleading.)
    via_ovs_plugin = (
        use_openvswitch_plugin_tunnel(config) and
        config['CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'] == 'gre'
    )
    # ML2 with the OVS agent and gre among the tenant network types.
    via_ml2 = (
        use_ml2_with_ovs(config) and
        'gre' in config['CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES']
    )
    return via_ovs_plugin or via_ml2
def get_if_driver(config):
    """Map the configured L2 agent to its Neutron interface driver class path.

    Returns None when CONFIG_NEUTRON_L2_AGENT is neither 'openvswitch'
    nor 'linuxbridge' (same as the original implicit fall-through).
    """
    drivers = {
        'openvswitch': 'neutron.agent.linux.interface.OVSInterfaceDriver',
        'linuxbridge': 'neutron.agent.linux.interface.BridgeInterfaceDriver',
    }
    return drivers.get(config['CONFIG_NEUTRON_L2_AGENT'])
def initSequences(controller):
    """Register the Neutron installation sequence with the controller.

    Does nothing when Neutron is not selected.  Otherwise derives the
    per-plugin database name and core-plugin path, normalises the ML2 list
    options into Puppet-friendly strings, resolves the host sets for every
    Neutron service into module globals, and queues the manifest-creation
    steps.

    Raises KeyError for an unrecognised CONFIG_NEUTRON_L2_PLUGIN value.
    """
    config = controller.CONF
    if config['CONFIG_NEUTRON_INSTALL'] != 'y':
        return
    # Per-plugin database name and core plugin entry point.
    if config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch':
        plugin_db = 'ovs_neutron'
        plugin_path = ('neutron.plugins.openvswitch.ovs_neutron_plugin.'
                       'OVSNeutronPluginV2')
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'linuxbridge':
        plugin_db = 'neutron_linux_bridge'
        plugin_path = ('neutron.plugins.linuxbridge.lb_neutron_plugin.'
                       'LinuxBridgePluginV2')
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2':
        plugin_db = 'neutron'
        plugin_path = 'neutron.plugins.ml2.plugin.Ml2Plugin'
        # values modification: the ML2 list options arrive as comma
        # separated strings; rewrite them as stringified Python lists for
        # the Puppet templates.
        for key in ('CONFIG_NEUTRON_ML2_TYPE_DRIVERS',
                    'CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES',
                    'CONFIG_NEUTRON_ML2_MECHANISM_DRIVERS',
                    'CONFIG_NEUTRON_ML2_FLAT_NETWORKS',
                    'CONFIG_NEUTRON_ML2_VLAN_RANGES',
                    'CONFIG_NEUTRON_ML2_TUNNEL_ID_RANGES',
                    'CONFIG_NEUTRON_ML2_VNI_RANGES'):
            config[key] = str([i.strip() for i in config[key].split(',') if i])
        key = 'CONFIG_NEUTRON_ML2_VXLAN_GROUP'
        config[key] = "'%s'" % config[key] if config[key] else 'undef'
    else:
        # Previously an unrecognised plugin fell through and triggered a
        # NameError on plugin_db below; fail early with a clear message.
        raise KeyError('Unknown CONFIG_NEUTRON_L2_PLUGIN: %s'
                       % config['CONFIG_NEUTRON_L2_PLUGIN'])
    config['CONFIG_NEUTRON_L2_DBNAME'] = plugin_db
    config['CONFIG_NEUTRON_CORE_PLUGIN'] = plugin_path
    # Host sets consumed by the create_* manifest steps below.
    global api_hosts, l3_hosts, dhcp_hosts, lbaas_hosts, compute_hosts, meta_hosts, q_hosts
    api_hosts = split_hosts(config['CONFIG_NEUTRON_SERVER_HOST'])
    l3_hosts = split_hosts(config['CONFIG_NEUTRON_L3_HOSTS'])
    dhcp_hosts = split_hosts(config['CONFIG_NEUTRON_DHCP_HOSTS'])
    lbaas_hosts = split_hosts(config['CONFIG_NEUTRON_LBAAS_HOSTS'])
    meta_hosts = split_hosts(config['CONFIG_NEUTRON_METADATA_HOSTS'])
    compute_hosts = set()
    if config['CONFIG_NOVA_INSTALL'] == 'y':
        compute_hosts = split_hosts(config['CONFIG_NOVA_COMPUTE_HOSTS'])
    # q_hosts: every host that receives any Neutron component at all.
    q_hosts = api_hosts | l3_hosts | dhcp_hosts | lbaas_hosts | compute_hosts | meta_hosts
    neutron_steps = [
        {'title': 'Adding Neutron API manifest entries',
         'functions': [create_manifests]},
        {'title': 'Adding Neutron Keystone manifest entries',
         'functions': [create_keystone_manifest]},
        {'title': 'Adding Neutron L3 manifest entries',
         'functions': [create_l3_manifests]},
        {'title': 'Adding Neutron L2 Agent manifest entries',
         'functions': [create_l2_agent_manifests]},
        {'title': 'Adding Neutron DHCP Agent manifest entries',
         'functions': [create_dhcp_manifests]},
        {'title': 'Adding Neutron LBaaS Agent manifest entries',
         'functions': [create_lbaas_manifests]},
        {'title': 'Adding Neutron Metadata Agent manifest entries',
         'functions': [create_metadata_manifests]},
    ]
    controller.addSequence("Installing OpenStack Neutron", [], [],
                           neutron_steps)
def create_manifests(config):
    """Write the base Neutron (and API/firewall) manifest entries.

    Every host in the module-global q_hosts gets the base neutron.pp
    template plus the selected L2 plugin template; API hosts additionally
    get neutron_api.pp and firewall rules opening port 9696 to the
    allowed hosts.  Tunnel (VXLAN/GRE) firewall rules are appended where
    the OVS agent uses tunnelling.
    """
    global q_hosts
    # Service plugins to enable, serialized for the Puppet template.
    service_plugins = []
    if config['CONFIG_NEUTRON_LBAAS_HOSTS']:
        service_plugins.append(
            'neutron.services.loadbalancer.plugin.LoadBalancerPlugin'
        )
    if config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2':
        # ML2 uses the L3 Router service plugin to implement l3 agent
        service_plugins.append(
            'neutron.services.l3_router.l3_router_plugin.L3RouterPlugin'
        )
    config['SERVICE_PLUGINS'] = (str(service_plugins) if service_plugins
                                 else 'undef')
    # Pick the L2 plugin template.
    # NOTE(review): if CONFIG_NEUTRON_L2_PLUGIN is none of the three known
    # values, plugin_manifest stays unbound and the getManifestTemplate
    # call further down raises NameError — presumably option validation
    # upstream prevents this; confirm.
    if config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch':
        nettype = config.get("CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE", "local")
        plugin_manifest = 'neutron_ovs_plugin_%s.pp' % nettype
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'linuxbridge':
        plugin_manifest = 'neutron_lb_plugin.pp'
    elif config['CONFIG_NEUTRON_L2_PLUGIN'] == 'ml2':
        plugin_manifest = 'neutron_ml2_plugin.pp'
    # host to which allow neutron server
    allowed_hosts = set(q_hosts)
    if config['CONFIG_CLIENT_INSTALL'] == 'y':
        allowed_hosts.add(config['CONFIG_OSCLIENT_HOST'])
    if config['CONFIG_HORIZON_INSTALL'] == 'y':
        allowed_hosts.add(config['CONFIG_HORIZON_HOST'])
    if config['CONFIG_NOVA_INSTALL'] == 'y':
        allowed_hosts.add(config['CONFIG_NOVA_API_HOST'])
    for host in q_hosts:
        manifest_file = "%s_neutron.pp" % (host,)
        manifest_data = getManifestTemplate("neutron.pp")
        appendManifestFile(manifest_file, manifest_data, 'neutron')
        if host in api_hosts:
            manifest_file = "%s_neutron.pp" % (host,)
            manifest_data = getManifestTemplate("neutron_api.pp")
            # Firewall Rules: open the neutron server port (9696) to each
            # allowed host, accumulating one firewall.pp rendering per host.
            for f_host in allowed_hosts:
                config['FIREWALL_SERVICE_NAME'] = "neutron server"
                config['FIREWALL_PORTS'] = "'9696'"
                config['FIREWALL_CHAIN'] = "INPUT"
                config['FIREWALL_PROTOCOL'] = 'tcp'
                config['FIREWALL_ALLOWED'] = "'%s'" % f_host
                config['FIREWALL_SERVICE_ID'] = "neutron_server_%s_%s" % (host, f_host)
                manifest_data += getManifestTemplate("firewall.pp")
            appendManifestFile(manifest_file, manifest_data, 'neutron')
        # Set up any l2 plugin configs we need anywhere we install neutron
        # XXX I am not completely sure about this, but it seems necessary:
        manifest_data = getManifestTemplate(plugin_manifest)
        # We also need to open VXLAN/GRE port for agent
        if use_openvswitch_vxlan(config) or use_openvswitch_gre(config):
            if use_openvswitch_vxlan(config):
                config['FIREWALL_PROTOCOL'] = 'udp'
                tunnel_port = ("'%s'"
                               % config['CONFIG_NEUTRON_OVS_VXLAN_UDP_PORT'])
            else:
                config['FIREWALL_PROTOCOL'] = 'gre'
                tunnel_port = 'undef'
            config['FIREWALL_ALLOWED'] = "'ALL'"
            config['FIREWALL_SERVICE_NAME'] = "neutron tunnel port"
            config['FIREWALL_SERVICE_ID'] = ("neutron_tunnel")
            config['FIREWALL_PORTS'] = tunnel_port
            config['FIREWALL_CHAIN'] = "INPUT"
            manifest_data += getManifestTemplate('firewall.pp')
        appendManifestFile(manifest_file, manifest_data, 'neutron')
def create_keystone_manifest(config):
    """Append the Neutron Keystone entries to the Keystone host's manifest."""
    keystone_host = config['CONFIG_KEYSTONE_HOST']
    template = getManifestTemplate("keystone_neutron.pp")
    appendManifestFile("%s_keystone.pp" % keystone_host, template)
def find_mapping(haystack, needle):
    """Return True if *needle* equals the value part of any 'key:value'
    entry in the comma separated *haystack* string."""
    mapped_values = [entry.split(':')[1].strip()
                     for entry in get_values(haystack)]
    return needle in mapped_values
def create_l3_manifests(config):
    """Append the L3 agent manifest to each L3 host's manifest file.

    For the openvswitch plugin, also wires up the external bridge when it
    is not already present in the configured bridge mappings.
    """
    global l3_hosts
    # NOTE(review): 'plugin' is assigned but never used in this function.
    plugin = config['CONFIG_NEUTRON_L2_PLUGIN']
    # 'provider' means no dedicated external bridge — blank it out so the
    # template omits it.
    if config['CONFIG_NEUTRON_L3_EXT_BRIDGE'] == 'provider':
        config['CONFIG_NEUTRON_L3_EXT_BRIDGE'] = ''
    for host in l3_hosts:
        config['CONFIG_NEUTRON_L3_HOST'] = host
        config['CONFIG_NEUTRON_L3_INTERFACE_DRIVER'] = get_if_driver(config)
        manifestdata = getManifestTemplate("neutron_l3.pp")
        manifestfile = "%s_neutron.pp" % (host,)
        appendManifestFile(manifestfile, manifestdata + '\n')
        # Create the external OVS bridge only if it is not already covered
        # by the bridge mappings.
        if (config['CONFIG_NEUTRON_L2_PLUGIN'] == 'openvswitch' and
                config['CONFIG_NEUTRON_L3_EXT_BRIDGE'] and
                not find_mapping(config['CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS'],
                                 config['CONFIG_NEUTRON_L3_EXT_BRIDGE'])):
            config['CONFIG_NEUTRON_OVS_BRIDGE'] = config['CONFIG_NEUTRON_L3_EXT_BRIDGE']
            manifestdata = getManifestTemplate('neutron_ovs_bridge.pp')
            appendManifestFile(manifestfile, manifestdata + '\n')
def create_dhcp_manifests(config):
    """Append the DHCP agent manifest plus firewall rules to each DHCP host.

    Opens DHCP server port 67 (INPUT) and client port 68 (OUTPUT) for
    every Neutron host.
    """
    global dhcp_hosts
    # NOTE(review): 'plugin' is assigned but never used in this function.
    plugin = config['CONFIG_NEUTRON_L2_PLUGIN']
    for host in dhcp_hosts:
        config["CONFIG_NEUTRON_DHCP_HOST"] = host
        config['CONFIG_NEUTRON_DHCP_INTERFACE_DRIVER'] = get_if_driver(config)
        manifest_data = getManifestTemplate("neutron_dhcp.pp")
        manifest_file = "%s_neutron.pp" % (host,)
        # Firewall Rules: one in/out rule pair per Neutron host, each
        # appending a firewall.pp rendering to the manifest.
        config['FIREWALL_PROTOCOL'] = 'tcp'
        for f_host in q_hosts:
            config['FIREWALL_ALLOWED'] = "'%s'" % f_host
            config['FIREWALL_SERVICE_NAME'] = "neutron dhcp in"
            config['FIREWALL_SERVICE_ID'] = "neutron_dhcp_in_%s_%s" % (host, f_host)
            config['FIREWALL_PORTS'] = "'67'"
            config['FIREWALL_CHAIN'] = "INPUT"
            manifest_data += getManifestTemplate("firewall.pp")
            config['FIREWALL_SERVICE_NAME'] = "neutron dhcp out"
            config['FIREWALL_SERVICE_ID'] = "neutron_dhcp_out_%s_%s" % (host, f_host)
            config['FIREWALL_PORTS'] = "'68'"
            config['FIREWALL_CHAIN'] = "OUTPUT"
            manifest_data += getManifestTemplate("firewall.pp")
        appendManifestFile(manifest_file, manifest_data, 'neutron')
def create_lbaas_manifests(config):
    """Append the LBaaS agent manifest to every LBaaS host's manifest file."""
    global lbaas_hosts
    for host in lbaas_hosts:
        # Use the config mapping passed in instead of reaching back to the
        # module-level controller.CONF (they are the same mapping — see
        # initSequences — but the sibling create_* functions consistently
        # operate on the argument).
        config['CONFIG_NEUTRON_LBAAS_INTERFACE_DRIVER'] = get_if_driver(config)
        manifestdata = getManifestTemplate("neutron_lbaas.pp")
        manifestfile = "%s_neutron.pp" % (host,)
        appendManifestFile(manifestfile, manifestdata + "\n")
def get_values(val):
    """Split a comma separated string into a list of stripped items.

    Falsy input (None or the empty string) yields an empty list.
    """
    if not val:
        return []
    return [item.strip() for item in val.split(',')]
def get_agent_type(config):
    """Pick the OVS agent network type from the ML2 tenant network types.

    CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES holds a stringified list such
    as "['local', 'vlan']" (produced by initSequences).  Tunnel types win
    over 'vlan'; if no known type is present, the first configured type
    is returned.
    """
    # The only real use case I can think of for multiples right now is to list
    # "vlan,gre" or "vlan,vxlan" so that VLANs are used if available,
    # but tunnels are used if not.
    raw = config.get('CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES',
                     "['local']").strip('[]')
    # Strip surrounding whitespace as well as quotes.  The serialized list
    # contains entries like " 'vlan'"; stripping quote characters alone
    # left the leading space in place, so the membership tests below could
    # only ever match the first entry.
    tenant_types = [i.strip().strip('"\'') for i in raw.split(',')]
    for preferred in ('gre', 'vxlan', 'vlan'):
        if preferred in tenant_types:
            return preferred
    return tenant_types[0]
def create_l2_agent_manifests(config):
    """Append the L2 agent manifest to every host that needs the agent.

    The agent is installed on API, compute, DHCP and L3 hosts.  For the
    openvswitch agent the tenant network type selects the template, and on
    network hosts OVS ports are wired up from the configured
    bridge:interface pairs.

    Raises RuntimeError for an invalid plugin/agent combination and
    KeyError for an unknown L2 agent.
    """
    # Fixed: the global statement previously misspelled 'dhcp_hosts' as
    # 'dhcp_host' (harmless since the globals are only read, but the name
    # below is dhcp_hosts).
    global api_hosts, compute_hosts, dhcp_hosts, l3_hosts
    plugin = config['CONFIG_NEUTRON_L2_PLUGIN']
    agent = config["CONFIG_NEUTRON_L2_AGENT"]
    if agent == "openvswitch":
        host_var = 'CONFIG_NEUTRON_OVS_HOST'
        if plugin == agent:
            # monolithic plugin installation
            ovs_type = 'CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE'
            ovs_type = config.get(ovs_type, 'local')
        elif plugin == 'ml2':
            ovs_type = get_agent_type(config)
        else:
            raise RuntimeError('Invalid combination of plugin and agent.')
        template_name = "neutron_ovs_agent_%s.pp" % ovs_type
        bm_arr = get_values(config["CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS"])
        iface_arr = get_values(config["CONFIG_NEUTRON_OVS_BRIDGE_IFACES"])
        # The CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS parameter contains a
        # comma-separated list of bridge mappings. Since the puppet module
        # expects this parameter to be an array, this parameter must be
        # properly formatted by packstack, then consumed by the puppet
        # module. For example, the input string 'A, B, C' should be
        # formatted as "['A', 'B', 'C']".
        config["CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS"] = str(bm_arr)
    elif agent == "linuxbridge":
        host_var = 'CONFIG_NEUTRON_LB_HOST'
        template_name = 'neutron_lb_agent.pp'
    else:
        raise KeyError("Unknown layer2 agent")
    # Install l2 agents on every compute host in addition to any hosts listed
    # specifically for the l2 agent
    for host in api_hosts | compute_hosts | dhcp_hosts | l3_hosts:
        config[host_var] = host
        manifestfile = "%s_neutron.pp" % (host,)
        manifestdata = getManifestTemplate(template_name)
        appendManifestFile(manifestfile, manifestdata + "\n")
        # neutron ovs port only on network hosts
        if (
            agent == "openvswitch" and (
                (host in l3_hosts and ovs_type in ['vxlan', 'gre'])
                or ovs_type == 'vlan')
        ):
            bridge_key = 'CONFIG_NEUTRON_OVS_BRIDGE'
            iface_key = 'CONFIG_NEUTRON_OVS_IFACE'
            for if_map in iface_arr:
                config[bridge_key], config[iface_key] = if_map.split(':')
                manifestdata = getManifestTemplate("neutron_ovs_port.pp")
                appendManifestFile(manifestfile, manifestdata + "\n")
        # Additional configurations required for compute hosts and
        # network hosts.
        manifestdata = getManifestTemplate('neutron_bridge_module.pp')
        appendManifestFile(manifestfile, manifestdata + '\n')
def create_metadata_manifests(config):
    """Append the metadata agent manifest to each metadata host's manifest.

    Skipped entirely when Nova install is explicitly 'n'.
    """
    global meta_hosts
    if config.get('CONFIG_NOVA_INSTALL') == 'n':
        return
    for host in meta_hosts:
        # Use the config mapping passed in instead of the module-level
        # controller.CONF (they are the same mapping — see initSequences —
        # but the sibling create_* functions consistently operate on the
        # argument).
        config['CONFIG_NEUTRON_METADATA_HOST'] = host
        manifestdata = getManifestTemplate('neutron_metadata.pp')
        manifestfile = "%s_neutron.pp" % (host,)
        appendManifestFile(manifestfile, manifestdata + "\n")
| [
"logging.debug",
"uuid.uuid4",
"packstack.modules.ospluginutils.getManifestTemplate",
"packstack.installer.utils.split_hosts",
"packstack.installer.utils.get_localhost_ip",
"packstack.modules.ospluginutils.appendManifestFile"
] | [((422, 465), 'logging.debug', 'logging.debug', (['"""plugin %s loaded"""', '__name__'], {}), "('plugin %s loaded', __name__)\n", (435, 465), False, 'import logging\n'), ((562, 617), 'logging.debug', 'logging.debug', (['"""Adding OpenStack Neutron configuration"""'], {}), "('Adding OpenStack Neutron configuration')\n", (575, 617), False, 'import logging\n'), ((31144, 31193), 'packstack.installer.utils.split_hosts', 'split_hosts', (["config['CONFIG_NEUTRON_SERVER_HOST']"], {}), "(config['CONFIG_NEUTRON_SERVER_HOST'])\n", (31155, 31193), False, 'from packstack.installer.utils import split_hosts\n'), ((31209, 31255), 'packstack.installer.utils.split_hosts', 'split_hosts', (["config['CONFIG_NEUTRON_L3_HOSTS']"], {}), "(config['CONFIG_NEUTRON_L3_HOSTS'])\n", (31220, 31255), False, 'from packstack.installer.utils import split_hosts\n'), ((31273, 31321), 'packstack.installer.utils.split_hosts', 'split_hosts', (["config['CONFIG_NEUTRON_DHCP_HOSTS']"], {}), "(config['CONFIG_NEUTRON_DHCP_HOSTS'])\n", (31284, 31321), False, 'from packstack.installer.utils import split_hosts\n'), ((31340, 31389), 'packstack.installer.utils.split_hosts', 'split_hosts', (["config['CONFIG_NEUTRON_LBAAS_HOSTS']"], {}), "(config['CONFIG_NEUTRON_LBAAS_HOSTS'])\n", (31351, 31389), False, 'from packstack.installer.utils import split_hosts\n'), ((31407, 31459), 'packstack.installer.utils.split_hosts', 'split_hosts', (["config['CONFIG_NEUTRON_METADATA_HOSTS']"], {}), "(config['CONFIG_NEUTRON_METADATA_HOSTS'])\n", (31418, 31459), False, 'from packstack.installer.utils import split_hosts\n'), ((36145, 36187), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""keystone_neutron.pp"""'], {}), "('keystone_neutron.pp')\n", (36164, 36187), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((36192, 36238), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', 'manifestdata'], {}), 
'(manifestfile, manifestdata)\n', (36210, 36238), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((31555, 31603), 'packstack.installer.utils.split_hosts', 'split_hosts', (["config['CONFIG_NOVA_COMPUTE_HOSTS']"], {}), "(config['CONFIG_NOVA_COMPUTE_HOSTS'])\n", (31566, 31603), False, 'from packstack.installer.utils import split_hosts\n'), ((34107, 34140), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron.pp"""'], {}), "('neutron.pp')\n", (34126, 34140), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((34149, 34208), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifest_file', 'manifest_data', '"""neutron"""'], {}), "(manifest_file, manifest_data, 'neutron')\n", (34167, 34208), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((35117, 35153), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['plugin_manifest'], {}), '(plugin_manifest)\n', (35136, 35153), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((35957, 36016), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifest_file', 'manifest_data', '"""neutron"""'], {}), "(manifest_file, manifest_data, 'neutron')\n", (35975, 36016), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((36746, 36782), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_l3.pp"""'], {}), "('neutron_l3.pp')\n", (36765, 36782), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((36840, 36893), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (36858, 36893), False, 
'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((37692, 37730), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_dhcp.pp"""'], {}), "('neutron_dhcp.pp')\n", (37711, 37730), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((38563, 38622), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifest_file', 'manifest_data', '"""neutron"""'], {}), "(manifest_file, manifest_data, 'neutron')\n", (38581, 38622), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((38826, 38865), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_lbaas.pp"""'], {}), "('neutron_lbaas.pp')\n", (38845, 38865), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((38923, 38976), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (38941, 38976), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((41342, 41376), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['template_name'], {}), '(template_name)\n', (41361, 41376), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((41385, 41438), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (41403, 41438), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((42170, 42217), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_bridge_module.pp"""'], {}), "('neutron_bridge_module.pp')\n", (42189, 42217), False, 'from 
packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((42226, 42279), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (42244, 42279), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((42520, 42562), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_metadata.pp"""'], {}), "('neutron_metadata.pp')\n", (42539, 42562), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((42620, 42673), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (42638, 42673), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((34322, 34359), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_api.pp"""'], {}), "('neutron_api.pp')\n", (34341, 34359), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((34882, 34941), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifest_file', 'manifest_data', '"""neutron"""'], {}), "(manifest_file, manifest_data, 'neutron')\n", (34900, 34941), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((35913, 35947), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""firewall.pp"""'], {}), "('firewall.pp')\n", (35932, 35947), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((37291, 37335), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_ovs_bridge.pp"""'], {}), "('neutron_ovs_bridge.pp')\n", (37310, 37335), False, 'from 
packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((37348, 37401), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (37366, 37401), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((38210, 38244), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""firewall.pp"""'], {}), "('firewall.pp')\n", (38229, 38244), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((38519, 38553), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""firewall.pp"""'], {}), "('firewall.pp')\n", (38538, 38553), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((1056, 1080), 'packstack.installer.utils.get_localhost_ip', 'utils.get_localhost_ip', ([], {}), '()\n', (1078, 1080), False, 'from packstack.installer import utils\n'), ((3050, 3074), 'packstack.installer.utils.get_localhost_ip', 'utils.get_localhost_ip', ([], {}), '()\n', (3072, 3074), False, 'from packstack.installer import utils\n'), ((4534, 4558), 'packstack.installer.utils.get_localhost_ip', 'utils.get_localhost_ip', ([], {}), '()\n', (4556, 4558), False, 'from packstack.installer import utils\n'), ((6639, 6663), 'packstack.installer.utils.get_localhost_ip', 'utils.get_localhost_ip', ([], {}), '()\n', (6661, 6663), False, 'from packstack.installer import utils\n'), ((34834, 34868), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""firewall.pp"""'], {}), "('firewall.pp')\n", (34853, 34868), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((41938, 41980), 'packstack.modules.ospluginutils.getManifestTemplate', 'getManifestTemplate', (['"""neutron_ovs_port.pp"""'], {}), "('neutron_ovs_port.pp')\n", 
(41957, 41980), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((42001, 42054), 'packstack.modules.ospluginutils.appendManifestFile', 'appendManifestFile', (['manifestfile', "(manifestdata + '\\n')"], {}), "(manifestfile, manifestdata + '\\n')\n", (42019, 42054), False, 'from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile\n'), ((1719, 1731), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1729, 1731), False, 'import uuid\n'), ((2348, 2360), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2358, 2360), False, 'import uuid\n'), ((7370, 7382), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (7380, 7382), False, 'import uuid\n')] |
from hyperopt import hp

# Richer example of a hyperopt search space, kept for reference:
# from hyperopt.pyll.base import scope
# search_space = {
#     'epochs': hp.qloguniform('epochs', 0, 4, 2),
#     'max_df': hp.uniform('max_df', 1, 2),
#     'max_ngrams': scope.int(hp.quniform('max_ngram', 3, 9, 1))
# }

# Default search space: only tune the number of training epochs.
search_space = dict(epochs=hp.qloguniform("epochs", 0, 4, 2))
| [
"hyperopt.hp.qloguniform"
] | [((382, 415), 'hyperopt.hp.qloguniform', 'hp.qloguniform', (['"""epochs"""', '(0)', '(4)', '(2)'], {}), "('epochs', 0, 4, 2)\n", (396, 415), False, 'from hyperopt import hp\n')] |
"""
<NAME> S-2013A7PS189P
<NAME> -2013A7PS079P
<NAME> -2013A7PS039P
Artificial Intelligence Term Project
"""
import pickle
import BeautifulSoup
import re
import boto
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from google import search
def get_10_summary(query, source="google"):
    """
    Run *query* through the search source (only Google is wired up; the
    *source* argument is currently unused) and return the raw object from
    google.search.

    NOTE(review): despite the name, no truncation to ten results happens
    here -- the caller is expected to take the first ten entries.
    """
    return search(query)
| [
"google.search"
] | [((579, 592), 'google.search', 'search', (['query'], {}), '(query)\n', (585, 592), False, 'from google import search\n')] |
# -*- coding: utf-8 -*-
from pep3143daemon import DaemonContext, PidFile
import signal
import os
import sys
import time
class Daemon:
    """Helpers to stop or reload a running daemon identified by a pidfile."""

    def stop(self, pidfile):
        """Send SIGTERM to the daemon whose pid is in *pidfile* and wait for it to exit.

        Exits the process with status 1 when the pidfile cannot be read.
        """
        try:
            pid = open(pidfile).readline()
        except IOError:
            print("Daemon already gone, or pidfile was deleted manually")
            sys.exit(1)
        print("terminating Daemon with Pid: {0}".format(pid))
        os.kill(int(pid), signal.SIGTERM)
        sys.stdout.write("Waiting...")
        # BUG FIX: poll the pidfile we were given.  The original polled
        # ``self.pid``, an attribute this class never sets, so the wait
        # loop raised AttributeError instead of waiting for shutdown.
        while os.path.isfile(pidfile):
            sys.stdout.write(".")
            sys.stdout.flush()
            time.sleep(0.5)
        print("Gone")

    def reload(self, pidfile):
        """Send SIGUSR1 to the daemon whose pid is in *pidfile* (config reload).

        Exits the process with status 1 when the pidfile cannot be read.
        """
        try:
            pid = open(pidfile).readline()
        except IOError:
            print("Daemon not running, or pidfile was deleted manually")
            sys.exit(1)
        print("Sending SIGUSR1 to Daemon with Pid: {0}".format(pid))
        os.kill(int(pid), signal.SIGUSR1)
        sys.stdout.write("Ok")
def start(app):
    # Daemonize *app*: read its config, build a pep3143daemon DaemonContext
    # wired to the app's signal handlers, optionally stay in the foreground,
    # then open the context and run the app inside it.
    app.config = app.readConfig(app.config_file)
    # stdout/stderr of the daemonized process are redirected to /tmp logs.
    app.daemon = DaemonContext(pidfile=PidFile(app.pid)
                        , signal_map={signal.SIGTERM: app.program_cleanup,
                                    signal.SIGHUP: app.terminate,
                                    signal.SIGUSR1: app.reload_program_config}
                        # ,files_preserve=(sys.stdout)
                        , stdout=open("/tmp/daemon_stdout.log", 'w')
                        , stderr=open("/tmp/daemon_stderr.log", 'w')
                        , gid=app.config["daemon"]["groupid"])
    print("daemon created")
    if app.nodaemon:
        print("no daemon")
        app.daemon.detach_process = False
    else:
        app.daemon.detach_process = True
    try:
        print("before daemon")
        app.daemon.open()
        print("after daemon")
        app.createLogger()
        app.logger.debug('After open')
        app.run()
    except:
        # Bare except: logs the failure type and re-raises, so nothing is
        # swallowed -- but it also catches SystemExit/KeyboardInterrupt.
        print("Unexpected error:", sys.exc_info()[0])
        raise
| [
"time.sleep",
"os.path.isfile",
"sys.exc_info",
"sys.exit",
"sys.stdout.flush",
"pep3143daemon.PidFile",
"sys.stdout.write"
] | [((455, 485), 'sys.stdout.write', 'sys.stdout.write', (['"""Waiting..."""'], {}), "('Waiting...')\n", (471, 485), False, 'import sys\n'), ((500, 524), 'os.path.isfile', 'os.path.isfile', (['self.pid'], {}), '(self.pid)\n', (514, 524), False, 'import os\n'), ((970, 992), 'sys.stdout.write', 'sys.stdout.write', (['"""Ok"""'], {}), "('Ok')\n", (986, 992), False, 'import sys\n'), ((538, 559), 'sys.stdout.write', 'sys.stdout.write', (['"""."""'], {}), "('.')\n", (554, 559), False, 'import sys\n'), ((572, 590), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (588, 590), False, 'import sys\n'), ((603, 618), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (613, 618), False, 'import time\n'), ((331, 342), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (339, 342), False, 'import sys\n'), ((839, 850), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (847, 850), False, 'import sys\n'), ((1110, 1126), 'pep3143daemon.PidFile', 'PidFile', (['app.pid'], {}), '(app.pid)\n', (1117, 1126), False, 'from pep3143daemon import DaemonContext, PidFile\n'), ((2174, 2188), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2186, 2188), False, 'import sys\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration: make most Game/Publisher columns
    nullable and pin CharField/URLField lengths (follow-up to
    0003_auto_20150725_1737)."""

    dependencies = [
        ('games', '0003_auto_20150725_1737'),
    ]

    operations = [
        # Game: free-text and numeric columns become nullable.
        migrations.AlterField(
            model_name='game',
            name='description',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='max_players',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='max_time',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='min_players',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='min_time',
            field=models.IntegerField(null=True),
        ),
        migrations.AlterField(
            model_name='game',
            name='name',
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name='game',
            name='url',
            field=models.URLField(null=True),
        ),
        # Publisher: country is a 2-letter (ISO-style) code, nullable.
        migrations.AlterField(
            model_name='publisher',
            name='country',
            field=models.CharField(max_length=2, null=True),
        ),
        migrations.AlterField(
            model_name='publisher',
            name='url',
            field=models.URLField(null=True),
        ),
    ]
| [
"django.db.models.URLField",
"django.db.models.CharField",
"django.db.models.TextField",
"django.db.models.IntegerField"
] | [((353, 380), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (369, 380), False, 'from django.db import models, migrations\n'), ((505, 535), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (524, 535), False, 'from django.db import models, migrations\n'), ((657, 687), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (676, 687), False, 'from django.db import models, migrations\n'), ((812, 842), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (831, 842), False, 'from django.db import models, migrations\n'), ((964, 994), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (983, 994), False, 'from django.db import models, migrations\n'), ((1112, 1144), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1128, 1144), False, 'from django.db import models, migrations\n'), ((1261, 1287), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)'}), '(null=True)\n', (1276, 1287), False, 'from django.db import models, migrations\n'), ((1413, 1454), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'null': '(True)'}), '(max_length=2, null=True)\n', (1429, 1454), False, 'from django.db import models, migrations\n'), ((1576, 1602), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)'}), '(null=True)\n', (1591, 1602), False, 'from django.db import models, migrations\n')] |
import sys
import json
from os import path
from argparse import ArgumentParser
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))) + '/utils/')
from algorithm_utils import set_algorithms_output_data
from health_check_lib import HealthCheckLocalDT
def main():
    """Entry point: load the local health-check DB given on the command line
    and publish its active-node list as the algorithm's JSON output."""
    parser = ArgumentParser()
    parser.add_argument('-local_step_dbs', required=True, help='Path to local db.')
    args, unknown = parser.parse_known_args()
    db_path = path.abspath(args.local_step_dbs)

    local_result = HealthCheckLocalDT.load(db_path)
    nodes = {"active_nodes": local_result.get_data()}

    # Hand the result back through the algorithm-output channel.
    set_algorithms_output_data(json.dumps(nodes))


if __name__ == '__main__':
    main()
| [
"os.path.abspath",
"json.dumps",
"health_check_lib.HealthCheckLocalDT.load",
"argparse.ArgumentParser"
] | [((324, 340), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (338, 340), False, 'from argparse import ArgumentParser\n'), ((490, 523), 'os.path.abspath', 'path.abspath', (['args.local_step_dbs'], {}), '(args.local_step_dbs)\n', (502, 523), False, 'from os import path\n'), ((543, 577), 'health_check_lib.HealthCheckLocalDT.load', 'HealthCheckLocalDT.load', (['local_dbs'], {}), '(local_dbs)\n', (566, 577), False, 'from health_check_lib import HealthCheckLocalDT\n'), ((719, 736), 'json.dumps', 'json.dumps', (['nodes'], {}), '(nodes)\n', (729, 736), False, 'import json\n'), ((127, 149), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (139, 149), False, 'from os import path\n')] |
import logging
from utils.common.datapipeline import DataPipeline
import boto3
import json
from copy import deepcopy
s3 = boto3.resource('s3')  # credentials come from the standard boto3 lookup chain
bucket = s3.Bucket('tier-0')  # bucket holding the organisation JSON chunks read by run()
def run(config=None):
    """Collect organisation records from the tier-0 bucket, flatten each
    organisation's ``results`` entries into one row apiece, and insert the
    de-duplicated rows into the data pipeline."""
    organisations = []
    for obj in bucket.objects.all():
        key = str(obj.key)
        # Only keys shaped like three "_"-separated parts hold data chunks.
        if len(key.split("_")) != 3:
            continue
        payload = obj.get()['Body'].read().decode("utf-8")
        organisations += json.loads(payload)
        # if len(orgs) >= 1000:
        #     break
    logging.info("\tGot %s organisations.",len(organisations))

    rows = []
    for org in organisations:
        for result in org["results"]:
            record = deepcopy(org)
            record.pop("results")
            record = dict(**record, **result)
            if record not in rows:
                rows.append(record)

    # Write data
    logging.info("\tWriting to table")
    with DataPipeline(config) as dp:
        for record in rows:
            dp.insert(record)
if __name__ == "__main__":
#run()
#import numpy as np
#all_numbers = list(np.arange(0,37242,6))
#all_numbers.append(37242)
print(len(open("not_done").read().split()))
n = 0
for obj in bucket.objects.all():
n += int(len(obj.key.split("_")) == 3)
#if key not in all_numbers:
# continue
#print(key,"!!")
#else:
# all_numbers.remove(key)
print(n)
# with open("not_done","w") as f:
# for n in all_numbers:
# print("-->",n,"<--")
# f.write(str(n)+" ")
#data = obj.get()['Body'].read().decode("utf-8")
#orgs += json.loads(data)
| [
"json.loads",
"utils.common.datapipeline.DataPipeline",
"boto3.resource",
"copy.deepcopy",
"logging.info"
] | [((123, 143), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (137, 143), False, 'import boto3\n'), ((804, 838), 'logging.info', 'logging.info', (['"""\tWriting to table"""'], {}), "('\\tWriting to table')\n", (816, 838), False, 'import logging\n'), ((405, 421), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (415, 421), False, 'import json\n'), ((848, 868), 'utils.common.datapipeline.DataPipeline', 'DataPipeline', (['config'], {}), '(config)\n', (860, 868), False, 'from utils.common.datapipeline import DataPipeline\n'), ((615, 628), 'copy.deepcopy', 'deepcopy', (['org'], {}), '(org)\n', (623, 628), False, 'from copy import deepcopy\n')] |
import math
import numpy as np
from matplotlib.patches import FancyArrowPatch
def home_has_possession(row):
    """Return True when the team in possession is the home team."""
    return row.possessionTeam == row.homeTeamAbbr
def calculate_team_sitation(row):
    """Classify a tracking row as 'football', 'attacking', or 'defending'."""
    if row.team == 'football':
        return 'football'
    home_on_offense = row.homeHasPossession
    offense = (row.team == 'home' and home_on_offense) or \
              (row.team == 'away' and not home_on_offense)
    return 'attacking' if offense else 'defending'
def convert_speed_to_marker_size(speed: float) -> int:
    """Bucket a speed (yards/s) into a marker size.

    Speeds outside (0, 6] -- including zero and negatives -- map to the
    largest size, matching the original fall-through behavior.
    """
    for upper, size in ((1.5, 10), (3, 15), (4.5, 20), (6, 25)):
        if 0 < speed <= upper:
            return size
    return 30
def arrow(x, y, s, ax, color):
    """Draw movement arrows along a trajectory.

    :param x: positions on the x-axis
    :param y: positions on the y-axis
    :param s: speeds in yards/s (scales each arrow head)
    :param ax: axes to draw on
    :param color: arrow color
    :return: arrows added as patches to *ax*
    """
    # one arrow every ``step`` samples, connecting consecutive points
    step = 5
    for i in range(1, len(x), step):
        patch = FancyArrowPatch(
            (x[i - 1], y[i - 1]),
            (x[i], y[i]),
            arrowstyle='->',
            mutation_scale=convert_speed_to_marker_size(s[i]),
            color=color,
        )
        ax.add_patch(patch)
def calculate_arrow_xy(x, y, o):
    """Return the endpoint of a short direction arrow starting at (x, y).

    *o* is an orientation in degrees measured clockwise from "up":
    o == 0 points toward +y, o == 90 toward +x.  The arrow length is a
    fixed 0.1 units.

    Fix: the original eight-way quadrant branch (which also contained a
    leftover debug ``print``) reduces to the single closed form
    ``(x + sin(o) * delta, y + cos(o) * delta)`` -- each branch computed
    exactly these components via complementary-angle identities.
    """
    delta = 0.1
    angle = math.radians(o % 360)
    return x + math.sin(angle) * delta, y + math.cos(angle) * delta
def arrow_o(x, y, o, s, ax, color):
    """Draw orientation arrows along a trajectory.

    :param x: positions on the x-axis
    :param y: positions on the y-axis
    :param o: orientations in degrees 0-360
    :param s: speeds in yards/s (scales each arrow head)
    :param ax: axes to draw on
    :param color: arrow color
    :return: arrows added as patches to *ax*
    """
    # one arrow every ``step`` samples, starting at the sixth point
    step = 3
    for i in range(5, len(x), step):
        tip = calculate_arrow_xy(x[i], y[i], o[i])
        patch = FancyArrowPatch(
            (x[i], y[i]),
            tip,
            arrowstyle='-|>',
            mutation_scale=convert_speed_to_marker_size(s[i]),
            alpha=0.6,
            color=color,
        )
        ax.add_patch(patch)
def calculate_distance_v4(x1: np.array, y1: np.array, x2: np.array, y2: np.array) -> np.array:
    """Element-wise Euclidean distance between (x1, y1) and (x2, y2), rounded to 2 decimals."""
    dx = x1 - x2
    dy = y1 - y2
    return np.round(np.sqrt(dx * dx + dy * dy), 2)
| [
"math.radians",
"math.sqrt",
"numpy.square"
] | [((3550, 3568), 'numpy.square', 'np.square', (['(x1 - x2)'], {}), '(x1 - x2)\n', (3559, 3568), True, 'import numpy as np\n'), ((3571, 3589), 'numpy.square', 'np.square', (['(y1 - y2)'], {}), '(y1 - y2)\n', (3580, 3589), True, 'import numpy as np\n'), ((2039, 2075), 'math.sqrt', 'math.sqrt', (['(delta ** 2 - y_delta ** 2)'], {}), '(delta ** 2 - y_delta ** 2)\n', (2048, 2075), False, 'import math\n'), ((2214, 2250), 'math.sqrt', 'math.sqrt', (['(delta ** 2 - y_delta ** 2)'], {}), '(delta ** 2 - y_delta ** 2)\n', (2223, 2250), False, 'import math\n'), ((1991, 2011), 'math.radians', 'math.radians', (['(90 - o)'], {}), '(90 - o)\n', (2003, 2011), False, 'import math\n'), ((2391, 2427), 'math.sqrt', 'math.sqrt', (['(delta ** 2 - x_delta ** 2)'], {}), '(delta ** 2 - x_delta ** 2)\n', (2400, 2427), False, 'import math\n'), ((2554, 2590), 'math.sqrt', 'math.sqrt', (['(delta ** 2 - y_delta ** 2)'], {}), '(delta ** 2 - y_delta ** 2)\n', (2563, 2590), False, 'import math\n'), ((2166, 2186), 'math.radians', 'math.radians', (['(o - 90)'], {}), '(o - 90)\n', (2178, 2186), False, 'import math\n'), ((2342, 2363), 'math.radians', 'math.radians', (['(o - 180)'], {}), '(o - 180)\n', (2354, 2363), False, 'import math\n'), ((2505, 2526), 'math.radians', 'math.radians', (['(o - 270)'], {}), '(o - 270)\n', (2517, 2526), False, 'import math\n')] |
#!/usr/bin/env python
# 標準ライブラリ
from pathlib import Path
from re import search, sub
from sys import exit, argv
from xml.etree import ElementTree as ET
import csv
# サードパーティライブラリ
from requests import get
from requests.exceptions import Timeout, RequestException
# ローカルなライブラリ
from constants import ENC_API_KEY, NTA_API_URL
from crypt_string import decrypt_strings
def validate_number(corp_number: str) -> bool:
    """Validate a 13-digit Japanese corporate number via its check digit.

    The first digit is a check digit computed from the remaining twelve:
    ``9 - ((2 * sum of even-positioned digits from the right
            + sum of odd-positioned digits from the right) % 9)``

    Returns True when the embedded check digit matches, False otherwise.
    """
    digits = [int(ch) for ch in corp_number]
    check_digit = digits[0]
    body = digits[1:]
    # STEP 1: weighted sum; STEP 2: mod 9; STEP 3: subtract from 9.
    weighted = sum(body[-2::-2]) * 2 + sum(body[-1::-2])
    return check_digit == 9 - weighted % 9
def get_corp_info(api_key: str, corp_number: str) -> str:
    """
    Query the NTA (National Tax Agency) corporate-number Web API, print the
    matching records, and write them to ../log/corp_info.csv.

    Parameters
    ----------
    api_key : str
        Application ID issued for the NTA Web API.
    corp_number : str
        Comma-separated 13-digit corporate number(s) to look up.

    Returns
    -------
    str
        NOTE(review): annotated as str, but the function has no return
        statement and therefore returns None -- confirm intent.
    """
    # Build the query parameters
    # ------------------------------------------------------------------------------
    params = {
        'id': api_key,
        'number': corp_number,
        'type': '12',
        'history': '0',
    }
    # Fetch the corporate information (exit 11 on timeout, 12 on HTTP error)
    # ------------------------------------------------------------------------------
    try:
        response = get(NTA_API_URL, params=params, timeout=3.0)
        response.raise_for_status()
    except Timeout as err:
        # TODO: switch to logging output (needs study).
        print(err)
        print("タイムアウトしました。")
        exit(11)
    except RequestException as err:
        # TODO: switch to logging output (needs study).
        print(err)
        exit(12)
    # Parse the XML response and print/collect each record
    # ------------------------------------------------------------------------------
    root = ET.fromstring(response.text)
    # The first data element starts at index 4; earlier elements are metadata.
    num = 4
    # Header row: number, last-updated, name, address, postal code, kana name.
    corp_info_list = [["法人番号", "最終更新年月日", "商号又は名称",
                       "本店又は主たる事務所の所在地", "郵便番号", "商号又は名称(フリガナ)"]]
    if num >= len(root):
        # TODO: switch to logging output (needs study).
        print("指定された法人番号(" + corp_number + ")のデータが存在しません。")
    else:
        while num < len(root):
            # Child indices follow the NTA response schema; the postal code
            # is reformatted as NNN-NNNN.
            corp_info_list.append([root[num][1].text,
                                   root[num][4].text,
                                   root[num][6].text,
                                   root[num][9].text +
                                   root[num][10].text +
                                   root[num][11].text,
                                   sub(r'([0-9]{3})([0-9]{4})',
                                       r'\1-\2', root[num][15].text),
                                   root[num][28].text])
            num += 1
    for corp_info in corp_info_list[1:]:
        print("{0: <14} : {1}".format(corp_info_list[0][0], corp_info[0]))
        print("{0: <14} : {1}".format(corp_info_list[0][2], corp_info[2]))
        print("{0: <14} : {1}".format(corp_info_list[0][5], corp_info[5]))
        print("{0: <14} : {1}".format(corp_info_list[0][4], corp_info[4]))
        print("{0: <14} : {1}".format(corp_info_list[0][3], corp_info[3]))
        print("{0: <14} : {1}".format(corp_info_list[0][1], corp_info[1]))
        print("")
    try:
        with open('../log/corp_info.csv', 'w', encoding='utf-8') as csv_out:
            writer = csv.writer(csv_out, lineterminator='\n')
            writer.writerows(corp_info_list)
    except FileNotFoundError as err:
        # TODO: switch to logging output (needs study).
        print(err)
    except PermissionError as err:
        # TODO: switch to logging output (needs study).
        print(err)
    except csv.Error as err:
        # TODO: switch to logging output (needs study).
        print(err)
if __name__ == "__main__":
# Web-API利用用アプリケーションIDの復号
if Path(argv[-1]).is_file():
api_key = decrypt_strings(ENC_API_KEY, argv[-1])
del argv[-1]
else:
api_key = decrypt_strings(ENC_API_KEY)
# 入力された法人番号の確認
if not argv[1:]:
# TODO: logging で出力するように変更する。要学習。
print("法人番号が指定されてません。")
exit(1)
else:
for corp_number in argv[1:]:
if not search("^[1-9][0-9]{12}$", corp_number):
# TODO: logging で出力するように変更する。要学習。
print("法人番号は13桁で指定して下さい。")
exit(2)
elif not validate_number(corp_number):
# TODO: logging で出力するように変更する。要学習。
print("指定された法人番号(" + corp_number + ")は正しくありません。")
exit(3)
# 法人番号から情報を取得する。
corp_numbers = ",".join(map(str, argv[1:]))
get_corp_info(api_key, corp_numbers)
exit(0)
| [
"pathlib.Path",
"csv.writer",
"crypt_string.decrypt_strings",
"requests.get",
"sys.exit",
"re.sub",
"xml.etree.ElementTree.fromstring",
"re.search"
] | [((2193, 2221), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['response.text'], {}), '(response.text)\n', (2206, 2221), True, 'from xml.etree import ElementTree as ET\n'), ((4955, 4962), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (4959, 4962), False, 'from sys import exit, argv\n'), ((1749, 1793), 'requests.get', 'get', (['NTA_API_URL'], {'params': 'params', 'timeout': '(3.0)'}), '(NTA_API_URL, params=params, timeout=3.0)\n', (1752, 1793), False, 'from requests import get\n'), ((4170, 4208), 'crypt_string.decrypt_strings', 'decrypt_strings', (['ENC_API_KEY', 'argv[-1]'], {}), '(ENC_API_KEY, argv[-1])\n', (4185, 4208), False, 'from crypt_string import decrypt_strings\n'), ((4260, 4288), 'crypt_string.decrypt_strings', 'decrypt_strings', (['ENC_API_KEY'], {}), '(ENC_API_KEY)\n', (4275, 4288), False, 'from crypt_string import decrypt_strings\n'), ((4412, 4419), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (4416, 4419), False, 'from sys import exit, argv\n'), ((1956, 1964), 'sys.exit', 'exit', (['(11)'], {}), '(11)\n', (1960, 1964), False, 'from sys import exit, argv\n'), ((2071, 2079), 'sys.exit', 'exit', (['(12)'], {}), '(12)\n', (2075, 2079), False, 'from sys import exit, argv\n'), ((3684, 3724), 'csv.writer', 'csv.writer', (['csv_out'], {'lineterminator': '"""\n"""'}), "(csv_out, lineterminator='\\n')\n", (3694, 3724), False, 'import csv\n'), ((4125, 4139), 'pathlib.Path', 'Path', (['argv[-1]'], {}), '(argv[-1])\n', (4129, 4139), False, 'from pathlib import Path\n'), ((4489, 4528), 're.search', 'search', (['"""^[1-9][0-9]{12}$"""', 'corp_number'], {}), "('^[1-9][0-9]{12}$', corp_number)\n", (4495, 4528), False, 'from re import search, sub\n'), ((4639, 4646), 'sys.exit', 'exit', (['(2)'], {}), '(2)\n', (4643, 4646), False, 'from sys import exit, argv\n'), ((2889, 2947), 're.sub', 'sub', (['"""([0-9]{3})([0-9]{4})"""', '"""\\\\1-\\\\2"""', 'root[num][15].text'], {}), "('([0-9]{3})([0-9]{4})', '\\\\1-\\\\2', root[num][15].text)\n", (2892, 2947), False, 
'from re import search, sub\n'), ((4831, 4838), 'sys.exit', 'exit', (['(3)'], {}), '(3)\n', (4835, 4838), False, 'from sys import exit, argv\n')] |
# custom libs
from lib.args import getConf
# Python libs
from re import sub
from os import mkdir
from os.path import exists
from getpass import getuser
from socket import gethostname
def genFrame(file):
    """Build a generation-0 Frame from the grid encoded in *file*."""
    from classes.frame import Frame
    from lib.array import getGrid
    grid = getGrid(file)
    width = len(grid[0])
    height = len(grid)
    return Frame(width, height, 0, grid)
# given an int (treated as binary list), generate all unique rotational permutations of int (circular shifts)
# http://bit.ly/GLdKmI
def genPermutations(i, width):
    """Return the unique circular right-shifts of *i* within a *width*-bit word."""
    seen = set()
    value = i
    for _ in range(width):
        seen.add(value)
        # rotate right: the low bit wraps around to the top of the word
        value = (value >> 1) | ((value & 1) << (width - 1))
    return list(seen)
# given a string representation of a neighbor configuration, return the number of neighbors in the configuration
def getConfigNum(config):
    """Count the '1' flags in the neighbor-configuration string *config*.

    Fix: the original ``len(filter(...))`` only works on Python 2, where
    ``filter`` returns a list; on Python 3 it raises TypeError because
    ``filter`` returns an iterator.  ``str.count`` is equivalent on both.
    """
    return config.count("1")
# makes a unique directory
def initDir(dir):
    """Create a directory named *dir* (or *dir*.N for the first free N) and return its path."""
    candidate = dir
    attempt = 0
    while exists(candidate):
        attempt += 1
        candidate = dir + "." + str(attempt)
    mkdir(candidate)
    return candidate
def pad(i, max):
    """Zero-pad *i* to the number of digits in *max* (e.g. pad(7, 100) -> '007')."""
    return str(i).zfill(len(str(max)))
def resolveBoundary(bound, coord):
    """Wrap *coord* once across a periodic (toroidal) boundary of size *bound*."""
    if coord < 0:
        coord += bound
    elif coord > bound - 1:
        coord -= bound
    return coord
# given an array of lines:
#   - whole-line and trailing "#" comments are stripped
#   - all whitespace is removed
#   - empty lines and lines ending in ":" are dropped
def prep(file):
    """Normalize config lines: strip comments/whitespace, drop blanks and ':'-terminated labels."""
    cleaned = []
    for raw in file:
        stripped = sub(r'\s', '', raw.split("#")[0])
        if stripped and not stripped.endswith(":"):
            cleaned.append(stripped)
    return cleaned
# bin() format is "0bxxxxxx"
# [2:] strips "0b"
# [-width:] selects last < width > chars
def toBin(i, width):
return(bin(i)[2:][-width:].zfill(width))
# renders the configuration file
def renderConfig(name):
    """Return the contents of config file *name* prefixed with a title line.

    Fix: the original left the file handle open; ``with`` closes it, and
    ``read()`` replaces the line-by-line concatenation (same result).
    """
    with open(name, "r") as fp:
        body = fp.read()
    return "config file for " + name + ":\n\n" + body
# given a config file, output a CSV line
def renderCSV(simulation):
    """Build one CSV line summarising *simulation*'s config, or () when the
    completion marker is absent.

    ``<simulation>/conf.conf`` is used as an existence probe; the values
    themselves are read from ``<simulation>/config.conf``.
    NOTE(review): the two different filenames may be intentional -- confirm.
    Fix: the probe handle is now closed (the original leaked it).
    """
    try:
        with open(simulation + "/conf.conf", "r"):
            pass
    except IOError:
        return ()
    params = getConf(simulation + "/config.conf")
    # Host-qualified run identifier, then the scalar parameters ...
    s = getuser() + "@" + gethostname() + ":" + simulation + ","
    s += str(params["steps"]) + ","
    s += str(params["dens"]) + ","
    s += str(params["hori"]) + ","
    s += str(params["diag"]) + ","
    s += str(params["beta"]) + ","
    # ... followed by the energy table, keyed by neighbor configuration.
    s += str(params["energies"][0]["000000"]) + ","
    s += str(params["energies"][1]["000001"]) + ","
    s += str(params["energies"][2]["000011"]) + ","
    s += str(params["energies"][2]["000101"]) + ","
    s += str(params["energies"][2]["001001"]) + ","
    s += str(params["energies"][3]["000111"]) + ","
    s += str(params["energies"][3]["001011"]) + ","
    s += str(params["energies"][3]["010011"]) + ","
    s += str(params["energies"][3]["010101"]) + ","
    s += str(params["energies"][4]["001111"]) + ","
    s += str(params["energies"][4]["010111"]) + ","
    s += str(params["energies"][4]["011011"]) + ","
    s += str(params["energies"][5]["011111"]) + ","
    s += str(params["energies"][6]["111111"])
    return s
| [
"os.path.exists",
"lib.args.getConf",
"lib.array.getGrid",
"os.mkdir",
"getpass.getuser",
"socket.gethostname"
] | [((277, 290), 'lib.array.getGrid', 'getGrid', (['file'], {}), '(file)\n', (284, 290), False, 'from lib.array import getGrid\n'), ((1007, 1021), 'os.path.exists', 'exists', (['tmpDir'], {}), '(tmpDir)\n', (1013, 1021), False, 'from os.path import exists\n'), ((1064, 1077), 'os.mkdir', 'mkdir', (['tmpDir'], {}), '(tmpDir)\n', (1069, 1077), False, 'from os import mkdir\n'), ((2338, 2374), 'lib.args.getConf', 'getConf', (["(simulation + '/config.conf')"], {}), "(simulation + '/config.conf')\n", (2345, 2374), False, 'from lib.args import getConf\n'), ((2398, 2411), 'socket.gethostname', 'gethostname', ([], {}), '()\n', (2409, 2411), False, 'from socket import gethostname\n'), ((2380, 2389), 'getpass.getuser', 'getuser', ([], {}), '()\n', (2387, 2389), False, 'from getpass import getuser\n')] |
""" This module comes with functions to decide which poker player out
of all players has the best cards.
"""
import itertools
# full_list in [('A','A'),('B','B')...,('F','F')]
def results(full_list, public_card):
    """Decide which player(s) hold the winning poker hand.

    For each player, every 5-card combination of hole + community cards is
    scored as ``hand_rank * 100 + tiebreak``; the best combination per
    player is kept, and players tied at the overall maximum all win.

    Returns a tuple ``(winner, winner_card_type)`` where *winner* is a
    tuple of winning player indices and *winner_card_type* the matching
    hand names.
    NOTE(review): *full_list* entries are mutated in place (community
    cards are appended) -- confirm callers do not reuse them.
    """
    #public_card = ['6H', '6D', '5S', '4S', '8S']
    #full_list = [['9C', 'AS'], ['9H', '5C'], ['4D', '2S'], ['KC', '2D'], ['9D', '10C']]
    high_comb_rank = []
    high_type_rank = []
    high_point_rank = []
    public_card_temp = []
    winner_card_type = []
    public_card_temp.extend(list(public_card))
    # NOTE(review): total_players and public_card_temp are computed but
    # never used below.
    total_players = len(full_list)
    for player_card_check in full_list:
        player_card_check += public_card
        card_combinations = list(itertools.combinations(player_card_check, 5))
        # Per combination: suit of each card ([-1]) and rank of each card
        # ([-2]; '0' is the second char of '10', J/Q/K/A map to 11-14).
        color_all = []
        size_all = []
        for card_combination in card_combinations:
            color_current = []
            for card in card_combination:
                color_current.append(str(card[-1]))
            color_all.append(color_current)
            size_current = []
            for card in card_combination:
                if card[-2].isdigit():
                    size5 = int(card[-2])
                    if size5 == 0:
                        size5 = 10
                else:
                    if card[-2] == "J":
                        size5 = 11
                    elif card[-2] == "Q":
                        size5 = 12
                    elif card[-2] == "K":
                        size5 = 13
                    elif card[-2] == "A":
                        size5 = 14
                size_current.append(size5)
            size_all.append(size_current)
        card_type_all = []
        type_score_all = []
        high_card_all = []
        win_point = []
        for i, card_combination in enumerate(card_combinations):
            color = color_all[i]
            size = size_all[i]
            high_card = []
            card_type = []
            size_set = list(set(size))
            # Unusual idiom: while/else used as an if/else -- the body
            # always breaks, and the else runs when the suits are mixed.
            while len(set(color)) == 1:
                if max(size) - min(size) == 4:
                    card_type = 'Straight flush'
                    high_card = max(size)
                    break
                else:
                    card_type = 'Flush'
                    high_card = sum(size)
                    break
            else:
                # Mixed suits: classify by the multiset of ranks.
                if len(set(size)) == 5:
                    if max(size) - min(size) == 4:
                        if sorted(size)[2] == sum(size) / len(size):
                            card_type = 'Straight'
                            high_card = max(size)
                    elif max(size) - min(size) == 12:
                        # sum == 28 is the A-2-3-4-5 wheel (14+2+3+4+5).
                        if sum(size) == 28:
                            card_type = 'Straight'
                            high_card = 5
                        else:
                            card_type = 'High card'
                            high_card = sum(size)
                    else:
                        card_type = 'High card'
                        high_card = sum(size)
                elif len(size) - 1 == len(set(size)):
                    card_type = 'One pair'
                    high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                elif len(size) - 2 == len(set(size)):
                    # Two duplicates: zero out one copy of each distinct
                    # rank to isolate the leftover pair/trips, then
                    # restore ``size`` from the backup.
                    size_temp = []
                    size_temp.extend(size)
                    for a in range(0, 5):
                        for b in range(0, 3):
                            if size[a] == size_set[b]:
                                size[a] = 0
                                size_set[b] = 0
                    last = [x for x in size if x != 0]
                    size = []
                    size.extend(size_temp)
                    if last[0] == last[1]:
                        card_type = 'Three of a kind'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                    else:
                        card_type = 'Two pairs'
                        high_card = sum([x for n, x in enumerate(size) if x in size[:n]])
                elif len(size) - 3 == len(set(size)):
                    # Only two distinct ranks: four of a kind or full house.
                    for a in range(0, 5):
                        for b in range(0, 2):
                            if size[a] == size[b]:
                                size[a] = 0
                                size_set[b] = 0
                    last = [x for x in size if x != 0]
                    if last[0] == last[1] == last[2]:
                        card_type = 'Four of a kind'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
                    else:
                        card_type = 'Full house'
                        high_card = max([x for n, x in enumerate(size) if x in size[:n]])
            # Map the hand name to its rank, 9 (best) down to 1.
            type_score = []
            if card_type == 'Straight flush':
                type_score = 9
            elif card_type == 'Four of a kind':
                type_score = 8
            elif card_type == 'Full house':
                type_score = 7
            elif card_type == 'Flush':
                type_score = 6
            elif card_type == 'Straight':
                type_score = 5
            elif card_type == 'Three of a kind':
                type_score = 4
            elif card_type == 'Two pairs':
                type_score = 3
            elif card_type == 'One pair':
                type_score = 2
            elif card_type == 'High card':
                type_score = 1
            card_type_all.append(card_type)
            high_card_all.append(high_card)
            # Combined score: hand rank dominates, high card breaks ties.
            win_point.append(type_score * int(100) + high_card)
        # Keep this player's best combination and its score.
        high_point = max(win_point)
        locate = win_point.index(max(win_point))
        high_comb = card_combinations[locate]
        high_type = card_type_all[locate]
        high_point_rank.append(high_point)
        high_comb_rank.append(high_comb)
        high_type_rank.append(high_type)
    # All players tied at the maximum score are winners.
    winner = ()
    for i in range(len(high_point_rank)):
        if high_point_rank[i] == max(high_point_rank):
            winner += (i,)
    for i in winner:
        a = int(i)
        b = high_type_rank[a]
        winner_card_type.append(b)
    return (winner, winner_card_type)
| [
"itertools.combinations"
] | [((864, 908), 'itertools.combinations', 'itertools.combinations', (['player_card_check', '(5)'], {}), '(player_card_check, 5)\n', (886, 908), False, 'import itertools\n')] |
from discord.ext.commands import Bot
from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType
from asyncio import TimeoutError
bot = Bot("!")
@bot.event
async def on_ready():
    # Attach discord_components' button machinery to the bot once the
    # gateway connection is ready, then log the bot identity.
    DiscordComponents(bot)
    print(f"Logged in as {bot.user}!")
@bot.command()
async def waitforclick(ctx):
    """Post a button and report which one the invoking user clicks.

    The prompt is edited to a disabled "timed out" button if no click
    arrives within 15 seconds.
    """
    prompt = await ctx.send(
        "Buttons waiting for a click",
        components=[Button(style=ButtonStyle.red, label="Click Me!")],
    )

    def is_invoker(res):
        # Only accept clicks from the command author in the same channel.
        return ctx.author == res.user and res.channel == ctx.channel

    try:
        click = await bot.wait_for("button_click", check=is_invoker, timeout=15)
        await click.respond(
            type=InteractionType.ChannelMessageWithSource,
            content=f"{click.component.label} pressed",
        )
    except TimeoutError:
        await prompt.edit(
            "Prompt timed out!",
            components=[Button(style=ButtonStyle.red, label="Timed out!", disabled=True)],
        )
)
bot.run("TOKEN")
| [
"discord.ext.commands.Bot",
"discord_components.Button",
"discord_components.DiscordComponents"
] | [((164, 172), 'discord.ext.commands.Bot', 'Bot', (['"""!"""'], {}), "('!')\n", (167, 172), False, 'from discord.ext.commands import Bot\n'), ((212, 234), 'discord_components.DiscordComponents', 'DiscordComponents', (['bot'], {}), '(bot)\n', (229, 234), False, 'from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType\n'), ((416, 464), 'discord_components.Button', 'Button', ([], {'style': 'ButtonStyle.red', 'label': '"""Click Me!"""'}), "(style=ButtonStyle.red, label='Click Me!')\n", (422, 464), False, 'from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType\n'), ((916, 980), 'discord_components.Button', 'Button', ([], {'style': 'ButtonStyle.red', 'label': '"""Timed out!"""', 'disabled': '(True)'}), "(style=ButtonStyle.red, label='Timed out!', disabled=True)\n", (922, 980), False, 'from discord_components import DiscordComponents, Button, ButtonStyle, InteractionType\n')] |
# -*- coding: utf-8 -*-
# Author:Guzhongren
# created: 2017-05-08
import os

path = 'C:\\geoconFailed\\willfix\\'
# Drop the last character of each file's stem, keeping the extension
# (e.g. "name1.tif" -> "name.tif").
for entry in os.listdir(path):
    if os.path.isfile(os.path.join(path, entry)):
        # Fix: os.path.splitext handles names containing extra dots; the
        # original split(".") silently dropped everything after the first
        # dot for such names.  Also dropped the "== True" comparison.
        stem, ext = os.path.splitext(entry)
        new_name = stem[:-1] + ext
        os.rename(os.path.join(path, entry), os.path.join(path, new_name))
        print(entry + u"更名成功")
| [
"os.listdir",
"os.path.join"
] | [((126, 142), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (136, 142), False, 'import os\n'), ((166, 190), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (178, 190), False, 'import os\n'), ((356, 380), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (368, 380), False, 'import os\n'), ((381, 414), 'os.path.join', 'os.path.join', (['path', 'new_file_name'], {}), '(path, new_file_name)\n', (393, 414), False, 'import os\n')] |
"""
Automatic speech recognition scenario
"""
import logging
from typing import Optional
from tqdm import tqdm
import numpy as np
from art.preprocessing.audio import LFilter, LFilterPyTorch
from armory.utils.config_loading import (
load_dataset,
load_model,
load_attack,
load_adversarial_dataset,
load_defense_wrapper,
load_defense_internal,
load_label_targeter,
)
from armory.utils import metrics
from armory.scenarios.base import Scenario
from armory.utils.export import SampleExporter
logger = logging.getLogger(__name__)
def load_audio_channel(delay, attenuation, pytorch=True):
    """Build an art audio-channel filter modeling a single multipath echo.

    A ``delay`` of 0 or an ``attenuation`` of 0 yields an identity channel;
    otherwise the FIR numerator has length ``delay + 1`` with a unit tap at
    index 0 and the attenuation tap at index ``delay``.

    NOTE: lfilter truncates the end of the echo, so output length equals
    input length.
    """
    delay = int(delay)
    attenuation = float(attenuation)
    if delay < 0:
        raise ValueError(f"delay {delay} must be a nonnegative number (of samples)")

    if delay and attenuation:
        if not -1 <= attenuation <= 1:
            logger.warning(f"filter attenuation {attenuation} not in [-1, 1]")
        # One direct tap plus one attenuated echo tap `delay` samples later.
        b = np.zeros(delay + 1)
        b[0] = 1.0
        b[delay] = attenuation
        a = np.zeros_like(b)
        a[0] = 1.0
    else:
        logger.warning("Using an identity channel")
        b = np.array([1.0])
        a = np.array([1.0])

    if pytorch:
        try:
            return LFilterPyTorch(numerator_coef=b, denominator_coef=a)
        except ImportError:
            logger.exception("PyTorch not available. Resorting to scipy filter")
    logger.warning("Scipy LFilter does not currently implement proper gradients")
    return LFilter(numerator_coef=b, denominator_coef=a)
class AutomaticSpeechRecognition(Scenario):
    """Armory scenario for evaluating automatic speech recognition models.

    Loads an ART speech estimator from the config, optionally applies a
    simulated multipath audio channel and/or an ART defense, then runs
    benign and adversarial evaluation, accumulating results in a
    ``metrics.MetricsLogger``.
    """
    def _evaluate(
        self,
        config: dict,
        num_eval_batches: Optional[int],
        skip_benign: Optional[bool],
        skip_attack: Optional[bool],
        skip_misclassified: Optional[bool],
    ) -> dict:
        """
        Evaluate the config and return a results dict.

        :param config: full scenario configuration (model, dataset, attack,
            defense, metric, scenario sections)
        :param num_eval_batches: maximum number of evaluation batches, or
            None to evaluate the whole split
        :param skip_benign: if truthy, skip evaluation on benign examples
        :param skip_attack: if truthy, skip adversarial evaluation
        :param skip_misclassified: not supported for this scenario; must be falsy
        :raises ValueError: if skip_misclassified is set, or if the
            audio_channel config is missing "delay"/"attenuation"
        """
        if skip_misclassified:
            raise ValueError("skip_misclassified shouldn't be set for ASR scenario")
        model_config = config["model"]
        estimator, fit_preprocessing_fn = load_model(model_config)
        # Optionally prepend a simulated multipath audio channel to the
        # estimator's preprocessing chain.
        audio_channel_config = config.get("adhoc", {}).get("audio_channel")
        if audio_channel_config is not None:
            logger.info("loading audio channel")
            for k in "delay", "attenuation":
                if k not in audio_channel_config:
                    raise ValueError(f"audio_channel must have key {k}")
            audio_channel = load_audio_channel(**audio_channel_config)
            if estimator.preprocessing_defences:
                estimator.preprocessing_defences.insert(0, audio_channel)
            else:
                estimator.preprocessing_defences = [audio_channel]
            # Private ART API: re-sync the estimator with its new defences.
            estimator._update_preprocessing_operations()
        # Attach an internal (pre-/post-processor) defense if configured.
        defense_config = config.get("defense") or {}
        defense_type = defense_config.get("type")
        if defense_type in ["Preprocessor", "Postprocessor"]:
            logger.info(f"Applying internal {defense_type} defense to estimator")
            estimator = load_defense_internal(config["defense"], estimator)
        # Optionally train the estimator (possibly via a Trainer defense).
        if model_config["fit"]:
            logger.info(
                f"Fitting model {model_config['module']}.{model_config['name']}..."
            )
            fit_kwargs = model_config["fit_kwargs"]
            logger.info(f"Loading train dataset {config['dataset']['name']}...")
            # Temporarily swap in the (optional) training batch size, then
            # restore the evaluation batch size afterwards.
            batch_size = config["dataset"].pop("batch_size")
            config["dataset"]["batch_size"] = fit_kwargs.get(
                "fit_batch_size", batch_size
            )
            train_data = load_dataset(
                config["dataset"],
                epochs=fit_kwargs["nb_epochs"],
                split=config["dataset"].get("train_split", "train_clean100"),
                preprocessing_fn=fit_preprocessing_fn,
                shuffle_files=True,
            )
            config["dataset"]["batch_size"] = batch_size
            if defense_type == "Trainer":
                logger.info(f"Training with {defense_type} defense...")
                defense = load_defense_wrapper(config["defense"], estimator)
                defense.fit_generator(train_data, **fit_kwargs)
            else:
                logger.info("Fitting estimator on clean train dataset...")
                estimator.fit_generator(train_data, **fit_kwargs)
        if defense_type == "Transform":
            # NOTE: Transform currently not supported
            logger.info(f"Transforming estimator with {defense_type} defense...")
            defense = load_defense_wrapper(config["defense"], estimator)
            estimator = defense()
        attack_config = config["attack"]
        attack_type = attack_config.get("type")
        targeted = bool(attack_config.get("targeted"))
        metrics_logger = metrics.MetricsLogger.from_config(
            config["metric"],
            skip_benign=skip_benign,
            skip_attack=skip_attack,
            targeted=targeted,
        )
        if config["dataset"]["batch_size"] != 1:
            logger.warning("Evaluation batch_size != 1 may not be supported.")
        predict_kwargs = config["model"].get("predict_kwargs", {})
        eval_split = config["dataset"].get("eval_split", "test_clean")
        if skip_benign:
            logger.info("Skipping benign classification...")
        else:
            # Evaluate the ART estimator on benign test examples
            logger.info(f"Loading test dataset {config['dataset']['name']}...")
            test_data = load_dataset(
                config["dataset"],
                epochs=1,
                split=eval_split,
                num_batches=num_eval_batches,
                shuffle_files=False,
            )
            logger.info("Running inference on benign examples...")
            for x, y in tqdm(test_data, desc="Benign"):
                # Ensure that input sample isn't overwritten by estimator
                x.flags.writeable = False
                with metrics.resource_context(
                    name="Inference",
                    profiler=config["metric"].get("profiler_type"),
                    computational_resource_dict=metrics_logger.computational_resource_dict,
                ):
                    y_pred = estimator.predict(x, **predict_kwargs)
                metrics_logger.update_task(y, y_pred)
            metrics_logger.log_task()
        # Early exits: attack skipped explicitly or via the adhoc flag.
        if skip_attack:
            logger.info("Skipping attack generation...")
            return metrics_logger.results()
        # Imperceptible attack still WIP
        if (config.get("adhoc") or {}).get("skip_adversarial"):
            logger.info("Skipping adversarial classification...")
            return metrics_logger.results()
        # Evaluate the ART estimator on adversarial test examples
        logger.info("Generating or loading / testing adversarial examples...")
        if attack_type == "preloaded":
            # Pre-generated adversarial examples shipped with the dataset.
            test_data = load_adversarial_dataset(
                attack_config,
                epochs=1,
                split="adversarial",
                num_batches=num_eval_batches,
                shuffle_files=False,
            )
        else:
            # Generate adversarial examples on the fly with an ART attack.
            attack = load_attack(attack_config, estimator)
            if targeted != attack.targeted:
                logger.warning(
                    f"targeted config {targeted} != attack field {attack.targeted}"
                )
            test_data = load_dataset(
                config["dataset"],
                epochs=1,
                split=eval_split,
                num_batches=num_eval_batches,
                shuffle_files=False,
            )
            if targeted:
                label_targeter = load_label_targeter(attack_config["targeted_labels"])
        # Optionally export (x, x_adv) sample pairs for inspection.
        export_samples = config["scenario"].get("export_samples")
        if export_samples is not None and export_samples > 0:
            sample_exporter = SampleExporter(
                self.scenario_output_dir, test_data.context, export_samples
            )
        else:
            sample_exporter = None
        for x, y in tqdm(test_data, desc="Attack"):
            with metrics.resource_context(
                name="Attack",
                profiler=config["metric"].get("profiler_type"),
                computational_resource_dict=metrics_logger.computational_resource_dict,
            ):
                if attack_type == "preloaded":
                    # Preloaded batches pack (benign, adversarial) together.
                    x, x_adv = x
                    if targeted:
                        y, y_target = y
                elif attack_config.get("use_label"):
                    x_adv = attack.generate(x=x, y=y)
                elif targeted:
                    y_target = label_targeter.generate(y)
                    x_adv = attack.generate(x=x, y=y_target)
                else:
                    x_adv = attack.generate(x=x)
            # Ensure that input sample isn't overwritten by estimator
            x_adv.flags.writeable = False
            y_pred_adv = estimator.predict(x_adv, **predict_kwargs)
            metrics_logger.update_task(y, y_pred_adv, adversarial=True)
            if targeted:
                metrics_logger.update_task(
                    y_target, y_pred_adv, adversarial=True, targeted=True,
                )
            metrics_logger.update_perturbation(x, x_adv)
            if sample_exporter is not None:
                sample_exporter.export(x, x_adv, y, y_pred_adv)
        metrics_logger.log_task(adversarial=True)
        if targeted:
            metrics_logger.log_task(adversarial=True, targeted=True)
        return metrics_logger.results()
| [
"logging.getLogger",
"armory.utils.config_loading.load_label_targeter",
"art.preprocessing.audio.LFilterPyTorch",
"armory.utils.export.SampleExporter",
"armory.utils.metrics.MetricsLogger.from_config",
"tqdm.tqdm",
"art.preprocessing.audio.LFilter",
"armory.utils.config_loading.load_defense_wrapper",
... | [((530, 557), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (547, 557), False, 'import logging\n'), ((2030, 2103), 'art.preprocessing.audio.LFilter', 'LFilter', ([], {'numerator_coef': 'numerator_coef', 'denominator_coef': 'denominator_coef'}), '(numerator_coef=numerator_coef, denominator_coef=denominator_coef)\n', (2037, 2103), False, 'from art.preprocessing.audio import LFilter, LFilterPyTorch\n'), ((1206, 1221), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (1214, 1221), True, 'import numpy as np\n'), ((1249, 1264), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (1257, 1264), True, 'import numpy as np\n'), ((1479, 1498), 'numpy.zeros', 'np.zeros', (['(delay + 1)'], {}), '(delay + 1)\n', (1487, 1498), True, 'import numpy as np\n'), ((1603, 1632), 'numpy.zeros_like', 'np.zeros_like', (['numerator_coef'], {}), '(numerator_coef)\n', (1616, 1632), True, 'import numpy as np\n'), ((2654, 2678), 'armory.utils.config_loading.load_model', 'load_model', (['model_config'], {}), '(model_config)\n', (2664, 2678), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((5382, 5506), 'armory.utils.metrics.MetricsLogger.from_config', 'metrics.MetricsLogger.from_config', (["config['metric']"], {'skip_benign': 'skip_benign', 'skip_attack': 'skip_attack', 'targeted': 'targeted'}), "(config['metric'], skip_benign=skip_benign,\n skip_attack=skip_attack, targeted=targeted)\n", (5415, 5506), False, 'from armory.utils import metrics\n'), ((8664, 8694), 'tqdm.tqdm', 'tqdm', (['test_data'], {'desc': '"""Attack"""'}), "(test_data, desc='Attack')\n", (8668, 8694), False, 'from tqdm import tqdm\n'), ((1716, 1801), 'art.preprocessing.audio.LFilterPyTorch', 'LFilterPyTorch', ([], {'numerator_coef': 'numerator_coef', 'denominator_coef': 'denominator_coef'}), '(numerator_coef=numerator_coef, 
denominator_coef=denominator_coef\n )\n', (1730, 1801), False, 'from art.preprocessing.audio import LFilter, LFilterPyTorch\n'), ((3627, 3678), 'armory.utils.config_loading.load_defense_internal', 'load_defense_internal', (["config['defense']", 'estimator'], {}), "(config['defense'], estimator)\n", (3648, 3678), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((5126, 5176), 'armory.utils.config_loading.load_defense_wrapper', 'load_defense_wrapper', (["config['defense']", 'estimator'], {}), "(config['defense'], estimator)\n", (5146, 5176), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((6098, 6213), 'armory.utils.config_loading.load_dataset', 'load_dataset', (["config['dataset']"], {'epochs': '(1)', 'split': 'eval_split', 'num_batches': 'num_eval_batches', 'shuffle_files': '(False)'}), "(config['dataset'], epochs=1, split=eval_split, num_batches=\n num_eval_batches, shuffle_files=False)\n", (6110, 6213), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((6395, 6425), 'tqdm.tqdm', 'tqdm', (['test_data'], {'desc': '"""Benign"""'}), "(test_data, desc='Benign')\n", (6399, 6425), False, 'from tqdm import tqdm\n'), ((7519, 7644), 'armory.utils.config_loading.load_adversarial_dataset', 'load_adversarial_dataset', (['attack_config'], {'epochs': '(1)', 'split': '"""adversarial"""', 'num_batches': 'num_eval_batches', 'shuffle_files': '(False)'}), "(attack_config, epochs=1, split='adversarial',\n num_batches=num_eval_batches, shuffle_files=False)\n", (7543, 7644), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, 
load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((7771, 7808), 'armory.utils.config_loading.load_attack', 'load_attack', (['attack_config', 'estimator'], {}), '(attack_config, estimator)\n', (7782, 7808), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((8011, 8126), 'armory.utils.config_loading.load_dataset', 'load_dataset', (["config['dataset']"], {'epochs': '(1)', 'split': 'eval_split', 'num_batches': 'num_eval_batches', 'shuffle_files': '(False)'}), "(config['dataset'], epochs=1, split=eval_split, num_batches=\n num_eval_batches, shuffle_files=False)\n", (8023, 8126), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((8488, 8563), 'armory.utils.export.SampleExporter', 'SampleExporter', (['self.scenario_output_dir', 'test_data.context', 'export_samples'], {}), '(self.scenario_output_dir, test_data.context, export_samples)\n', (8502, 8563), False, 'from armory.utils.export import SampleExporter\n'), ((4653, 4703), 'armory.utils.config_loading.load_defense_wrapper', 'load_defense_wrapper', (["config['defense']", 'estimator'], {}), "(config['defense'], estimator)\n", (4673, 4703), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n'), ((8275, 8328), 'armory.utils.config_loading.load_label_targeter', 'load_label_targeter', (["attack_config['targeted_labels']"], {}), "(attack_config['targeted_labels'])\n", (8294, 8328), False, 'from armory.utils.config_loading import load_dataset, load_model, load_attack, load_adversarial_dataset, load_defense_wrapper, load_defense_internal, load_label_targeter\n')] |
# file that contains db models to be exposed via a REST API
from models import room, survey, wifi_log, timetable, module # import db models
from app import app # import Flask app
from auth import auth # import Auth app to provide user authentificaiton
from flask import request # import request object to parse json request data
from flask_peewee.rest import RestAPI,UserAuthentication, RestrictOwnerResource, AdminAuthentication
# RestrictOwnerResource subclass which prevents users modifying another user's content
class SurveyResource(RestrictOwnerResource):
    """REST resource for survey rows.

    Restricts modification to the owning user (``owner_field``) and only
    accepts POSTs for modules the requesting user instructs.
    """
    owner_field = 'reporter'

    def check_post(self):
        """Return True iff the requesting user instructs the posted module.

        Reads ``reporter`` and ``module_code`` from the incoming JSON body
        and checks the module table for a matching instructor.
        """
        obj = request.get_json()  # parsed JSON body of the incoming request
        user = obj["reporter"]
        mod = obj["module_code"]
        # Select the module row(s) matching the posted module_code.
        modules = module.select().where(module.module_code == mod)
        # Authorized iff the user is the instructor on any matching row.
        # any() short-circuits on the first match, unlike the original
        # full-scan loop (whose docstring also misspelled "function").
        return any(str(item.instructor) == user for item in modules)
# Per-request user authentication (the API's default).
user_auth = UserAuthentication(auth)
# Admin-only authentication, used for the read-only internal models below.
admin_auth = AdminAuthentication(auth)
# instantiate our api wrapper, specifying user_auth as the default
api = RestAPI(app, default_auth=user_auth)
# register models so they are exposed via /api/<model>/
api.register(room, auth=admin_auth, allowed_methods=['GET'])
# Surveys use SurveyResource: owner-restricted, and POST is allowed.
api.register(survey,SurveyResource,allowed_methods=['GET','POST'])
api.register(wifi_log, auth=admin_auth,allowed_methods=['GET'])
api.register(timetable, auth=admin_auth, allowed_methods=['GET'])
api.register(module, auth=admin_auth, allowed_methods=['GET'])
| [
"flask_peewee.rest.RestAPI",
"flask_peewee.rest.UserAuthentication",
"flask.request.get_json",
"models.module.select",
"flask_peewee.rest.AdminAuthentication"
] | [((1384, 1408), 'flask_peewee.rest.UserAuthentication', 'UserAuthentication', (['auth'], {}), '(auth)\n', (1402, 1408), False, 'from flask_peewee.rest import RestAPI, UserAuthentication, RestrictOwnerResource, AdminAuthentication\n'), ((1453, 1478), 'flask_peewee.rest.AdminAuthentication', 'AdminAuthentication', (['auth'], {}), '(auth)\n', (1472, 1478), False, 'from flask_peewee.rest import RestAPI, UserAuthentication, RestrictOwnerResource, AdminAuthentication\n'), ((1553, 1589), 'flask_peewee.rest.RestAPI', 'RestAPI', (['app'], {'default_auth': 'user_auth'}), '(app, default_auth=user_auth)\n', (1560, 1589), False, 'from flask_peewee.rest import RestAPI, UserAuthentication, RestrictOwnerResource, AdminAuthentication\n'), ((759, 777), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (775, 777), False, 'from flask import request\n'), ((906, 921), 'models.module.select', 'module.select', ([], {}), '()\n', (919, 921), False, 'from models import room, survey, wifi_log, timetable, module\n')] |
from django.urls import path
from purple_admin import views
urlpatterns = [
    path('', views.cabinet, name='admin_panel_cabinet'),
    # Admin pages for route names
    path('route_list', views.cabinet_list, {'type': 'route'}, name='admin_panel_route_list'),
    path('route_add', views.cabinet_add, {'type': 'route'}, name='admin_panel_route_add'),
    path('route_edit/<int:pk>/', views.cabinet_edit, {'type': 'route'}, name='admin_panel_route_edit'),
    path('route_delete/<int:pk>/', views.cabinet_delete, {'type': 'route'}, name='admin_panel_route_delete'),
    # Admin pages for stop (platform) names
    path('route_platform_list', views.cabinet_list, {'type': 'route_platform'}, name='admin_panel_route_platform_list'),
    path('route_platform_add', views.cabinet_add, {'type': 'route_platform'}, name='admin_panel_route_platform_add'),
    path('route_platform_edit/<int:pk>/', views.cabinet_edit, {'type': 'route_platform'},
         name='admin_panel_route_platform_edit'),
    path('route_platform_delete/<int:pk>/', views.cabinet_delete, {'type': 'route_platform'},
         name='admin_panel_route_platform_delete'),
    path('route_relation_add_ajax', views.cabinet_add, {'type': 'route_platform_type'},
         name='admin_panel_route_platform_type_relation_ajax_add'),
    # Admin pages for vehicles (TS)
    path('ts_list', views.cabinet_list, {'type': 'ts'}, name='admin_panel_ts_list'),
    path('ts_add', views.cabinet_add, {'type': 'ts'}, name='admin_panel_ts_add'),
    path('ts_edit/<int:pk>/', views.cabinet_edit, {'type': 'ts'}, name='admin_panel_ts_edit'),
    path('ts_delete/<int:pk>/', views.cabinet_delete, {'type': 'ts'}, name='admin_panel_ts_delete'),
    # Admin page for creating a route on the map
    path('map_route_editor_add', views.mapped_route_add, name='admin_panel_mapped_route_add'),
]
| [
"django.urls.path"
] | [((82, 133), 'django.urls.path', 'path', (['""""""', 'views.cabinet'], {'name': '"""admin_panel_cabinet"""'}), "('', views.cabinet, name='admin_panel_cabinet')\n", (86, 133), False, 'from django.urls import path\n'), ((176, 269), 'django.urls.path', 'path', (['"""route_list"""', 'views.cabinet_list', "{'type': 'route'}"], {'name': '"""admin_panel_route_list"""'}), "('route_list', views.cabinet_list, {'type': 'route'}, name=\n 'admin_panel_route_list')\n", (180, 269), False, 'from django.urls import path\n'), ((270, 360), 'django.urls.path', 'path', (['"""route_add"""', 'views.cabinet_add', "{'type': 'route'}"], {'name': '"""admin_panel_route_add"""'}), "('route_add', views.cabinet_add, {'type': 'route'}, name=\n 'admin_panel_route_add')\n", (274, 360), False, 'from django.urls import path\n'), ((361, 464), 'django.urls.path', 'path', (['"""route_edit/<int:pk>/"""', 'views.cabinet_edit', "{'type': 'route'}"], {'name': '"""admin_panel_route_edit"""'}), "('route_edit/<int:pk>/', views.cabinet_edit, {'type': 'route'}, name=\n 'admin_panel_route_edit')\n", (365, 464), False, 'from django.urls import path\n'), ((465, 573), 'django.urls.path', 'path', (['"""route_delete/<int:pk>/"""', 'views.cabinet_delete', "{'type': 'route'}"], {'name': '"""admin_panel_route_delete"""'}), "('route_delete/<int:pk>/', views.cabinet_delete, {'type': 'route'},\n name='admin_panel_route_delete')\n", (469, 573), False, 'from django.urls import path\n'), ((612, 731), 'django.urls.path', 'path', (['"""route_platform_list"""', 'views.cabinet_list', "{'type': 'route_platform'}"], {'name': '"""admin_panel_route_platform_list"""'}), "('route_platform_list', views.cabinet_list, {'type': 'route_platform'},\n name='admin_panel_route_platform_list')\n", (616, 731), False, 'from django.urls import path\n'), ((733, 849), 'django.urls.path', 'path', (['"""route_platform_add"""', 'views.cabinet_add', "{'type': 'route_platform'}"], {'name': '"""admin_panel_route_platform_add"""'}), 
"('route_platform_add', views.cabinet_add, {'type': 'route_platform'},\n name='admin_panel_route_platform_add')\n", (737, 849), False, 'from django.urls import path\n'), ((851, 980), 'django.urls.path', 'path', (['"""route_platform_edit/<int:pk>/"""', 'views.cabinet_edit', "{'type': 'route_platform'}"], {'name': '"""admin_panel_route_platform_edit"""'}), "('route_platform_edit/<int:pk>/', views.cabinet_edit, {'type':\n 'route_platform'}, name='admin_panel_route_platform_edit')\n", (855, 980), False, 'from django.urls import path\n'), ((991, 1126), 'django.urls.path', 'path', (['"""route_platform_delete/<int:pk>/"""', 'views.cabinet_delete', "{'type': 'route_platform'}"], {'name': '"""admin_panel_route_platform_delete"""'}), "('route_platform_delete/<int:pk>/', views.cabinet_delete, {'type':\n 'route_platform'}, name='admin_panel_route_platform_delete')\n", (995, 1126), False, 'from django.urls import path\n'), ((1137, 1287), 'django.urls.path', 'path', (['"""route_relation_add_ajax"""', 'views.cabinet_add', "{'type': 'route_platform_type'}"], {'name': '"""admin_panel_route_platform_type_relation_ajax_add"""'}), "('route_relation_add_ajax', views.cabinet_add, {'type':\n 'route_platform_type'}, name=\n 'admin_panel_route_platform_type_relation_ajax_add')\n", (1141, 1287), False, 'from django.urls import path\n'), ((1310, 1389), 'django.urls.path', 'path', (['"""ts_list"""', 'views.cabinet_list', "{'type': 'ts'}"], {'name': '"""admin_panel_ts_list"""'}), "('ts_list', views.cabinet_list, {'type': 'ts'}, name='admin_panel_ts_list')\n", (1314, 1389), False, 'from django.urls import path\n'), ((1395, 1471), 'django.urls.path', 'path', (['"""ts_add"""', 'views.cabinet_add', "{'type': 'ts'}"], {'name': '"""admin_panel_ts_add"""'}), "('ts_add', views.cabinet_add, {'type': 'ts'}, name='admin_panel_ts_add')\n", (1399, 1471), False, 'from django.urls import path\n'), ((1477, 1571), 'django.urls.path', 'path', (['"""ts_edit/<int:pk>/"""', 'views.cabinet_edit', "{'type': 'ts'}"], 
{'name': '"""admin_panel_ts_edit"""'}), "('ts_edit/<int:pk>/', views.cabinet_edit, {'type': 'ts'}, name=\n 'admin_panel_ts_edit')\n", (1481, 1571), False, 'from django.urls import path\n'), ((1572, 1672), 'django.urls.path', 'path', (['"""ts_delete/<int:pk>/"""', 'views.cabinet_delete', "{'type': 'ts'}"], {'name': '"""admin_panel_ts_delete"""'}), "('ts_delete/<int:pk>/', views.cabinet_delete, {'type': 'ts'}, name=\n 'admin_panel_ts_delete')\n", (1576, 1672), False, 'from django.urls import path\n'), ((1714, 1808), 'django.urls.path', 'path', (['"""map_route_editor_add"""', 'views.mapped_route_add'], {'name': '"""admin_panel_mapped_route_add"""'}), "('map_route_editor_add', views.mapped_route_add, name=\n 'admin_panel_mapped_route_add')\n", (1718, 1808), False, 'from django.urls import path\n')] |
import tensorflow as tf
from os import path
import numpy as np
from scipy import misc
from styx_msgs.msg import TrafficLight
import cv2
import rospy
import tensorflow as tf
class CarlaModel(object):
    """Traffic-light color classifier backed by a frozen TF1 detection graph.

    The graph (an object-detection-API export) is loaded lazily on the first
    call to :meth:`predict`.  Each frame is scanned in sub-grids for
    traffic-light detections; an accepted crop is then classified as
    RED / YELLOW / GREEN from where its brightest pixels sit vertically.
    """
    def __init__(self, model_checkpoint):
        # Session is created lazily in predict() so construction stays cheap.
        self.sess = None
        self.checkpoint = model_checkpoint  # path to the frozen graph (.pb)
        self.prob_thr = 0.90  # minimum detection score to accept a box
        self.TRAFFIC_LIGHT_CLASS = 10  # class id checked against detection_classes
        self.image_no = 10000  # frame counter, used to name debug images
        tf.reset_default_graph()
    def predict(self, img):
        """Return the TrafficLight state detected in *img*.

        :param img: image array of shape (H, W, 3); presumably RGB, since the
            crop is converted with cv2.COLOR_RGB2HSV -- TODO confirm caller.
        :return: TrafficLight.RED / GREEN / YELLOW, or UNKNOWN when no
            acceptable traffic-light detection is found.
        """
        # Lazy one-time graph/session setup and tensor handle lookup.
        if self.sess == None:  # NOTE(review): prefer "is None"
            gd = tf.GraphDef()
            gd.ParseFromString(tf.gfile.GFile(self.checkpoint, "rb").read())
            tf.import_graph_def(gd, name="object_detection_api")
            self.sess = tf.Session()
            g = tf.get_default_graph()
            self.image = g.get_tensor_by_name("object_detection_api/image_tensor:0")
            self.boxes = g.get_tensor_by_name("object_detection_api/detection_boxes:0")
            self.scores = g.get_tensor_by_name("object_detection_api/detection_scores:0")
            self.classes = g.get_tensor_by_name("object_detection_api/detection_classes:0")
        img_h, img_w = img.shape[:2]
        self.image_no = self.image_no+1
        # NOTE(review): unconditional debug dump on every frame -- consider
        # gating these imwrite calls behind a debug flag.
        cv2.imwrite("full_"+str(self.image_no)+".png", img)
        # Scan overlapping sub-grids: two vertical bands x three horizontal
        # thirds of the frame.
        for h0 in [img_h//3, (img_h//3)-150]:
            for w0 in [0, img_w//3, img_w*2//3]:
                grid = img[h0:h0+img_h//3+50, w0:w0+img_w//3, :] # grid
                pred_boxes, pred_scores, pred_classes = self.sess.run([self.boxes, self.scores, self.classes],
                    feed_dict={self.image: np.expand_dims(grid, axis=0)})
                pred_boxes = pred_boxes.squeeze()
                pred_scores = pred_scores.squeeze() # descending order of confidence
                pred_classes = pred_classes.squeeze()
                traffic_light = None
                h, w = grid.shape[:2]
                cv2.imwrite("grid_"+str(self.image_no)+"_"+str(h0)+"_"+str(w0)+".png",grid)
                rospy.loginfo("w,h is %s,%s",h0,w0)
                for i in range(pred_boxes.shape[0]):
                    box = pred_boxes[i]
                    score = pred_scores[i]
                    if score < self.prob_thr: continue
                    if pred_classes[i] != self.TRAFFIC_LIGHT_CLASS: continue
                    # Boxes are normalized [y0, x0, y1, x1]; scale to pixels.
                    x0, y0 = box[1] * w, box[0] * h
                    x1, y1 = box[3] * w, box[2] * h
                    x0, y0, x1, y1 = map(int, [x0, y0, x1, y1])
                    x_diff = x1 - x0
                    y_diff = y1 - y0
                    # Aspect ratio filter: traffic lights are tall and narrow.
                    xy_ratio = x_diff/float(y_diff)
                    rospy.loginfo("image_no is %s", self.image_no)
                    rospy.loginfo("x,y ratio is %s",xy_ratio)
                    rospy.loginfo("score is %s",score)
                    if xy_ratio > 0.48: continue
                    # Reject detections that are too small relative to the grid.
                    area = np.abs((x1-x0) * (y1-y0)) / float(w*h)
                    rospy.loginfo("area is %s",area)
                    if area <= 0.001: continue
                    traffic_light = grid[y0:y1, x0:x1]
                    rospy.loginfo("traffic light given")
                # NOTE(review): the breaks below sit OUTSIDE the detection
                # loop, so within one grid the LAST accepted box wins (the
                # original "select first" comment did not match the code);
                # the grid-level loops do stop at the first grid with a hit.
                if traffic_light is not None: break
            if traffic_light is not None: break
        if traffic_light is None:
            pass
        else:
            # h0/w0/xy_ratio/score still hold the values from the winning grid/box.
            rospy.loginfo("w,h is %s,%s",h0,w0)
            rospy.loginfo("x,y ratio is %s",xy_ratio)
            rospy.loginfo("score is %s",score)
            cv2.imwrite("light_"+str(self.image_no)+".png",traffic_light)
            #cv2.imwrite("full_"+str(self.image_no)+".png", img)
            #cv2.imwrite("grid_"+str(self.image_no)+".png",grid)
            #self.image_no = self.image_no+1
            # Classify by where the brightest pixels (HSV value channel) sit:
            # lit lamp near the top -> RED, near the bottom -> GREEN, else YELLOW.
            brightness = cv2.cvtColor(traffic_light, cv2.COLOR_RGB2HSV)[:,:,-1]
            hs, ws = np.where(brightness >= (brightness.max()-30))
            hs_mean = hs.mean()
            tl_h = traffic_light.shape[0]
            if hs_mean / tl_h < 0.4:
                rospy.loginfo("image"+str(self.image_no-1)+" is RED")
                return TrafficLight.RED
            elif hs_mean / tl_h >= 0.55:
                rospy.loginfo("image"+str(self.image_no-1)+" is GREEN")
                return TrafficLight.GREEN
            else:
                rospy.loginfo("image"+str(self.image_no-1)+" is YELLOW")
                return TrafficLight.YELLOW
        return TrafficLight.UNKNOWN
| [
"numpy.abs",
"tensorflow.reset_default_graph",
"tensorflow.Session",
"tensorflow.GraphDef",
"tensorflow.import_graph_def",
"cv2.cvtColor",
"tensorflow.gfile.GFile",
"numpy.expand_dims",
"rospy.loginfo",
"tensorflow.get_default_graph"
] | [((417, 441), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (439, 441), True, 'import tensorflow as tf\n'), ((522, 535), 'tensorflow.GraphDef', 'tf.GraphDef', ([], {}), '()\n', (533, 535), True, 'import tensorflow as tf\n'), ((625, 677), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['gd'], {'name': '"""object_detection_api"""'}), "(gd, name='object_detection_api')\n", (644, 677), True, 'import tensorflow as tf\n'), ((702, 714), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (712, 714), True, 'import tensorflow as tf\n'), ((740, 762), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (760, 762), True, 'import tensorflow as tf\n'), ((3384, 3421), 'rospy.loginfo', 'rospy.loginfo', (['"""w,h is %s,%s"""', 'h0', 'w0'], {}), "('w,h is %s,%s', h0, w0)\n", (3397, 3421), False, 'import rospy\n'), ((3432, 3474), 'rospy.loginfo', 'rospy.loginfo', (['"""x,y ratio is %s"""', 'xy_ratio'], {}), "('x,y ratio is %s', xy_ratio)\n", (3445, 3474), False, 'import rospy\n'), ((3486, 3521), 'rospy.loginfo', 'rospy.loginfo', (['"""score is %s"""', 'score'], {}), "('score is %s', score)\n", (3499, 3521), False, 'import rospy\n'), ((2027, 2064), 'rospy.loginfo', 'rospy.loginfo', (['"""w,h is %s,%s"""', 'h0', 'w0'], {}), "('w,h is %s,%s', h0, w0)\n", (2040, 2064), False, 'import rospy\n'), ((3795, 3841), 'cv2.cvtColor', 'cv2.cvtColor', (['traffic_light', 'cv2.COLOR_RGB2HSV'], {}), '(traffic_light, cv2.COLOR_RGB2HSV)\n', (3807, 3841), False, 'import cv2\n'), ((2646, 2692), 'rospy.loginfo', 'rospy.loginfo', (['"""image_no is %s"""', 'self.image_no'], {}), "('image_no is %s', self.image_no)\n", (2659, 2692), False, 'import rospy\n'), ((2713, 2755), 'rospy.loginfo', 'rospy.loginfo', (['"""x,y ratio is %s"""', 'xy_ratio'], {}), "('x,y ratio is %s', xy_ratio)\n", (2726, 2755), False, 'import rospy\n'), ((2775, 2810), 'rospy.loginfo', 'rospy.loginfo', (['"""score is %s"""', 'score'], {}), "('score is %s', score)\n", (2788, 
2810), False, 'import rospy\n'), ((2957, 2990), 'rospy.loginfo', 'rospy.loginfo', (['"""area is %s"""', 'area'], {}), "('area is %s', area)\n", (2970, 2990), False, 'import rospy\n'), ((3112, 3148), 'rospy.loginfo', 'rospy.loginfo', (['"""traffic light given"""'], {}), "('traffic light given')\n", (3125, 3148), False, 'import rospy\n'), ((567, 604), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['self.checkpoint', '"""rb"""'], {}), "(self.checkpoint, 'rb')\n", (581, 604), True, 'import tensorflow as tf\n'), ((2898, 2927), 'numpy.abs', 'np.abs', (['((x1 - x0) * (y1 - y0))'], {}), '((x1 - x0) * (y1 - y0))\n', (2904, 2927), True, 'import numpy as np\n'), ((1636, 1664), 'numpy.expand_dims', 'np.expand_dims', (['grid'], {'axis': '(0)'}), '(grid, axis=0)\n', (1650, 1664), True, 'import numpy as np\n')] |
"""
``goless`` introduces go-like channels and select to Python,
built on top of Stackless Python (and maybe one day gevent).
Use :func:`goless.chan` to create a synchronous or buffered channel.
Use :func:`goless.select` like you would the ``Select`` function in Go's reflect package
(since Python lacks a switch/case statement, replicating Go's select statement syntax
wasn't very effective).
"""
import logging
import sys
import traceback
from .backends import current as _be
# noinspection PyUnresolvedReferences
from .channels import chan, ChannelClosed
# noinspection PyUnresolvedReferences
from .selecting import dcase, rcase, scase, select
version_info = 0, 0, 1  # (major, minor, patch)
version = '.'.join([str(v) for v in version_info])  # dotted string, e.g. "0.0.1"
def on_panic(etype, value, tb):
    """
    Called when there is an unhandled error in a goroutine (a *panic*).
    By default, logs the formatted traceback and exits the process.

    :param etype: Exception type.
    :param value: Exception instance.
    :param tb: Traceback object.
    """
    # format_exception returns a *list* of strings; join it so the log shows
    # a readable traceback instead of the list's repr with escaped newlines.
    logging.critical(''.join(traceback.format_exception(etype, value, tb)))
    _be.propagate_exc(SystemExit, 1)
def go(func, *args, **kwargs):
    """
    Spawn ``func`` in a new tasklet, analogous to a Go goroutine.

    An unhandled exception inside the tasklet (a *panic*) is routed to
    :func:`goless.on_panic`, which by default logs the error and exits
    the process.

    :param args: Positional arguments forwarded to ``func``.
    :param kwargs: Keyword arguments forwarded to ``func``.
    """
    def _run_guarded(target):
        try:
            target(*args, **kwargs)
        except BaseException:  # same coverage as a bare except: route to on_panic
            on_panic(*sys.exc_info())
    _be.start(_run_guarded, func)
| [
"sys.exc_info",
"traceback.format_exception"
] | [((900, 944), 'traceback.format_exception', 'traceback.format_exception', (['etype', 'value', 'tb'], {}), '(etype, value, tb)\n', (926, 944), False, 'import traceback\n'), ((1506, 1520), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1518, 1520), False, 'import sys\n')] |
# Import setup() from setuptools, not distutils: distutils.core.setup()
# does not understand setuptools-only keywords such as install_requires
# and python_requires, so those would have been silently ignored.
from setuptools import setup, find_packages

# Long description for the PyPI project page, taken from the README.
with open('README.md', 'r') as fh:
    long_description = fh.read()

setup(
    name='pyroaman',
    version='0.1.1',
    license='MIT',
    description='Roam Research with Python',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/br-g/pyroaman',
    download_url='https://github.com/br-g/pyroaman/archive/v0.1.1.tar.gz',
    keywords=['Roam Research'],
    long_description=long_description,
    long_description_content_type='text/markdown',
    packages=find_packages(exclude=['tests']),
    python_requires='>=3.6',
    install_requires=[
        'cached_property',
        'dataclasses',
        'loguru',
        'tqdm',
        'pathlib',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
)
| [
"setuptools.find_packages"
] | [((564, 596), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests']"}), "(exclude=['tests'])\n", (577, 596), False, 'from setuptools import find_packages\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2021 The HERA Collaboration
# Licensed under the MIT License
"""Utilities for dealing with galaxy/QSO catalogs."""
import numpy as np
import matplotlib.pyplot as plt
from astropy.coordinates import SkyCoord
from .util import deg_per_hr
_xshooter_ref = "https://ui.adsabs.harvard.edu/abs/2020ApJ...905...51S/abstract"
# VIKING
_viking_ref1 = "https://ui.adsabs.harvard.edu/abs/2013ApJ...779...24V/abstract"
_viking_ref2 = "https://ui.adsabs.harvard.edu/abs/2015MNRAS.453.2259V/abstract"
_viking = {
"J2348-3054": {
"ra": "23h48m33.34s",
"dec": "-30d54m10.0s",
"z": 6.886,
"ref": _viking_ref1,
},
"J0109-3047": {
"ra": "01h09m53.13s",
"dec": "-30d47m26.3s",
"z": 6.745,
"ref": _viking_ref1,
},
"J0305-3150": {
"ra": "03h05m16.92s",
"dec": "-31d50m56.0s",
"z": 6.604,
"ref": _viking_ref1,
},
"J0328-3253": {
"ra": "03h28m35.511s",
"dec": "-32d53m22.92s",
"z": 5.860,
"ref": _viking_ref2,
},
"J0046-2837": {
"ra": "00h46m23.645s",
"dec": "-28d37m47.34s",
"z": 5.9926,
"ref": _xshooter_ref,
},
"J2211-3206": {
"ra": "22h11m12.391s",
"dec": "-32d06m12.95s",
"z": 6.3394,
"ref": _xshooter_ref,
},
"J2318-3029": {
"ra": "23h18m33.103s",
"dec": "-30d29m33.36s",
"z": 6.1456,
"ref": _xshooter_ref,
},
"J2348-3054_xshooter": {
"ra": "23h48m33.336s",
"dec": "-30d54m10.24s",
"z": 6.9007,
"ref": _xshooter_ref,
},
}
# Pan-STARRS1
_ps1_ref1 = "https://ui.adsabs.harvard.edu/abs/2014AJ....148...14B/abstract"
_ps1_ref2 = "https://ui.adsabs.harvard.edu/abs/2017ApJ...849...91M/abstract"
_ps1 = {
"PSO 231-20": {"ra": "231.6576", "dec": "-20.8335", "z": 6.5864, "ref": _ps1_ref2},
"PSO J037.9706-28.8389": {
"ra": "02h31m52.96s",
"dec": "-28d50m20.1s",
"z": 5.99,
"ref": _ps1_ref1,
},
"PSO J065.4085-26.9543": {
"ra": "04h21m38.049s",
"dec": "-26d57m15.61s",
"z": 6.1871,
"ref": _xshooter_ref,
},
}
# Banados+ 2016 https://ui.adsabs.harvard.edu/abs/2016ApJS..227...11B/abstract
# has table of all z > 5.6 quasars known at that point (March 2016).
# https://ned.ipac.caltech.edu/inrefcode?search_type=Search&refcode=2016ApJS..227...11B
# VLT ATLAS
# https://ui.adsabs.harvard.edu/abs/2015MNRAS.451L..16C/abstract
_atlas_ref1 = "https://ui.adsabs.harvard.edu/abs/2015MNRAS.451L..16C/abstract"
_atlas_ref2 = "https://ui.adsabs.harvard.edu/abs/2018MNRAS.478.1649C/abstract"
_atlas = {
"J025.6821-33.4627": {
"ra": "025.6821",
"dec": "-33.4627",
"z": 6.31,
"ref": _atlas_ref1,
},
"J332.8017-32.1036": {
"ra": "332.8017",
"dec": "-32.1036",
"z": 6.32,
"ref": _atlas_ref2,
},
}
# VHS-DES
_ps1_vhs_des = "https://ui.adsabs.harvard.edu/abs/2019MNRAS.487.1874R/abstract"
_des = {
"VDES J0020-3653": {
"ra": "00h20m31.47s",
"dec": "-36d53m41.8s",
"z": 6.5864,
"ref": _ps1_vhs_des,
},
}
_yang = "https://ui.adsabs.harvard.edu/abs/2020ApJ...904...26Y/abstract"
_decarli = "https://ui.adsabs.harvard.edu/abs/2018ApJ...854...97D/abstract"
_other = {
"J0142−3327": {"ra": "0142", "dec": "-3327", "z": 6.3379, "ref": _yang},
"J0148−2826": {"ra": "0148", "dec": "-2826", "z": 6.54, "ref": _yang},
"J2002−3013": {"ra": "2002", "dec": "-3013", "z": 6.67, "ref": _yang},
"J2318–3113": {
"ra": "23h18m18.351s",
"dec": "-31d13m46.35s",
"z": 6.444,
"ref": _decarli,
},
}
def _to_decimal(s):
if "." in s:
out = float(s)
elif s[0] == "-":
out = float(s[0:3] + "." + s[3:])
else:
out = float(s[0:2] + "." + s[2:])
return out
# Registry mapping survey name -> its quasar table; iterated by Catalog.get_all_pos.
_qso_catalogs = {"viking": _viking, "panstarrs": _ps1, "atlas": _atlas, "other": _other}
class Catalog(object):
    """
    Handle QSO catalogs.

    Parameters
    ----------
    data : str
        The type of data to handle. Right now "qso" is the only allowed value.
    kwargs : dict
        Keyword arguments saved directly on the object.
    """

    def __init__(self, data, **kwargs):
        self.data = data
        self.kwargs = kwargs

    def plot_catalog(
        self, ax=None, zmin=None, num=1, projection="rectilinear", **fig_kwargs
    ):
        """
        Plot a catalog using matplotlib.

        Parameters
        ----------
        ax : matplotlib axis object, optional
            Axes to plot on. If None, a new figure and axis are created.
        zmin : float, optional
            Minimum redshift for objects to be plotted.
        num : int, optional
            Figure number used when `ax` is not provided.
        projection : str, optional
            Projection to use; only "rectilinear" is supported.
        fig_kwargs : dict, optional
            Extra kwargs forwarded to matplotlib.pyplot.figure.

        Returns
        -------
        ax : matplotlib axis object
            The axis the catalog was drawn on.

        Raises
        ------
        NotImplementedError
            For any projection other than "rectilinear".
        """
        if projection != "rectilinear":
            raise NotImplementedError("Only know rectilinear projection right now!")

        made_ax = ax is None
        if made_ax:
            # No axis supplied: build a fresh figure and grab its axis.
            fig = plt.figure(num=num, **fig_kwargs)
            ax = fig.gca()

        # Fetch every catalog object passing the redshift cut and scatter it.
        names, coords = self.get_all_pos(zmin=zmin)
        for ra, dec, z in coords:
            ax.scatter(ra, dec)

        # Only label axes we created ourselves.
        if made_ax:
            ax.set_xlabel(r"Right Ascension [hours]", fontsize=24, labelpad=5)
            ax.set_ylabel(r"Declination [deg]", fontsize=24, labelpad=5)

        return ax

    def get_all_pos(self, zmin=None):
        """
        Return a list of (RA, DEC, redshift) for all objects.

        Parameters
        ----------
        zmin : float
            Minimum redshift for objects to be included.

        Returns
        -------
        names : list of str, shape (n_objects)
            Names of objects in the catalog.
        data : ndarray, shape (n_objects, 3)
            RA [hour angle], dec [degree], and redshift of each object.

        Raises
        ------
        ValueError
            If `self.data` is not "qso" (the only supported type).
        """
        if not self.data.lower().startswith("qso"):
            raise ValueError("Only know how to do QSOs right now.")

        names = []
        rows = []
        for catalog in _qso_catalogs.values():
            for obj_name, obj in catalog.items():
                if zmin is not None and obj["z"] < zmin:
                    continue

                if "h" in obj["ra"]:
                    # Sexagesimal strings ("23h48m...") parse directly.
                    coord = SkyCoord(obj["ra"], obj["dec"], frame="icrs")
                else:
                    # Decimal (or compact) strings: 4-character RA values are
                    # hour-style and must be converted to degrees.
                    ra = _to_decimal(obj["ra"])
                    if len(obj["ra"]) == 4:
                        ra = ra * deg_per_hr
                    dec = _to_decimal(obj["dec"])
                    coord = SkyCoord(ra, dec, unit="degree", frame="icrs")

                names.append(obj_name)
                rows.append((coord.ra.hour, coord.dec.degree, obj["z"]))

        return names, np.array(rows)
| [
"numpy.array",
"matplotlib.pyplot.figure",
"astropy.coordinates.SkyCoord"
] | [((5676, 5709), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': 'num'}), '(num=num, **fig_kwargs)\n', (5686, 5709), True, 'import matplotlib.pyplot as plt\n'), ((8031, 8045), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (8039, 8045), True, 'import numpy as np\n'), ((7872, 7895), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['ra', 'dec'], {}), '(ra, dec, **kw)\n', (7880, 7895), False, 'from astropy.coordinates import SkyCoord\n')] |
from configparser import ConfigParser
from os import path
def create_config() -> None:
    """Write a template config.ini with placeholder Telegram credentials."""
    _config.add_section("Telegram")
    # Same keys/values as before, written in declaration order.
    for option, value in (("api_id", "you api_id here"),
                          ("api_hash", "you api_hash here"),
                          ("username", "magicBot"),
                          ("session_string", "None")):
        _config.set("Telegram", option, value)
    with open(_path, "w") as config_file:
        _config.write(config_file)
def write_session_string_in_config(session_string: str) -> None:
    """Persist the given session string into the Telegram section of config.ini."""
    _config.set("Telegram", "session_string", session_string)
    with open(_path, "w") as out_file:
        _config.write(out_file)
# Module-level bootstrap: load config.ini next to this file, creating a
# template (and exiting so the user can fill it in) when it is missing.
_config: ConfigParser = ConfigParser()
_path: str = path.join(path.dirname(__file__), "config.ini")
if not path.exists(_path):
    create_config()
    # Message (Russian): "config.ini was missing, fill in the api values in it"
    print("Отсутствовал файл configs.ini файл, заполните api в нём")
    exit()
_config.read(_path)
# Public credentials read from the [Telegram] section.
API_ID = _config['Telegram']['api_id']
API_HASH = _config['Telegram']['api_hash']
USERNAME: str = _config['Telegram']['username']
# "None" or an empty string in the file means no saved session.
SESSION_STRING = (None
                  if _config['Telegram']['session_string'] == "None" or
                  _config['Telegram']['session_string'] == ""
                  else _config['Telegram']['session_string'])
| [
"os.path.dirname",
"os.path.exists",
"configparser.ConfigParser"
] | [((659, 673), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (671, 673), False, 'from configparser import ConfigParser\n'), ((697, 719), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (709, 719), False, 'from os import path\n'), ((742, 760), 'os.path.exists', 'path.exists', (['_path'], {}), '(_path)\n', (753, 760), False, 'from os import path\n')] |
import location
import unittest
class LocationTest(unittest.TestCase):
    """Tests for serializing a Location to its JSON representation."""

    def testToJson(self):
        loc = location.Location(
            name='foo',
            local_ip_address={'en0': {'local_ip_address': '1.2.3.4'}})
        as_json = loc.to_json()
        self.assertEqual(as_json['name'], 'foo')
        nested = as_json['local_ip_address']['en0']['local_ip_address']
        self.assertEqual(nested, '1.2.3.4')
| [
"location.Location"
] | [((117, 209), 'location.Location', 'location.Location', ([], {'name': '"""foo"""', 'local_ip_address': "{'en0': {'local_ip_address': '1.2.3.4'}}"}), "(name='foo', local_ip_address={'en0': {'local_ip_address':\n '1.2.3.4'}})\n", (134, 209), False, 'import location\n')] |
"""*****************************************************************************************
MIT License
Copyright (c) 2022 <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*****************************************************************************************"""
import Utils
from helper_functions import Fast_Caratheodory
import numpy as np
from scipy.optimize import linprog
from numpy import linalg as la
from scipy.linalg import null_space
from numpy.linalg import matrix_rank
from sklearn.decomposition import TruncatedSVD
import time
######################################################## Caratheodory ##################################################
def computeInitialWeightVector(P, p):
    """
    Solve the linear program dot(P^T, x) = p, sum(x) = 1, x in [0, infty)^n,
    where n denotes the number of rows of P.

    :param P: A numpy matrix whose rows are points.
    :param p: A numpy array representing a point.
    :return: A numpy array of n non-negative weights with respect to each row of P.
    """
    N = P.shape[0]  # number of rows of P
    # Equality constraints: P^T w = p stacked with sum(w) = 1 (convexity).
    Q = np.vstack((P.T, np.ones((1, N))))
    b = np.hstack((p, 1))
    res = linprog(np.ones((N,)), A_eq=Q, b_eq=b,
                  options={'maxiter': int(1e7), 'tol': 1e-10})
    w = res.x
    # BUGFIX: the original wrote ``assert (cond, msg)`` -- asserting a
    # non-empty tuple, which is always truthy and never fires. Assert the
    # reconstruction residual directly.
    assert np.linalg.norm(np.dot(P.T, w) - p) <= 1e-9, \
        np.linalg.norm(np.dot(P.T, w) - p)
    return w
def attainCaratheodorySet(P, p):
    """
    Return a set of at most d+1 indices of rows of P (d being the dimension of
    the rows of P) whose convex combination equals p. Relies on the fast
    Caratheodory implementation from "Fast and Accurate Least-Mean-Squares
    Solvers" (Maalouf, Jubran, Feldman).

    :param P: A numpy matrix whose rows are points.
    :param p: A numpy array denoting a point.
    :return: The indices of points from P which p is a convex combination of.
    """
    d = P.shape[1]
    u = computeInitialWeightVector(P, p)  # compute initial weight vector
    if np.count_nonzero(u) > (d + 1):  # if the number of positive weights exceeds d+1
        u = Fast_Caratheodory(P, u.flatten(), False)
    # BUGFIX: the original wrote ``assert (cond, msg)`` -- a tuple assert that
    # is always truthy and never fires. Assert the residual norm directly.
    assert np.linalg.norm(p - np.dot(P.T, u)) <= 1e-9, \
        np.linalg.norm(p - np.dot(P.T, u))
    return np.where(u != 0)[0]
############################################################ AMVEE #####################################################
def isPD(B):
    """Return True iff ``B`` is positive-definite, checked via Cholesky."""
    try:
        la.cholesky(B)
    except la.LinAlgError:
        # Cholesky fails exactly when the matrix is not positive-definite.
        return False
    return True
def nearestPD(A):
    """Find the nearest positive-definite matrix to input.

    A Python/Numpy port of <NAME>'s `nearestSPD` MATLAB code [1], which
    credits [2].

    :param A: A square numpy matrix.
    :return: A symmetric positive-definite matrix close to ``A`` (in the
        sense of Higham's nearest-SPD projection, then nudged until
        Cholesky succeeds).

    [1] https://www.mathworks.com/matlabcentral/fileexchange/42885-nearestspd
    [2] <NAME>, "Computing a nearest symmetric positive semidefinite
    matrix" (1988): https://doi.org/10.1016/0024-3795(88)90223-6
    """
    B = (A + A.T) / 2  # symmetrize the input
    _, s, V = la.svd(B)
    # H is the symmetric polar factor; (B + H)/2 projects onto the PSD cone.
    H = np.dot(V.T, np.dot(np.diag(s), V))

    A2 = (B + H) / 2

    A3 = (A2 + A2.T) / 2  # re-symmetrize to cancel floating-point asymmetry

    if isPD(A3):
        return A3

    spacing = np.spacing(la.norm(A))
    # The above is different from [1]. It appears that MATLAB's `chol` Cholesky
    # decomposition will accept matrixes with exactly 0-eigenvalue, whereas
    # Numpy's will not. So where [1] uses `eps(mineig)` (where `eps` is Matlab
    # for `np.spacing`), we use the above definition. CAVEAT: our `spacing`
    # will be much larger than [1]'s `eps(mineig)`, since `mineig` is usually on
    # the order of 1e-16, and `eps(1e-16)` is on the order of 1e-34, whereas
    # `spacing` will, for Gaussian random matrixes of small dimension, be on
    # othe order of 1e-16. In practice, both ways converge, as the unit test
    # below suggests.
    I = np.eye(A.shape[0])
    k = 1
    # Shift the spectrum upward (with growing steps) until Cholesky succeeds.
    while not isPD(A3):
        mineig = np.min(np.real(la.eigvals(A3)))
        A3 += I * (-mineig * k ** 2 + spacing)
        k += 1

    return A3
def computeAxesPoints(E, C):
    """
    Compute the vertices (endpoints of the semi-axes) of the ellipsoid E
    centered at C.

    :param E: A numpy p.s.d. matrix representing an ellipsoid.
    :param C: A numpy array representing the center of the ellipsoid.
    :return: A numpy matrix containing the vertices of the ellipsoid.
    """
    if not isPD(E):
        # Repair a matrix that lost positive-definiteness to round-off.
        E = nearestPD(E)

    # The SVD gives the axis lengths (via the singular values) and the
    # rotation of the ellipsoid (via the right singular vectors).
    _, sigma, rotation = np.linalg.svd(E, full_matrices=True)
    half_axes = np.multiply(1.0 / np.sqrt(sigma[:, np.newaxis]), rotation.T)

    # Translate the origin-centered vertices to the ellipsoid's center.
    center = C.flatten()
    return np.vstack((half_axes + center, -half_axes + center))
def volumeApproximation(P):
    """
    Implementation of Algorithm 4.1 from "On Khachiyan's Algorithm for the
    Computation of Minimum Volume Enclosing Ellipsoids" (Todd & Yildirim).
    Computes a set of at most 2*d points used to build an initial ellipsoid.

    :param P: A numpy matrix whose rows are points.
    :return: A list of at most 2*d indices of rows of P.
    """
    basis = None
    basis_points = []
    # BUGFIX: the original unpacked the matrix itself ("n, d = P") instead of
    # its shape, which fails for any point set without exactly two rows.
    n, d = P.shape
    if n <= 2 * d:
        # if number of points is at most 2*d, then return all their indices
        return [i for i in range(n)]

    v = np.random.randn(d)  # start with a random direction
    # BUGFIX: guard the first iteration -- the original evaluated
    # matrix_rank(None), which is meaningless (and makes d == 1 return an
    # empty set); treat an empty basis as rank 0.
    while basis is None or np.linalg.matrix_rank(basis) < d:
        if basis is not None:  # if we already have computed basis directions
            if basis.shape[1] == d:
                # if this line is reached then there is numerical instability
                print('Numerical Issues!')
                _, _, V = np.linalg.svd(basis[:, :-1], full_matrices=True)
                return list(range(n))
            orth_basis = null_space(basis.T)  # get the orthogonal complement of basis
            v = orth_basis[:, 0] if orth_basis.ndim > 1 else orth_basis

        Q = np.dot(P, v.T)  # dot product of each row of P with v
        if len(basis_points) > 0:
            # BUGFIX: the original masked Q with NaNs but then recomputed
            # np.dot(P, v.T) inside nanargmax/nanargmin, so already-chosen
            # points were never actually excluded. Use the masked Q.
            Q[basis_points] = np.nan
        p_alpha = np.nanargmax(Q)  # index of largest non-NaN dot product
        p_beta = np.nanargmin(Q)   # index of smallest non-NaN dot product

        # direction between the two extreme points along v
        v = np.expand_dims(P[p_beta, :] - P[p_alpha, :], 1)
        if basis is None:
            basis = v / np.linalg.norm(v)
        else:
            basis = np.hstack((basis, v / np.linalg.norm(v, 2)))

        basis_points.append(p_alpha)
        basis_points.append(p_beta)

    return basis_points
def computemahalanobisDistance(Q, ellip):
    """
    Compute the Mahalanobis quadratic form q^T * ellip * q for every row q of Q.

    :param Q: A numpy matrix whose rows are points.
    :param ellip: A numpy array representing a p.s.d matrix (an ellipsoid).
    :return: A numpy array with one distance per row of Q.
    """
    # Row-wise quadratic form via a single einsum over the projected rows.
    projected = np.dot(Q, ellip)
    return np.einsum("ij,ij->i", projected, Q)
def computeEllipsoid(P, weights):
    """
    Compute the ellipsoid (MVEE of P) induced by the given weights.

    :param P: A numpy matrix whose rows are points.
    :param weights: A numpy array of weights with respect to the rows of P.
    :return:
            - The MVEE of P as a p.s.d. matrix.
            - The center of the MVEE of P.
    """
    if weights.ndim == 1:
        # keep the weights as a column vector
        weights = np.expand_dims(weights, 1)

    center = np.dot(P.T, weights)  # the weighted mean is the ellipsoid center
    d = P.shape[1]

    support = np.where(weights.flatten() > 0.0)[0]
    Q = P[support, :]           # only points carrying positive weight matter
    pos_weights = weights[support, :]

    # Weighted second-moment matrix minus the center's outer product, then
    # inverted and scaled by 1/d, yields the ellipsoid's shape matrix.
    scatter = (np.dot(np.multiply(Q, pos_weights).T, Q)
               - np.multiply.outer(center.T.ravel(), center.T.ravel()))
    ellipsoid = 1.0 / d * np.linalg.inv(scatter)
    return ellipsoid, center
def enlargeByTol(ellipsoid):
    """
    Enlarge the MVEE (the ellipsoid) by a factor of (1 + Utils.TOL).

    :param ellipsoid: A numpy matrix representing a p.s.d matrix.
    :return: An enlarged version of ellipsoid.
    """
    # Dividing the shape matrix by s**2 scales every semi-axis by s.
    scale = (1 + Utils.TOL) ** 2.0
    return ellipsoid / scale
def getContainedEllipsoid(ellipsoid):
    """
    Dilate E so that it is contained in the convex hull of the points.

    :param ellipsoid: A p.s.d matrix which represents the MVEE of the points.
    :return: The inscribed (shrunk by a factor of d*(1+tol)) version of the MVEE.
    """
    d = ellipsoid.shape[1]
    # Multiplying the shape matrix by (d*(1+tol))**2 shrinks each semi-axis
    # accordingly, giving the inscribed ellipsoid.
    return ellipsoid * d ** 2 * (1 + Utils.TOL) ** 2
def computeEllipInHigherDimension(Q, weights):
    """
    Compute the origin-centered ellipsoid matrix sum_i w_i * q_i * q_i^T over
    the lifted ((d+1)-dimensional) points with positive weight.

    :param Q: A numpy matrix of lifted points (one per row).
    :param weights: A numpy array of weights with respect to each row of Q.
    :return: The weighted sum of outer products (a (d+1)x(d+1) matrix).
    """
    support = np.where(weights > 0.0)[0]  # rows carrying positive weight
    # Scale each supported row by the square root of its weight so the outer
    # products come out weighted by w_i.
    scaled = np.multiply(Q[support, :], np.expand_dims(np.sqrt(weights[support]), 1))
    outer_products = np.einsum('bi,bo->bio', scaled, scaled)
    return np.sum(outer_products, axis=0)
def optimalityCondition(d, Q, ellip, weights):
    """
    Check whether the current ellipsoid is the (approximate) MVEE in the sense
    of the Todd & Yildirim algorithm.

    :param d: The dimension of the original (unlifted) points.
    :param Q: A numpy matrix of lifted points.
    :param ellip: A numpy array representing a p.s.d matrix.
    :param weights: A numpy array of weights with respect to the rows of Q.
    :return: A tuple (converged, distances): a boolean flag and the
        Mahalanobis distance of every row of Q to ellip.
    """
    dists = computemahalanobisDistance(Q, ellip)
    support = np.where(weights > 0)[0]  # indices of points with positive weight
    upper = (1.0 + Utils.TOL) * (d + 1)
    lower = (1.0 - Utils.TOL) * (d + 1)
    # Converged when no distance exceeds (1+tol)(d+1) and every support point
    # is at least (1-tol)(d+1) away.
    converged = np.all(dists <= upper) and np.all(dists[support] >= lower)
    return converged, dists
def yilidrimAlgorithm(P):
    """
    Implementation of Algorithm 4.2 from "On Khachiyan's Algorithm for the
    Computation of Minimum Volume Enclosing Ellipsoids" (Todd & Yildirim).
    Computes the MVEE of P faster than Khachiyan's algorithm.

    :param P: A numpy matrix whose rows are points.
    :return: A tuple (ellipsoid matrix, center) -- the MVEE of P.
    """
    n, d = P.shape
    Q = np.hstack((P, np.ones((n, 1))))  # lift the points to d+1 dimensions
    chosen_indices = volumeApproximation(P)  # initial support set
    if len(chosen_indices) == n:
        # all points were chosen (numerical instability): fall back to the
        # slower but more stable Khachiyan algorithm
        return khachiyanAlgorithm(P)
    weights = np.zeros((n, 1)).flatten()
    weights[chosen_indices] = 1.0 / len(chosen_indices)  # uniform on support
    ellip = np.linalg.inv(computeEllipInHigherDimension(Q, weights))
    while True:  # iterate until the optimality conditions hold
        stop_flag, distances = optimalityCondition(d, Q, ellip, weights)
        pos_weights_idx = np.where(weights > 0)[0]
        if stop_flag:  # the (approximate) MVEE has been reached
            break

        j_plus = np.argmax(distances)    # farthest point from the ellipsoid
        k_plus = distances[j_plus]
        # support point closest to the ellipsoid
        j_minus = pos_weights_idx[np.argmin(distances[pos_weights_idx])]
        k_minus = distances[j_minus]

        eps_plus = k_plus / (d + 1.0) - 1.0
        eps_minus = 1.0 - k_minus / (d + 1.0)
        if eps_plus > eps_minus:  # increase-step: push weight onto j_plus
            beta_current = (k_plus - d - 1.0) / ((d + 1) * (k_plus - 1.0))
            weights = (1.0 - beta_current) * weights
            weights[j_plus] = weights[j_plus] + beta_current
        else:  # decrease-step: pull weight away from j_minus
            beta_current = min((d + 1.0 - k_minus) / ((d + 1.0) * (k_minus - 1.0)),
                               weights[j_minus] / (1 - weights[j_minus]))
            weights = weights * (1 + beta_current)
            weights[j_minus] = weights[j_minus] - beta_current
        weights[weights < 0.0] = 0.0  # clamp negative round-off weights

        # BUGFIX: the original called computeEllipInHigherDimension(weights)
        # here, dropping the lifted points Q (a TypeError on the first update
        # step); pass Q exactly as done at initialization above.
        ellip = np.linalg.inv(computeEllipInHigherDimension(Q, weights))

    return computeEllipsoid(P, weights)
def khachiyanAlgorithm(P):
    """
    Implementation of Algorithm 3.1 (Khachiyan's algorithm) from "On
    Khachiyan's Algorithm for the Computation of Minimum Volume Enclosing
    Ellipsoids" (Todd & Yildirim).

    :param P: A numpy matrix whose rows are points.
    :return: A tuple (ellipsoid matrix, center) -- the MVEE of P.
    """
    n, d = P.shape
    u = np.ones((n, 1)) / n  # start from uniform weights
    Q = np.hstack((P, np.ones((n, 1))))  # lifted points
    err = 1
    count = 1  # kept for debugging purposes
    while err > Utils.TOL:  # until the weight update becomes negligible
        X = np.dot(np.multiply(Q, u).T, Q)  # current (lifted) ellipsoid matrix
        # Mahalanobis distances between the lifted points and the ellipsoid.
        M = computemahalanobisDistance(Q, np.linalg.inv(X))
        j = np.argmax(M)  # point farthest from the current ellipsoid
        max_val = M[j]
        step_size = (max_val - d - 1) / ((d + 1) * (max_val - 1))
        new_u = (1 - step_size) * u  # shift weight toward the farthest point
        new_u[j, 0] += step_size
        count += 1
        err = np.linalg.norm(new_u - u)  # convergence measure: weight change
        u = new_u
    return computeEllipsoid(P, u)
def computeMVEE(P, alg_type=1):
    """
    Run the chosen algorithm for computing the MVEE of P.

    :param P: A numpy matrix whose rows are points.
    :param alg_type: 1 for Yildirim's algorithm (default); any other value
        runs Khachiyan's algorithm (slower yet more numerically stable).
    :return:
            - The inscribed version of the MVEE of P.
            - The center of the MVEE of P.
            - The vertices of the inscribed ellipsoid.
    """
    if alg_type == 1:
        E, C = yilidrimAlgorithm(P)
    else:
        E, C = khachiyanAlgorithm(P)

    contained_ellipsoid = getContainedEllipsoid(E)  # get inscribed ellipsoid
    return contained_ellipsoid, C, computeAxesPoints(contained_ellipsoid, C)
################################################## ApproximateCenterProblems ###########################################
def computeLINFCoresetKOne(P):
    """
    The function at hand computes an L∞ coreset for the matrix vector multiplication or the dot product, with
    respect to the weighted set of points P.

    NOTE(review): the last column of P is treated as per-row index/id data --
    it is excluded from all geometry (``P[:, :-1]``) and is what gets returned
    in C; confirm against callers.

    :param P: A numpy matrix whose rows are points, with an extra last column
        carrying each row's original index.
    :return:
             - C: the coreset points, which are a subset of the rows of P
             - idx_in_P: the indices with respect to the coreset points C in P.
             - an upper bound on the approximation which our L∞ coreset is associated with.
    """
    global max_time
    r = matrix_rank(P[:, :-1])  # get the rank of P or the dimension of the span of P
    d = P.shape[1]
    if r < d - 1:  # if the span of P is a (proper) subspace of REAL^(d-1)
        svd = TruncatedSVD(n_components=r)  # an instance of TruncatedSVD
        Q = svd.fit_transform(P[:, :-1])  # reduce the dimensionality of P by projecting the points onto the
                                          # subspace which spans them
        Q = np.hstack((Q, np.expand_dims(P[:, -1], 1)))  # re-attach the index column to the projected points
    else:  # the points already span the full space
        Q = P
    start_time = time.time()  # start counting the time here
    if r > 1:  # if the "projected points" do not lie on a line
        if Q.shape[1] - 1 >= Q.shape[0]:
            # fewer points than dimensions: every point is in the coreset
            return Q, np.arange(Q.shape[0]).astype(np.int), Utils.UPPER_BOUND(r)
        else:
            _, _, S = computeMVEE(Q[:, :-1], alg_type=0)  # compute the MVEE of Q (Khachiyan variant)
    else:  # rank 1: the points lie on a line
        # the coreset is simply both ends of the line
        idx_in_P = np.unique([np.argmin(Q[:, :-1]).astype(np.int),
                               np.argmax(Q[:, :-1]).astype(np.int)]).tolist()
        return Q[idx_in_P], idx_in_P, Utils.UPPER_BOUND(r)
    C = []
    # Preallocated output buffers; each of the 2*(J+1)^2 MVEE axis points can
    # contribute at most (d+1) Caratheodory indices -- presumably 2*(J+1)^2 is
    # a safe upper bound on the total; TODO confirm against Utils.J usage.
    idx_in_P = np.empty((2*(Utils.J + 1) ** 2, )).astype(np.int)
    C = np.empty((2*(Utils.J + 1) ** 2, )).astype(np.int)
    idx = 0
    for q in S:  # for each boundary point along the axes of the MVEE of Q
        K = attainCaratheodorySet(Q[:, :-1], q)  # get at most d+1 indices of rows of Q whose convex
                                                 # combination gives q
        idx_in_P[idx:idx+K.shape[0]] = K.astype(np.int)  # record the coreset-point indices in Q
        C[idx:idx+K.shape[0]] = Q[idx_in_P[idx:idx+K.shape[0]], -1].astype(np.int)  # record their original ids
        idx += K.shape[0]
    return np.unique(C[:idx]), np.unique(idx_in_P[:idx]), Utils.UPPER_BOUND(r)
####################################################### Bicriteria #####################################################
def attainClosestPointsToSubspaces(P, W, flats, indices):
    """
    Return the closest half of the indexed points to the given flats.

    :param P: A numpy matrix whose rows are points.
    :param W: A numpy array of weights (kept for interface parity).
    :param flats: A list of flats (each an orthogonal matrix plus a
        translation vector), or a single flat in accelerated mode.
    :param indices: A numpy array of indices of points in P.
    :return: A list of the closest N/2 entries: positions into ``indices`` in
        accelerated mode, otherwise the point indices themselves.
    """
    N = indices.shape[0]

    if Utils.ACCELERATE_BICRETERIA:
        # Single flat: vectorized distances plus an O(N) partial selection.
        dists = Utils.computeDistanceToSubspace(P[indices, :], flats[0], flats[1])
        return np.argpartition(dists, N // 2)[:N // 2].tolist()

    # Several flats: a point's distance is its distance to the nearest flat.
    dists = np.empty((P[indices, :].shape[0], ))
    for i in range(N):
        row = P[np.array([indices[i]]), :]
        dists[i] = np.min([Utils.computeDistanceToSubspace(row, X, v)
                           for X, v in flats])
    return np.array(indices)[np.argsort(dists).astype(np.int)[:int(N / 2)]].tolist()
def sortDistancesToSubspace(P, X, v, points_indices):
    """
    Sort the indexed points by ascending distance to the flat (X, v).

    :param P: A numpy matrix whose rows are points.
    :param X: An orthogonal matrix whose span is a subspace.
    :param v: A numpy array denoting a translation vector.
    :param points_indices: A numpy array of indices into the rows of P.
    :return: The same indices as a list, sorted by distance to the flat.
    """
    dists = Utils.computeDistanceToSubspace(P[points_indices, :], X, v)
    order = np.argsort(dists).astype(np.int)  # ascending-distance ordering
    return np.array(points_indices)[order].tolist()
def computeSubOptimalFlat(P, weights):
    """
    Compute a sub-optimal j-flat for P under the l2^2 loss via a truncated SVD
    of the centered points.

    :param P: A numpy matrix which denotes the set of points.
    :param weights: A numpy array of weights with respect to each row of P.
    :return: A flat given as (orthogonal matrix, translation vector).
    """
    translation = np.average(P, axis=0, weights=weights)  # weighted centroid
    decomposition = TruncatedSVD(algorithm='randomized', n_iter=1,
                                 n_components=Utils.J).fit(P - translation)
    return decomposition.components_, translation
def clusterIdxsBasedOnKSubspaces(P, B):
    """
    Assign every point of P to its nearest flat in B.

    :param P: A numpy matrix whose rows are points.
    :param B: A list of flats (orthogonal matrix, translation vector).
    :return: A numpy array holding, per point, the index of its closest flat.
    """
    all_rows = np.arange(P.shape[0])
    flats = np.array(B)
    # One row of distances per flat, one column per point.
    dists = np.apply_along_axis(
        lambda flat: Utils.computeDistanceToSubspace(P[all_rows, :], flat[0], flat[1]),
        1, flats)
    # Per point (column), pick the index of the nearest flat.
    return np.argmin(dists, axis=0)
def addFlats(P, W, S, B):
    """
    Append to B one flat for every possible choice of one index from each of
    the j+1 index subsets in S.

    :param P: A numpy matrix whose rows are points.
    :param W: A numpy array of weights with respect to the rows of P.
    :param S: A list of j+1 numpy arrays of candidate point indices.
    :param B: The list of flats to extend (mutated in place).
    :return: A tuple of (indices of the newly added flats in B, B itself).
    """
    # Cartesian product of positions within each subset, one column per combo.
    ranges = [np.arange(subset.shape[0]) for subset in S]
    grid = np.array([axis.flatten() for axis in np.meshgrid(*ranges)])

    first_new = len(B)  # remember where the newly added flats start
    for col in range(grid.shape[1]):
        chosen = [S[row][grid[row, col]][0] for row in range(grid.shape[0])]
        B.append(computeSubOptimalFlat(P[chosen, :], W[chosen]))

    return np.arange(first_new, len(B)), B
def computeBicriteria(P, W):
    """
    Implementation of Algorithm Approx-k-j-Flats(P, k, j) from the paper
    "Bi-criteria Linear-time Approximations for Generalized k-Mean/Median/Center".
    Produces a (2^j, O(log(n) * (jk)^O(j)))-approximation for the
    (k,j)-projective clustering problem under the l2^2 loss function.

    :param P: A numpy matrix which denotes the set of points (one per row).
    :param W: A numpy array of weights, one per row of P.
    :return: A list of flats forming the bi-criteria approximation.
    """
    n = P.shape[0]
    remaining = np.arange(0, n, 1)  # indices of points not yet covered
    round_num = 1
    B = []
    tol_sample_size = Utils.K * (Utils.J + 1)
    # size of each i.i.d. sample at round t
    sample_size = (lambda t: int(np.ceil(Utils.K * (Utils.J + 1) *
                                          (2 + np.log(Utils.J + 1) +
                                           np.log(Utils.K) +
                                           min(t, np.log(np.log(n)))))))
    while np.size(remaining) >= tol_sample_size:  # run until few points remain
        S = []
        for _ in range(0, Utils.J + 1):  # sample j + 1 subsets i.i.d.
            drawn = np.random.choice(remaining, size=sample_size(round_num))
            S.append(drawn[:, np.newaxis])
        if not Utils.ACCELERATE_BICRETERIA:
            F = addFlats(P, W, S, B)
        else:
            # accelerated variant: fit a single flat to the pooled sample
            S = np.unique(np.vstack(S).flatten())
            F = computeSubOptimalFlat(P[S, :], W[S])
            B.append(F)
        # discard the points best served by the newly added flats
        closest = attainClosestPointsToSubspaces(P, W, F, remaining)
        remaining = np.delete(remaining, closest)
        round_num += 1
    # handle the leftover points
    if not Utils.ACCELERATE_BICRETERIA:
        _, B = addFlats(P, W, [remaining for _ in range(Utils.J + 1)], B)
    else:
        F = computeSubOptimalFlat(P[remaining.flatten(), :], W[remaining.flatten()])
        B.append(F)
    return B
################################################### L1Coreset ##########################################################
def applyBiCriterea(P, W):
    """
    Run the bicriteria algorithm and partition the rows of P by its flats.

    :param P: A numpy matrix which denotes the set of points (one per row).
    :param W: A numpy array of weights, one per row of P.
    :return:
        - B: O((jk)^{j+1}) j-flats attaining a 2^j approximation towards the
          optimal (k,j)-projective clustering of P.
        - idxs: per-point index into B of the flat assigned to that point.
    """
    flats = computeBicriteria(P, W)               # flats from the bi-criteria algorithm
    assignment = clusterIdxsBasedOnKSubspaces(P, flats)  # best-fitting flat per point
    return flats, assignment
def initializeSens(P, B, idxs):
    """
    Initialize sensitivities from the bicriteria solution: each point's term is
    its distance to its closest flat in B, divided by the summed distance of its
    whole cluster to that flat, scaled by 2^j.

    :param P: A numpy matrix of points; the last column of each row is the
              point's concatenated index and is excluded from the geometry.
    :param B: A set of flats, each an (orthogonal matrix, translation vector) pair.
    :param idxs: A numpy array with the cluster (flat) index of every point.
    :return: A numpy array of additive sensitivity terms, one per point.
    """
    additive_term = np.zeros((P.shape[0], ))
    for flat_idx in np.unique(idxs):  # iterate over the clusters imposed by B
        members = np.where(idxs == flat_idx)[0]  # points of this cluster
        # distance of every cluster member to the cluster's flat
        member_costs = Utils.computeDistanceToSubspace(P[members, :-1],
                                                       B[flat_idx][0], B[flat_idx][1])
        # normalized per-point cost; nan_to_num guards an all-zero-cost cluster
        additive_term[members] = 2 ** Utils.J * \
            np.nan_to_num(member_costs / np.sum(member_costs))
    return additive_term
def Level(P, k, V, desired_eps=0.01):
    """
    Implementation of Algorithm 7 of "Coresets for Gaussian Mixture Models of
    Any shapes" by Zahi Kfir and <NAME>.

    :param P: A weighted set of points; the last column of each row is the
              point's original index (bookkeeping column, excluded from geometry).
    :param k: The number of $j$-subspaces which defines the (k,j)-projective
              clustering problem.
    :param V: A numpy matrix whose rows span an affine subspace.
    :param desired_eps: An approximation error, default 0.01 (currently unused).
    :return: A numpy array of row indices of P chosen for the coreset.
    """
    t = V.shape[0]       # number of points in V
    d = P.shape[1] - 1   # exclude the concatenated-index column
    # boolean masks over the rows of P: C marks points chosen directly,
    # D marks points chosen by recursive calls
    C = np.zeros((P.shape[0], ), dtype="bool")
    D = np.zeros((P.shape[0], ), dtype="bool")
    if k <= 1 or t - 1 >= Utils.J:
        return np.array([])
    A, v = Utils.computeAffineSpan(V)
    dists_from_P_to_A = Utils.computeDistanceToSubspace(P[:, :-1], A.T, v)
    non_zero_idxs = np.where(dists_from_P_to_A > 1e-11)[0]
    # d_0: smallest strictly-positive distance to the affine span (0 if none)
    d_0 = 0 if len(non_zero_idxs) < 1 else np.min(dists_from_P_to_A[non_zero_idxs])
    c = 1 / d ** (1.5 * (d + 1))
    M = np.max(np.abs(P[:, :-1]))
    on_j_subspace = np.where(dists_from_P_to_A <= 1e-11)[0]
    B = [[]]
    if on_j_subspace.size != 0:
        # B[0]: the points lying (numerically) on the affine span itself
        B[0] = P[on_j_subspace, :]
        if B[0].shape[0] >= Utils.J ** (2 * k):
            indices_in_B = B[0][:, -1]
            temp = computeLInfCoreset(B[0], k - 1)
            # np.int was removed in NumPy 1.24; use the builtin int
            C[indices_in_B[temp].astype(int)] = True
        else:
            C[B[0][:, -1].astype(int)] = True
    if d_0 > 0:
        # partition the off-span points into exponentially growing distance rings
        for i in range(1, int(np.ceil(8 * np.log(M) + np.log(1.0 / c)) + 1)):
            ring = np.where(np.logical_and(2 ** (i - 1) * d_0 <= dists_from_P_to_A,
                                           dists_from_P_to_A <= 2 ** i * d_0))[0]
            B.append(P[ring, :])
            if len(B[i]) > 0:
                if len(B[i]) >= Utils.J ** (2 * k):
                    indices_B = B[i][:, -1]
                    Q_B = np.hstack((B[i][:, :-1], np.arange(B[i].shape[0])[:, np.newaxis]))
                    temp = computeLInfCoreset(Q_B, k - 1)
                    if temp.size > 0:
                        C[indices_B[temp].astype(int)] = True
                else:
                    # small ring: take all of it and recurse on every point
                    C[B[i][:, -1].astype(int)] = True
                    temp = np.arange(B[i].shape[0]).astype(int)
                list_of_coresets = [x for x in B if len(x) > 0]
                Q = np.vstack(list_of_coresets)
                indices_Q = Q[:, -1]
                Q = np.hstack((Q[:, :-1], np.arange(Q.shape[0])[:, np.newaxis]))
                if temp.size > 0:
                    for point in B[i][temp, :]:
                        # recurse with the span extended by this point
                        indices = Level(Q, k - 1, np.vstack((V, point[np.newaxis, :-1])))
                        if indices.size > 0:
                            D[indices_Q[indices].astype(int)] = True
    # union of directly chosen and recursively chosen points
    return np.where(np.add(C, D))[0]
def computeLInfCoreset(P, k):
    """
    Main L_infty coreset method: for k == 1 it runs the fast L_infty coreset
    algorithm directly; for k > 1 it recurses, following a variant of
    Algorithm 6 of "Coresets for Gaussian Mixture Models of Any shapes" by
    Zahi Kfir and <NAME>.

    :param P: A weighted set of points; the last column of each row is the
              point's original index.
    :param k: The number of $j$-subspaces which defines the (k,j)-projective
              clustering problem.
    :return: A numpy array of row indices of P forming an L_infty coreset for
             the (k,j)-projective clustering problem.
    """
    if k == 1:  # plain subspace clustering problem
        _, idxs_in_Q, _ = computeLINFCoresetKOne(P)  # fast L_infty coreset for P
        return idxs_in_Q
    elif k < 1:  # degenerate case: nothing to select
        return np.array([])
    else:  # k > 1
        temp = computeLInfCoreset(P, k - 1)  # recurse until k == 1
        C = np.zeros((P.shape[0], ), dtype="bool")
        # np.int was removed in NumPy 1.24; use the builtin int
        C[P[temp, -1].astype(int)] = True
        for p in P[temp, :]:  # for each point in the (k-1,j) coreset
            # lift the (k-1,j) coreset to a (k,j) coreset through this point
            recursive_core = Level(P, k, p[np.newaxis, :-1])
            if recursive_core.size > 0:
                C[P[recursive_core, -1].astype(int)] = True
                if np.where(C == False)[0].size < 1:
                    # every point already selected - stop early
                    return np.where(C)[0]
        return np.where(C)[0]  # L_infty coreset for the (k,j) problem
def computeSensitivityPerCluster(P):
    """
    Bound the sensitivity of every point in one cluster by repeatedly peeling
    off L_infty coresets: the points captured in batch i receive the bound
    upper_bound / (i + 1); the last (at most 2*j) leftovers get the lowest bound.

    :param P: The cluster's points; the last column of each row is the point's
              original index in the full point set.
    :return: A 2-column numpy array of (sensitivity bound, original index) rows.
    """
    sensitivity = np.ones((P.shape[0], )) * np.inf
    batch = 0
    upper_bound = Utils.determineUpperBound()  # bound the L_infty coreset attains
    # carry the original index alongside a local 0..n-1 index column
    Q = np.hstack((P[:, :-1], np.arange(P.shape[0])[:, np.newaxis]))
    while Q.shape[0] > 2 * Q.shape[1]:  # run till at most 2*j points remain
        orig_idx_in_Q = Q[:, -1]
        idxs_of_P = computeLInfCoreset(
            np.hstack((Q[:, :-1], np.arange(Q.shape[0])[:, np.newaxis])), Utils.K)
        # np.int was removed in NumPy 1.24; use the builtin int
        chosen = orig_idx_in_Q[idxs_of_P].astype(int)
        # sanity check: no point may be assigned a sensitivity twice
        if np.any(np.logical_not(np.isinf(sensitivity[chosen]))):
            raise ValueError('A crucial Bug!')
        sensitivity[chosen] = upper_bound / (batch + 1)
        if np.isnan(np.sum(sensitivity)):
            print('HOLD ON!')
        # keep only the cluster points not in the current L_infty coreset
        remaining_idxs = Utils.attainAllButSpecifiedIndices(Q, chosen)
        Q = Q[np.where(remaining_idxs)[0], :]
        print('Batch {} has finished'.format(batch))
        batch += 1  # count of L_infty coresets peeled from this cluster
    # the remaining (at most 2*j) points get the lowest bound
    remaining_idxs_per_cluster = Q[:, -1].astype(int)
    sensitivity[remaining_idxs_per_cluster] = upper_bound / (batch if batch > 0 else batch + 1)
    return np.hstack((sensitivity[:, np.newaxis], P[:, -1][:, np.newaxis]))
def computeSensitivity(P, W):
    """
    Compute the sensitivity of each point using a reduction from L_infty to L_1.

    :param P: A numpy matrix of points (one per row).
    :param W: A numpy array of weights, one per row of P.
    :return: A numpy array with the sensitivity bound of every point in P.
    """
    # append each point's original index as the last column
    P = np.hstack((P, np.arange(P.shape[0])[:, np.newaxis]))
    # flats giving a 2^j approximation to the optimal solution
    B, idxs = applyBiCriterea(P[:, :-1], W)
    sensitivity_additive_term = initializeSens(P, B, idxs)  # initialize the sensitivities
    unique_center_idxs = np.unique(idxs)  # unique cluster indices
    sensitivity = np.empty((P.shape[0], ))
    clusters = [np.where(idxs == idx)[0] for idx in unique_center_idxs]
    Qs = [[] for _ in range(len(clusters))]
    for idx in range(len(clusters)):  # apply the L_infty-to-L_1 conversion per cluster
        # project each cluster onto its flat; keep the index column alongside.
        # NOTE(review): B is indexed by cluster position, which assumes the values
        # in idxs are contiguous starting at 0 - confirm against applyBiCriterea.
        Qs[idx] = np.hstack(((P[clusters[idx], :-1] - B[idx][1]).dot(B[idx][0].T),
                              P[clusters[idx], -1][:, np.newaxis]))
    for i in range(len(Qs)):
        s = computeSensitivityPerCluster(Qs[i])
        # np.int was removed in NumPy 1.24; use the builtin int
        sensitivity[s[:, -1].astype(int)] = s[:, 0]
    # add the additive term coming from the bicriteria solution
    sensitivity += 2 ** Utils.J * sensitivity_additive_term
    return sensitivity
if __name__ == '__main__':
    # Smoke test: random points with unit weights. The last column appended to
    # P holds each point's original index, a convention the pipeline relies on.
    P = np.random.randn(10000, 5)
    P = np.hstack((P, np.arange(10000)[:, np.newaxis]))
    W = np.ones((P.shape[0], ))
    s = computeSensitivity(P, W)
"numpy.linalg.matrix_rank",
"numpy.sqrt",
"numpy.hstack",
"numpy.log",
"Utils.UPPER_BOUND",
"Utils.computeDistanceToSubspace",
"numpy.array",
"numpy.count_nonzero",
"numpy.argsort",
"numpy.einsum",
"numpy.linalg.norm",
"numpy.arange",
"Utils.determineUpperBound",
"numpy.multiply",
"numpy... | [((2252, 2269), 'numpy.hstack', 'np.hstack', (['(p, 1)'], {}), '((p, 1))\n', (2261, 2269), True, 'import numpy as np\n'), ((4103, 4112), 'numpy.linalg.svd', 'la.svd', (['B'], {}), '(B)\n', (4109, 4112), True, 'from numpy import linalg as la\n'), ((4953, 4971), 'numpy.eye', 'np.eye', (['A.shape[0]'], {}), '(A.shape[0])\n', (4959, 4971), True, 'import numpy as np\n'), ((5709, 5745), 'numpy.linalg.svd', 'np.linalg.svd', (['E'], {'full_matrices': '(True)'}), '(E, full_matrices=True)\n', (5722, 5745), True, 'import numpy as np\n'), ((6684, 6702), 'numpy.random.randn', 'np.random.randn', (['d'], {}), '(d)\n', (6699, 6702), True, 'import numpy as np\n'), ((9502, 9522), 'numpy.dot', 'np.dot', (['P.T', 'weights'], {}), '(P.T, weights)\n', (9508, 9522), True, 'import numpy as np\n'), ((19251, 19273), 'numpy.linalg.matrix_rank', 'matrix_rank', (['P[:, :-1]'], {}), '(P[:, :-1])\n', (19262, 19273), False, 'from numpy.linalg import matrix_rank\n'), ((19954, 19965), 'time.time', 'time.time', ([], {}), '()\n', (19963, 19965), False, 'import time\n'), ((22541, 22576), 'numpy.empty', 'np.empty', (['(P[indices, :].shape[0],)'], {}), '((P[indices, :].shape[0],))\n', (22549, 22576), True, 'import numpy as np\n'), ((23678, 23737), 'Utils.computeDistanceToSubspace', 'Utils.computeDistanceToSubspace', (['P[points_indices, :]', 'X', 'v'], {}), '(P[points_indices, :], X, v)\n', (23709, 23737), False, 'import Utils\n'), ((24578, 24616), 'numpy.average', 'np.average', (['P'], {'axis': '(0)', 'weights': 'weights'}), '(P, axis=0, weights=weights)\n', (24588, 24616), True, 'import numpy as np\n'), ((25220, 25232), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (25229, 25232), True, 'import numpy as np\n'), ((25276, 25287), 'numpy.array', 'np.array', (['B'], {}), '(B)\n', (25284, 25287), True, 'import numpy as np\n'), ((25798, 25822), 'numpy.argmin', 'np.argmin', (['dists'], {'axis': '(0)'}), '(dists, axis=0)\n', (25807, 25822), True, 'import numpy as np\n'), ((26257, 26278), 
'numpy.meshgrid', 'np.meshgrid', (['*indices'], {}), '(*indices)\n', (26268, 26278), True, 'import numpy as np\n'), ((27335, 27353), 'numpy.arange', 'np.arange', (['(0)', 'n', '(1)'], {}), '(0, n, 1)\n', (27344, 27353), True, 'import numpy as np\n'), ((30064, 30079), 'numpy.unique', 'np.unique', (['idxs'], {}), '(idxs)\n', (30073, 30079), True, 'import numpy as np\n'), ((30148, 30171), 'numpy.zeros', 'np.zeros', (['(P.shape[0],)'], {}), '((P.shape[0],))\n', (30156, 30171), True, 'import numpy as np\n'), ((32579, 32616), 'numpy.zeros', 'np.zeros', (['(P.shape[0],)'], {'dtype': '"""bool"""'}), "((P.shape[0],), dtype='bool')\n", (32587, 32616), True, 'import numpy as np\n'), ((32627, 32664), 'numpy.zeros', 'np.zeros', (['(P.shape[0],)'], {'dtype': '"""bool"""'}), "((P.shape[0],), dtype='bool')\n", (32635, 32664), True, 'import numpy as np\n'), ((32771, 32797), 'Utils.computeAffineSpan', 'Utils.computeAffineSpan', (['V'], {}), '(V)\n', (32794, 32797), False, 'import Utils\n'), ((32879, 32929), 'Utils.computeDistanceToSubspace', 'Utils.computeDistanceToSubspace', (['P[:, :-1]', 'A.T', 'v'], {}), '(P[:, :-1], A.T, v)\n', (32910, 32929), False, 'import Utils\n'), ((37502, 37529), 'Utils.determineUpperBound', 'Utils.determineUpperBound', ([], {}), '()\n', (37527, 37529), False, 'import Utils\n'), ((39108, 39172), 'numpy.hstack', 'np.hstack', (['(sensitivity[:, np.newaxis], P[:, -1][:, np.newaxis])'], {}), '((sensitivity[:, np.newaxis], P[:, -1][:, np.newaxis]))\n', (39117, 39172), True, 'import numpy as np\n'), ((39655, 39670), 'numpy.unique', 'np.unique', (['idxs'], {}), '(idxs)\n', (39664, 39670), True, 'import numpy as np\n'), ((39724, 39747), 'numpy.empty', 'np.empty', (['(P.shape[0],)'], {}), '((P.shape[0],))\n', (39732, 39747), True, 'import numpy as np\n'), ((40248, 40259), 'time.time', 'time.time', ([], {}), '()\n', (40257, 40259), False, 'import time\n'), ((41191, 41216), 'numpy.random.randn', 'np.random.randn', (['(10000)', '(5)'], {}), '(10000, 5)\n', (41206, 
41216), True, 'import numpy as np\n'), ((41283, 41305), 'numpy.ones', 'np.ones', (['(P.shape[0],)'], {}), '((P.shape[0],))\n', (41290, 41305), True, 'import numpy as np\n'), ((2289, 2302), 'numpy.ones', 'np.ones', (['(N,)'], {}), '((N,))\n', (2296, 2302), True, 'import numpy as np\n'), ((3081, 3100), 'numpy.count_nonzero', 'np.count_nonzero', (['u'], {}), '(u)\n', (3097, 3100), True, 'import numpy as np\n'), ((3321, 3337), 'numpy.where', 'np.where', (['(u != 0)'], {}), '(u != 0)\n', (3329, 3337), True, 'import numpy as np\n'), ((3574, 3588), 'numpy.linalg.cholesky', 'la.cholesky', (['B'], {}), '(B)\n', (3585, 3588), True, 'from numpy import linalg as la\n'), ((4278, 4288), 'numpy.linalg.norm', 'la.norm', (['A'], {}), '(A)\n', (4285, 4288), True, 'from numpy import linalg as la\n'), ((6744, 6772), 'numpy.linalg.matrix_rank', 'np.linalg.matrix_rank', (['basis'], {}), '(basis)\n', (6765, 6772), True, 'import numpy as np\n'), ((7388, 7402), 'numpy.dot', 'np.dot', (['P', 'v.T'], {}), '(P, v.T)\n', (7394, 7402), True, 'import numpy as np\n'), ((7841, 7888), 'numpy.expand_dims', 'np.expand_dims', (['(P[p_beta, :] - P[p_alpha, :])', '(1)'], {}), '(P[p_beta, :] - P[p_alpha, :], 1)\n', (7855, 7888), True, 'import numpy as np\n'), ((8954, 8970), 'numpy.dot', 'np.dot', (['Q', 'ellip'], {}), '(Q, ellip)\n', (8960, 8970), True, 'import numpy as np\n'), ((9464, 9490), 'numpy.expand_dims', 'np.expand_dims', (['weights', '(1)'], {}), '(weights, 1)\n', (9478, 9490), True, 'import numpy as np\n'), ((11165, 11188), 'numpy.where', 'np.where', (['(weights > 0.0)'], {}), '(weights > 0.0)\n', (11173, 11188), True, 'import numpy as np\n'), ((11598, 11645), 'numpy.einsum', 'np.einsum', (['"""bi,bo->bio"""', 'weighted_Q', 'weighted_Q'], {}), "('bi,bo->bio', weighted_Q, weighted_Q)\n", (11607, 11645), True, 'import numpy as np\n'), ((12229, 12250), 'numpy.where', 'np.where', (['(weights > 0)'], {}), '(weights > 0)\n', (12237, 12250), True, 'import numpy as np\n'), ((14638, 14658), 
'numpy.argmax', 'np.argmax', (['distances'], {}), '(distances)\n', (14647, 14658), True, 'import numpy as np\n'), ((16710, 16725), 'numpy.ones', 'np.ones', (['(n, 1)'], {}), '((n, 1))\n', (16717, 16725), True, 'import numpy as np\n'), ((17184, 17196), 'numpy.argmax', 'np.argmax', (['M'], {}), '(M)\n', (17193, 17196), True, 'import numpy as np\n'), ((17563, 17588), 'numpy.linalg.norm', 'np.linalg.norm', (['(new_u - u)'], {}), '(new_u - u)\n', (17577, 17588), True, 'import numpy as np\n'), ((19427, 19455), 'sklearn.decomposition.TruncatedSVD', 'TruncatedSVD', ([], {'n_components': 'r'}), '(n_components=r)\n', (19439, 19455), False, 'from sklearn.decomposition import TruncatedSVD\n'), ((21912, 21930), 'numpy.unique', 'np.unique', (['C[:idx]'], {}), '(C[:idx])\n', (21921, 21930), True, 'import numpy as np\n'), ((21932, 21957), 'numpy.unique', 'np.unique', (['idx_in_P[:idx]'], {}), '(idx_in_P[:idx])\n', (21941, 21957), True, 'import numpy as np\n'), ((21959, 21979), 'Utils.UPPER_BOUND', 'Utils.UPPER_BOUND', (['r'], {}), '(r)\n', (21976, 21979), False, 'import Utils\n'), ((22885, 22951), 'Utils.computeDistanceToSubspace', 'Utils.computeDistanceToSubspace', (['P[indices, :]', 'flats[0]', 'flats[1]'], {}), '(P[indices, :], flats[0], flats[1])\n', (22916, 22951), False, 'import Utils\n'), ((26192, 26216), 'numpy.arange', 'np.arange', (['S[i].shape[0]'], {}), '(S[i].shape[0])\n', (26201, 26216), True, 'import numpy as np\n'), ((27728, 27738), 'numpy.size', 'np.size', (['Q'], {}), '(Q)\n', (27735, 27738), True, 'import numpy as np\n'), ((28344, 28372), 'numpy.delete', 'np.delete', (['Q', 'sorted_indices'], {}), '(Q, sorted_indices)\n', (28353, 28372), True, 'import numpy as np\n'), ((30479, 30579), 'Utils.computeDistanceToSubspace', 'Utils.computeDistanceToSubspace', (['P[cluster_per_center, :-1]', 'B[center_idx][0]', 'B[center_idx][1]'], {}), '(P[cluster_per_center, :-1], B[center_idx][0\n ], B[center_idx][1])\n', (30510, 30579), False, 'import Utils\n'), ((32718, 32730), 
'numpy.array', 'np.array', (['[]'], {}), '([])\n', (32726, 32730), True, 'import numpy as np\n'), ((32951, 32986), 'numpy.where', 'np.where', (['(dists_from_P_to_A > 1e-11)'], {}), '(dists_from_P_to_A > 1e-11)\n', (32959, 32986), True, 'import numpy as np\n'), ((33036, 33076), 'numpy.min', 'np.min', (['dists_from_P_to_A[non_zero_idxs]'], {}), '(dists_from_P_to_A[non_zero_idxs])\n', (33042, 33076), True, 'import numpy as np\n'), ((33127, 33144), 'numpy.abs', 'np.abs', (['P[:, :-1]'], {}), '(P[:, :-1])\n', (33133, 33144), True, 'import numpy as np\n'), ((33167, 33203), 'numpy.where', 'np.where', (['(dists_from_P_to_A <= 1e-11)'], {}), '(dists_from_P_to_A <= 1e-11)\n', (33175, 33203), True, 'import numpy as np\n'), ((37439, 37461), 'numpy.ones', 'np.ones', (['(P.shape[0],)'], {}), '((P.shape[0],))\n', (37446, 37461), True, 'import numpy as np\n'), ((2225, 2240), 'numpy.ones', 'np.ones', (['(1, N)'], {}), '((1, N))\n', (2232, 2240), True, 'import numpy as np\n'), ((4143, 4153), 'numpy.diag', 'np.diag', (['s'], {}), '(s)\n', (4150, 4153), True, 'import numpy as np\n'), ((5785, 5810), 'numpy.sqrt', 'np.sqrt', (['D[:, np.newaxis]'], {}), '(D[:, np.newaxis])\n', (5792, 5810), True, 'import numpy as np\n'), ((7213, 7232), 'scipy.linalg.null_space', 'null_space', (['basis.T'], {}), '(basis.T)\n', (7223, 7232), False, 'from scipy.linalg import null_space\n'), ((7639, 7653), 'numpy.dot', 'np.dot', (['P', 'v.T'], {}), '(P, v.T)\n', (7645, 7653), True, 'import numpy as np\n'), ((7749, 7763), 'numpy.dot', 'np.dot', (['P', 'v.T'], {}), '(P, v.T)\n', (7755, 7763), True, 'import numpy as np\n'), ((11300, 11322), 'numpy.sqrt', 'np.sqrt', (['weights[idxs]'], {}), '(weights[idxs])\n', (11307, 11322), True, 'import numpy as np\n'), ((12697, 12749), 'numpy.all', 'np.all', (['(current_dists <= (1.0 + Utils.TOL) * (d + 1))'], {}), '(current_dists <= (1.0 + Utils.TOL) * (d + 1))\n', (12703, 12749), True, 'import numpy as np\n'), ((12765, 12835), 'numpy.all', 'np.all', 
(['(current_dists[pos_weights_idxs] >= (1.0 - Utils.TOL) * (d + 1))'], {}), '(current_dists[pos_weights_idxs] >= (1.0 - Utils.TOL) * (d + 1))\n', (12771, 12835), True, 'import numpy as np\n'), ((13246, 13261), 'numpy.ones', 'np.ones', (['(n, 1)'], {}), '((n, 1))\n', (13253, 13261), True, 'import numpy as np\n'), ((13673, 13689), 'numpy.zeros', 'np.zeros', (['(n, 1)'], {}), '((n, 1))\n', (13681, 13689), True, 'import numpy as np\n'), ((14475, 14496), 'numpy.where', 'np.where', (['(weights > 0)'], {}), '(weights > 0)\n', (14483, 14496), True, 'import numpy as np\n'), ((14817, 14854), 'numpy.argmin', 'np.argmin', (['distances[pos_weights_idx]'], {}), '(distances[pos_weights_idx])\n', (14826, 14854), True, 'import numpy as np\n'), ((16788, 16803), 'numpy.ones', 'np.ones', (['(n, 1)'], {}), '((n, 1))\n', (16795, 16803), True, 'import numpy as np\n'), ((17011, 17027), 'numpy.linalg.inv', 'np.linalg.inv', (['X'], {}), '(X)\n', (17024, 17027), True, 'import numpy as np\n'), ((20605, 20625), 'Utils.UPPER_BOUND', 'Utils.UPPER_BOUND', (['r'], {}), '(r)\n', (20622, 20625), False, 'import Utils\n'), ((21231, 21266), 'numpy.empty', 'np.empty', (['(2 * (Utils.J + 1) ** 2,)'], {}), '((2 * (Utils.J + 1) ** 2,))\n', (21239, 21266), True, 'import numpy as np\n'), ((21290, 21325), 'numpy.empty', 'np.empty', (['(2 * (Utils.J + 1) ** 2,)'], {}), '((2 * (Utils.J + 1) ** 2,))\n', (21298, 21325), True, 'import numpy as np\n'), ((22968, 22998), 'numpy.argpartition', 'np.argpartition', (['dists', '(N // 2)'], {}), '(dists, N // 2)\n', (22983, 22998), True, 'import numpy as np\n'), ((24671, 24739), 'sklearn.decomposition.TruncatedSVD', 'TruncatedSVD', ([], {'algorithm': '"""randomized"""', 'n_iter': '(1)', 'n_components': 'Utils.J'}), "(algorithm='randomized', n_iter=1, n_components=Utils.J)\n", (24683, 24739), False, 'from sklearn.decomposition import TruncatedSVD\n'), ((25363, 25418), 'Utils.computeDistanceToSubspace', 'Utils.computeDistanceToSubspace', (['P[idxs, :]', 'x[0]', 'x[1]'], {}), 
'(P[idxs, :], x[0], x[1])\n', (25394, 25418), False, 'import Utils\n'), ((30290, 30318), 'numpy.where', 'np.where', (['(idxs == center_idx)'], {}), '(idxs == center_idx)\n', (30298, 30318), True, 'import numpy as np\n'), ((35233, 35245), 'numpy.add', 'np.add', (['C', 'D'], {}), '(C, D)\n', (35239, 35245), True, 'import numpy as np\n'), ((36271, 36283), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (36279, 36283), True, 'import numpy as np\n'), ((36395, 36432), 'numpy.zeros', 'np.zeros', (['(P.shape[0],)'], {'dtype': '"""bool"""'}), "((P.shape[0],), dtype='bool')\n", (36403, 36432), True, 'import numpy as np\n'), ((38365, 38384), 'numpy.sum', 'np.sum', (['sensitivity'], {}), '(sensitivity)\n', (38371, 38384), True, 'import numpy as np\n'), ((38605, 38629), 'numpy.where', 'np.where', (['remaining_idxs'], {}), '(remaining_idxs)\n', (38613, 38629), True, 'import numpy as np\n'), ((39768, 39789), 'numpy.where', 'np.where', (['(idxs == idx)'], {}), '(idxs == idx)\n', (39776, 39789), True, 'import numpy as np\n'), ((2452, 2466), 'numpy.dot', 'np.dot', (['P.T', 'w'], {}), '(P.T, w)\n', (2458, 2466), True, 'import numpy as np\n'), ((3292, 3306), 'numpy.dot', 'np.dot', (['P.T', 'u'], {}), '(P.T, u)\n', (3298, 3306), True, 'import numpy as np\n'), ((5041, 5055), 'numpy.linalg.eigvals', 'la.eigvals', (['A3'], {}), '(A3)\n', (5051, 5055), True, 'from numpy import linalg as la\n'), ((7099, 7147), 'numpy.linalg.svd', 'np.linalg.svd', (['basis[:, :-1]'], {'full_matrices': '(True)'}), '(basis[:, :-1], full_matrices=True)\n', (7112, 7147), True, 'import numpy as np\n'), ((8241, 8258), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {}), '(v)\n', (8255, 8258), True, 'import numpy as np\n'), ((16923, 16940), 'numpy.multiply', 'np.multiply', (['Q', 'u'], {}), '(Q, u)\n', (16934, 16940), True, 'import numpy as np\n'), ((19693, 19720), 'numpy.expand_dims', 'np.expand_dims', (['P[:, -1]', '(1)'], {}), '(P[:, -1], 1)\n', (19707, 19720), True, 'import numpy as np\n'), ((20179, 20199), 
'Utils.UPPER_BOUND', 'Utils.UPPER_BOUND', (['r'], {}), '(r)\n', (20196, 20199), False, 'import Utils\n'), ((23050, 23067), 'numpy.array', 'np.array', (['indices'], {}), '(indices)\n', (23058, 23067), True, 'import numpy as np\n'), ((24002, 24026), 'numpy.array', 'np.array', (['points_indices'], {}), '(points_indices)\n', (24010, 24026), True, 'import numpy as np\n'), ((34679, 34706), 'numpy.vstack', 'np.vstack', (['list_of_coresets'], {}), '(list_of_coresets)\n', (34688, 34706), True, 'import numpy as np\n'), ((37292, 37303), 'numpy.where', 'np.where', (['C'], {}), '(C)\n', (37300, 37303), True, 'import numpy as np\n'), ((37620, 37641), 'numpy.arange', 'np.arange', (['P.shape[0]'], {}), '(P.shape[0])\n', (37629, 37641), True, 'import numpy as np\n'), ((39376, 39397), 'numpy.arange', 'np.arange', (['P.shape[0]'], {}), '(P.shape[0])\n', (39385, 39397), True, 'import numpy as np\n'), ((41240, 41256), 'numpy.arange', 'np.arange', (['(10000)'], {}), '(10000)\n', (41249, 41256), True, 'import numpy as np\n'), ((2408, 2422), 'numpy.dot', 'np.dot', (['P.T', 'w'], {}), '(P.T, w)\n', (2414, 2422), True, 'import numpy as np\n'), ((3248, 3262), 'numpy.dot', 'np.dot', (['P.T', 'u'], {}), '(P.T, u)\n', (3254, 3262), True, 'import numpy as np\n'), ((31462, 31495), 'numpy.sum', 'np.sum', (['cost_per_point_in_cluster'], {}), '(cost_per_point_in_cluster)\n', (31468, 31495), True, 'import numpy as np\n'), ((8339, 8359), 'numpy.linalg.norm', 'np.linalg.norm', (['v', '(2)'], {}), '(v, 2)\n', (8353, 8359), True, 'import numpy as np\n'), ((9888, 9912), 'numpy.multiply', 'np.multiply', (['Q', 'weights2'], {}), '(Q, weights2)\n', (9899, 9912), True, 'import numpy as np\n'), ((20141, 20162), 'numpy.arange', 'np.arange', (['Q.shape[0]'], {}), '(Q.shape[0])\n', (20150, 20162), True, 'import numpy as np\n'), ((24027, 24044), 'numpy.argsort', 'np.argsort', (['dists'], {}), '(dists)\n', (24037, 24044), True, 'import numpy as np\n'), ((28157, 28169), 'numpy.vstack', 'np.vstack', (['S'], {}), 
'(S)\n', (28166, 28169), True, 'import numpy as np\n'), ((33424, 33448), 'numpy.arange', 'np.arange', (['B[0].shape[0]'], {}), '(B[0].shape[0])\n', (33433, 33448), True, 'import numpy as np\n'), ((37261, 37272), 'numpy.where', 'np.where', (['C'], {}), '(C)\n', (37269, 37272), True, 'import numpy as np\n'), ((37858, 37879), 'numpy.arange', 'np.arange', (['Q.shape[0]'], {}), '(Q.shape[0])\n', (37867, 37879), True, 'import numpy as np\n'), ((23068, 23085), 'numpy.argsort', 'np.argsort', (['dists'], {}), '(dists)\n', (23078, 23085), True, 'import numpy as np\n'), ((27593, 27608), 'numpy.log', 'np.log', (['Utils.K'], {}), '(Utils.K)\n', (27599, 27608), True, 'import numpy as np\n'), ((33856, 33871), 'numpy.log', 'np.log', (['(1.0 / c)'], {}), '(1.0 / c)\n', (33862, 33871), True, 'import numpy as np\n'), ((34553, 34577), 'numpy.arange', 'np.arange', (['B[i].shape[0]'], {}), '(B[i].shape[0])\n', (34562, 34577), True, 'import numpy as np\n'), ((34788, 34809), 'numpy.arange', 'np.arange', (['Q.shape[0]'], {}), '(Q.shape[0])\n', (34797, 34809), True, 'import numpy as np\n'), ((34960, 34998), 'numpy.vstack', 'np.vstack', (['(V, point[np.newaxis, :-1])'], {}), '((V, point[np.newaxis, :-1]))\n', (34969, 34998), True, 'import numpy as np\n'), ((37203, 37223), 'numpy.where', 'np.where', (['(C == False)'], {}), '(C == False)\n', (37211, 37223), True, 'import numpy as np\n'), ((20446, 20466), 'numpy.argmin', 'np.argmin', (['Q[:, :-1]'], {}), '(Q[:, :-1])\n', (20455, 20466), True, 'import numpy as np\n'), ((20519, 20539), 'numpy.argmax', 'np.argmax', (['Q[:, :-1]'], {}), '(Q[:, :-1])\n', (20528, 20539), True, 'import numpy as np\n'), ((22757, 22779), 'numpy.array', 'np.array', (['[indices[i]]'], {}), '([indices[i]])\n', (22765, 22779), True, 'import numpy as np\n'), ((27497, 27516), 'numpy.log', 'np.log', (['(Utils.J + 1)'], {}), '(Utils.J + 1)\n', (27503, 27516), True, 'import numpy as np\n'), ((27699, 27708), 'numpy.log', 'np.log', (['n'], {}), '(n)\n', (27705, 27708), True, 
'import numpy as np\n'), ((33844, 33853), 'numpy.log', 'np.log', (['M'], {}), '(M)\n', (33850, 33853), True, 'import numpy as np\n'), ((33911, 34005), 'numpy.logical_and', 'np.logical_and', (['(2 ** (i - 1) * d_0 <= dists_from_P_to_A)', '(dists_from_P_to_A <= 2 ** i * d_0)'], {}), '(2 ** (i - 1) * d_0 <= dists_from_P_to_A, dists_from_P_to_A <=\n 2 ** i * d_0)\n', (33925, 34005), True, 'import numpy as np\n'), ((34240, 34264), 'numpy.arange', 'np.arange', (['B[i].shape[0]'], {}), '(B[i].shape[0])\n', (34249, 34264), True, 'import numpy as np\n')] |
#
# Defines data that is consumed by the header2whatever hooks/templates
# to modify the generated files
#
import enum
from typing import Dict, List, Tuple, Optional
from pydantic import validator
from .util import Model, _generating_documentation
class ParamData(Model):
    """Various ways to modify parameters"""

    #: Set the parameter name to this instead of the name in the header
    name: Optional[str] = None

    #: Change the C++ type emitted for this parameter
    x_type: Optional[str] = None

    #: Default value for the parameter
    default: Optional[str] = None

    #: Disables a default cast caused by ``default_arg_cast``
    disable_type_caster_default_cast: bool = False

    #: Force this to be an 'out' parameter
    #:
    #: .. seealso:: :ref:`autowrap_out_params`
    #:
    force_out: bool = False

    #: Force an array size
    array_size: Optional[int] = None

    #: Ignore this parameter
    ignore: bool = False
class BufferType(str, enum.Enum):
    """Access requirement imposed on a python buffer passed to a wrapped function"""

    #: The buffer must indicate that it is readable (such as bytes, or bytearray)
    IN = "in"

    #: The buffer must indicate that it is writeable (such as a bytearray)
    OUT = "out"

    #: The buffer must indicate that it is readable or writeable (such as a bytearray)
    INOUT = "inout"
class BufferData(Model):
    """Specifies that a python buffer object should fulfill a C++ pointer/length parameter pair"""

    #: Indicates what type of python buffer is required
    type: BufferType

    #: Name of C++ parameter that the buffer will use
    src: str

    #: Name of the C++ length parameter. An out-only parameter, it will be set
    #: to the size of the python buffer, and will be returned so the caller can
    #: determine how many bytes were written
    len: str

    #: If specified, the minimum size of the python buffer
    minsz: Optional[int] = None
class ReturnValuePolicy(enum.Enum):
    """
    See `pybind11 documentation <https://pybind11.readthedocs.io/en/stable/advanced/functions.html#return-value-policies>`_
    for what each of these values mean.
    """

    TAKE_OWNERSHIP = "take_ownership"
    COPY = "copy"
    MOVE = "move"
    REFERENCE = "reference"
    REFERENCE_INTERNAL = "reference_internal"
    AUTOMATIC = "automatic"
    AUTOMATIC_REFERENCE = "automatic_reference"
class FunctionData(Model):
    """
    Customize the way the autogenerator binds a function.

    .. code-block:: yaml

       functions:
         # for non-overloaded functions, just specify the name + customizations
         name_of_non_overloaded_fn:
           # add customizations for function here

         # For overloaded functions, specify the name, but each overload
         # separately
         my_overloaded_fn:
           overloads:
             int, int:
               # customizations for `my_overloaded_fn(int, int)`
             int, int, int:
               # customizations for `my_overloaded_fn(int, int, int)`
    """

    #: If True, don't wrap this
    ignore: bool = False
    #: If True, don't wrap this, but provide a pure virtual implementation
    ignore_pure: bool = False
    #: Generate this in an `#ifdef`
    ifdef: Optional[str] = None
    #: Generate this in an `#ifndef`
    ifndef: Optional[str] = None
    #: Use this code instead of the generated code
    cpp_code: Optional[str] = None
    #: Docstring for the function, will attempt to convert Doxygen docs if omitted
    doc: Optional[str] = None
    #: Text to append to the (autoconverted) docstring for the function
    doc_append: Optional[str] = None
    #: If True, prepends an underscore to the python name
    internal: bool = False
    #: Use this to set the name of the function as exposed to python
    rename: Optional[str] = None
    #: Mechanism to override individual parameters
    param_override: Dict[str, ParamData] = {}
    #: If specified, put the function in a sub.pack.age
    subpackage: Optional[str] = None
    #: By default, robotpy-build will release the GIL whenever a wrapped
    #: function is called.
    no_release_gil: Optional[bool] = None
    #: Buffer parameter specifications (see :class:`BufferData`)
    buffers: List[BufferData] = []
    #: Per-overload customization, keyed by the comma-separated parameter
    #: type signature (see the class docstring example)
    overloads: Dict[str, "FunctionData"] = {}
    #: Adds py::keep_alive<x,y> to the function. Overrides automatic
    #: keepalive support, which retains references passed to constructors.
    #: https://pybind11.readthedocs.io/en/stable/advanced/functions.html#keep-alive
    keepalive: Optional[List[Tuple[int, int]]] = None
    #: https://pybind11.readthedocs.io/en/stable/advanced/functions.html#return-value-policies
    return_value_policy: ReturnValuePolicy = ReturnValuePolicy.AUTOMATIC
    #: If this is a function template, this is a list of instantiations
    #: that you wish to provide. This is a list of lists, where the inner
    #: list is the template parameters for that function
    template_impls: Optional[List[List[str]]] = None
    #: Specify a transformation lambda to be used when this virtual function
    #: is called from C++. This inline code should be a lambda that has the same
    #: arguments as the original C++ virtual function, except the first argument
    #: will be a py::function with the python overload
    #:
    #: cpp_code should also be specified for this to be useful
    #:
    #: For example, to transform a function that takes an iostream into a function
    #: that returns a string:
    #:
    #: .. code-block:: yaml
    #:
    #:    cpp_code: |
    #:      [](MyClass* self) {
    #:        return "string";
    #:      }
    #:    virtual_xform: |
    #:      [](py::function fn, MyClass* self, std::iostream &is) {
    #:         std::string d = py::cast(fn());
    #:         is << d;
    #:      }
    #:
    virtual_xform: Optional[str] = None

    @validator("overloads", pre=True)
    def validate_overloads(cls, value):
        # A YAML key with no value parses as None; treat that as
        # "use all defaults" by substituting an empty FunctionData.
        for k, v in value.items():
            if v is None:
                value[k] = FunctionData()
        return value
# Resolve the self-referential Dict[str, "FunctionData"] annotation.
# Skipped while generating docs — presumably forward refs cannot be
# resolved in that mode; confirm against util._generating_documentation.
if not _generating_documentation:
    FunctionData.update_forward_refs()
class PropAccess(enum.Enum):
    """Controls what kind of python access is generated for a property."""

    #: Determine read/read-write automatically:
    #:
    #: * If a struct/union, default to readwrite
    #: * If a class, default to readwrite if a basic type that isn't a
    #:   reference, otherwise default to readonly
    AUTOMATIC = "auto"

    #: Allow python users access to the value, but ensure it can't
    #: change. This is useful for properties that are defined directly
    #: in the class
    READONLY = "readonly"

    #: Allows python users to read/write the value
    READWRITE = "readwrite"
class PropData(Model):
    """Customize how a member variable/attribute is bound."""

    #: If set to True, this property is not made available to python
    ignore: bool = False
    #: Set the python name of this property to the specified string
    #: (explicit ``= None`` added for consistency with the other models;
    #: pydantic already treats a bare Optional field as defaulting to None,
    #: so behavior is unchanged)
    rename: Optional[str] = None
    #: Python code access to this property
    access: PropAccess = PropAccess.AUTOMATIC
    #: Docstring for the property (only available on class properties)
    doc: Optional[str] = None
    #: Text to append to the (autoconverted) docstring
    doc_append: Optional[str] = None
class EnumValue(Model):
    """Customize how a single enum value is bound (see :attr:`EnumData.values`)."""

    #: If set to True, this property is not made available to python
    ignore: bool = False
    #: Set the python name of this enum value to the specified string
    rename: Optional[str] = None
    #: Docstring for the enum value
    doc: Optional[str] = None
    #: Text to append to the (autoconverted) docstring
    doc_append: Optional[str] = None
class EnumData(Model):
    """Customize how a C++ enum is bound."""

    #: Set your own docstring for the enum
    doc: Optional[str] = None
    #: Text to append to the (autoconverted) docstring
    doc_append: Optional[str] = None
    #: If set to True, this property is not made available to python
    ignore: bool = False
    #: Set the python name of this enum to the specified string
    rename: Optional[str] = None
    #: Prefix stripped from each value name — assumed from the field name;
    #: TODO confirm against the generator templates
    value_prefix: Optional[str] = None
    #: If specified, put the enum in a sub.pack.age (ignored for
    #: enums that are part of classes)
    subpackage: Optional[str] = None
    #: Per-value customization, keyed by the C++ value name
    values: Dict[str, EnumValue] = {}
class ClassData(Model):
    """Customize how a C++ class is bound (keyed by class name in
    :attr:`HooksDataYaml.classes`)."""

    #: Docstring for the class
    doc: Optional[str] = None
    #: Text to append to the (autoconverted) docstring
    doc_append: Optional[str] = None
    #: If True, don't bind this class at all
    ignore: bool = False
    #: Base classes to omit from the generated bindings
    ignored_bases: List[str] = []
    #: Specify fully qualified names for the bases
    base_qualnames: Dict[str, str] = {}
    #: Per-attribute customization (see :class:`PropData`)
    attributes: Dict[str, PropData] = {}
    #: Per-enum customization (see :class:`EnumData`)
    enums: Dict[str, EnumData] = {}
    #: Per-method customization (see :class:`FunctionData`)
    methods: Dict[str, FunctionData] = {}
    is_polymorphic: bool = False
    force_no_trampoline: bool = False
    force_no_default_constructor: bool = False
    #: pybind11 will detect multiple inheritance automatically if a
    #: class directly derives from multiple classes. However,
    #: If the class derives from classes that participate in multiple
    #: inheritance, pybind11 won't detect it automatically, so this
    #: flag is needed.
    force_multiple_inheritance: bool = False
    #: If there are circular dependencies, this will help you resolve them
    #: manually. TODO: make it so we don't need this
    force_depends: List[str] = []
    #: Use this to bring in type casters for a particular type that may have
    #: been hidden (for example, with a typedef or definition in another file),
    #: instead of explicitly including the header. This should be the full
    #: namespace of the type.
    force_type_casters: List[str] = []
    #: If the object shouldn't be deleted by pybind11, use this. Disables
    #: implicit constructors.
    nodelete: bool = False
    #: Set the python name of the class to this
    rename: Optional[str] = None
    #: This is deprecated and has no effect
    shared_ptr: bool = True
    #: If specified, put the class in a sub.pack.age. Ignored
    #: for functions attached to a class. When template parameters
    #: are used, must define subpackage on template instances
    #: instead
    subpackage: Optional[str] = None
    #: Extra 'using' directives to insert into the trampoline and the
    #: wrapping scope
    typealias: List[str] = []
    #: Extra constexpr to insert into the trampoline and wrapping scopes
    constants: List[str] = []
    #: If this is a template class, a list of the parameters if it can't
    #: be autodetected (currently can't autodetect). If there is no space
    #: in the parameter, then it is assumed to be a 'typename', otherwise
    #: the parameter is split by space and the first item is the type and
    #: the second parameter is the name (useful for integral templates)
    template_params: Optional[List[str]] = None
    #: If this is a template class, the specified C++ code is inserted
    #: into the template definition
    template_inline_code: str = ""
    #: If this class has an associated trampoline, add this code inline at
    #: the bottom of the trampoline class. This is rarely useful.
    trampoline_inline_code: Optional[str] = None

    @validator("attributes", pre=True)
    def validate_attributes(cls, value):
        # Empty YAML values parse as None -> substitute default PropData.
        for k, v in value.items():
            if v is None:
                value[k] = PropData()
        return value

    @validator("enums", pre=True)
    def validate_enums(cls, value):
        # Empty YAML values parse as None -> substitute default EnumData.
        for k, v in value.items():
            if v is None:
                value[k] = EnumData()
        return value

    @validator("methods", pre=True)
    def validate_methods(cls, value):
        # Empty YAML values parse as None -> substitute default FunctionData.
        for k, v in value.items():
            if v is None:
                value[k] = FunctionData()
        return value
class TemplateData(Model):
    """
    Instantiates a template as a python type. To customize the class,
    add it to the ``classes`` key and specify the template type.

    Code to be wrapped:

    .. code-block:: c++

       template <typename T>
       class MyClass {};

    To bind ``MyClass<int>`` as the python class ``MyIntClass``, add this
    to your YAML:

    .. code-block:: yaml

       classes:
         MyClass:
           template_params:
           - T

       templates:
         MyIntClass:
           qualname: MyClass
           params:
           - int
    """

    #: Fully qualified name of instantiated class
    qualname: str
    #: Template parameters to use
    params: List[str]
    #: If specified, put the template instantiation in a sub.pack.age
    subpackage: Optional[str] = None
    #: Set the docstring for the template instance
    doc: Optional[str] = None
    #: Text to append to the (autoconverted) docstring for the template instance
    doc_append: Optional[str] = None
class HooksDataYaml(Model):
    """
    Format of the file in [tool.robotpy-build.wrappers."PACKAGENAME"]
    generation_data
    """

    #: Name prefixes to strip — assumed (from the field name) to apply when
    #: deriving python names; TODO confirm against the generator templates
    strip_prefixes: List[str] = []
    #: Adds ``#include <FILENAME>`` directives to the top of the autogenerated
    #: C++ file, after autodetected include dependencies are inserted.
    extra_includes: List[str] = []
    #: Adds ``#include <FILENAME>`` directives after robotpy_build.h is
    #: included, but before any autodetected include dependencies. Only use
    #: this when dealing with broken headers.
    extra_includes_first: List[str] = []
    #: Specify raw C++ code that will be inserted at the end of the
    #: autogenerated file, inside a function. This is useful for extending
    #: your classes or providing other customizations. The following C++
    #: variables are available:
    #:
    #: * ``m`` is the ``py::module`` instance
    #: * ``cls_CLASSNAME`` are ``py::class`` instances
    #: * ... lots of other things too
    #:
    #: The trampoline class (useful for accessing protected items) is available
    #: at ``{CLASSNAME}_Trampoline``
    #:
    #: To see the full list, run a build and look at the generated code at
    #: ``build/*/gensrc/**/*.cpp``
    #:
    #: Recommend that you use the YAML multiline syntax to specify it:
    #:
    #: .. code-block:: yaml
    #:
    #:    inline_code: |
    #:      cls_CLASSNAME.def("get42", []() { return 42; });
    inline_code: Optional[str] = None
    #: Key is the attribute (variable) name
    #:
    #: .. code-block:: yaml
    #:
    #:    attributes:
    #:      my_variable:
    #:        # customizations here, see PropData
    #:
    attributes: Dict[str, PropData] = {}
    #: Key is the class name
    #:
    #: .. code-block:: yaml
    #:
    #:    classes:
    #:      CLASSNAME:
    #:        # customizations here, see ClassData
    #:
    classes: Dict[str, ClassData] = {}
    #: Key is the function name
    #:
    #: .. code-block:: yaml
    #:
    #:    functions:
    #:      fn_name:
    #:        # customizations here, see FunctionData
    #:
    functions: Dict[str, FunctionData] = {}
    #: Key is the enum name, for enums at global scope
    #:
    #: .. code-block:: yaml
    #:
    #:    enums:
    #:      MyEnum:
    #:        # customizations here, see EnumData
    #:
    enums: Dict[str, EnumData] = {}
    #: Instantiates a template. Key is the name to give to the Python type.
    #:
    #: .. code-block:: yaml
    #:
    #:    templates:
    #:      ClassName:
    #:        # customizations here, see TemplateData
    #:
    templates: Dict[str, TemplateData] = {}

    @validator("attributes", pre=True)
    def validate_attributes(cls, value):
        # Empty YAML values parse as None -> substitute default PropData.
        for k, v in value.items():
            if v is None:
                value[k] = PropData()
        return value

    @validator("classes", pre=True)
    def validate_classes(cls, value):
        # Empty YAML values parse as None -> substitute default ClassData.
        for k, v in value.items():
            if v is None:
                value[k] = ClassData()
        return value

    @validator("enums", pre=True)
    def validate_enums(cls, value):
        # Empty YAML values parse as None -> substitute default EnumData.
        for k, v in value.items():
            if v is None:
                value[k] = EnumData()
        return value

    @validator("functions", pre=True)
    def validate_functions(cls, value):
        # Empty YAML values parse as None -> substitute default FunctionData.
        for k, v in value.items():
            if v is None:
                value[k] = FunctionData()
        return value
| [
"pydantic.validator"
] | [((5578, 5610), 'pydantic.validator', 'validator', (['"""overloads"""'], {'pre': '(True)'}), "('overloads', pre=True)\n", (5587, 5610), False, 'from pydantic import validator\n'), ((10774, 10807), 'pydantic.validator', 'validator', (['"""attributes"""'], {'pre': '(True)'}), "('attributes', pre=True)\n", (10783, 10807), False, 'from pydantic import validator\n'), ((10975, 11003), 'pydantic.validator', 'validator', (['"""enums"""'], {'pre': '(True)'}), "('enums', pre=True)\n", (10984, 11003), False, 'from pydantic import validator\n'), ((11166, 11196), 'pydantic.validator', 'validator', (['"""methods"""'], {'pre': '(True)'}), "('methods', pre=True)\n", (11175, 11196), False, 'from pydantic import validator\n'), ((15033, 15066), 'pydantic.validator', 'validator', (['"""attributes"""'], {'pre': '(True)'}), "('attributes', pre=True)\n", (15042, 15066), False, 'from pydantic import validator\n'), ((15234, 15264), 'pydantic.validator', 'validator', (['"""classes"""'], {'pre': '(True)'}), "('classes', pre=True)\n", (15243, 15264), False, 'from pydantic import validator\n'), ((15430, 15458), 'pydantic.validator', 'validator', (['"""enums"""'], {'pre': '(True)'}), "('enums', pre=True)\n", (15439, 15458), False, 'from pydantic import validator\n'), ((15621, 15653), 'pydantic.validator', 'validator', (['"""functions"""'], {'pre': '(True)'}), "('functions', pre=True)\n", (15630, 15653), False, 'from pydantic import validator\n')] |
# -*- coding: utf-8 -*-
"""
Module that implements the questions types
"""
import json
from . import errors
def question_factory(kind, *args, **kwargs):
    """Instantiate the Question subclass whose ``kind`` attribute matches.

    :raises errors.UnknownQuestionTypeError: if *kind* names no known type.
    """
    known_types = (Text, Password, Confirm, List, Checkbox)
    matching = next((cls for cls in known_types if cls.kind == kind), None)
    if matching is None:
        raise errors.UnknownQuestionTypeError()
    return matching(*args, **kwargs)
def load_from_dict(question_dict):
    """
    Load one question from a dict.

    It requires the keys 'name' and 'kind'; all other keys are passed to
    the matching Question subclass's constructor.

    :return: The Question object with associated data.
    :return type: Question
    """
    return question_factory(**question_dict)
def load_from_list(question_list):
    """
    Load a list of questions from a list of dicts.

    It requires the keys 'name' and 'kind' for each dict.

    :return: A list of Question objects with associated data.
    :return type: List
    """
    return [load_from_dict(q) for q in question_list]
def load_from_json(question_json):
    """
    Load Questions from a JSON string.

    :return: A list of Question objects with associated data if the JSON
             contains a list or a Question if the JSON contains a dict.
    :return type: List or Dict
    :raises TypeError: if the top-level JSON value is neither list nor dict.
    """
    data = json.loads(question_json)
    if isinstance(data, list):
        return load_from_list(data)
    if isinstance(data, dict):
        return load_from_dict(data)
    # Bug fix: TypeError does not %-interpolate extra arguments the way
    # logging functions do, so the message must be formatted explicitly.
    raise TypeError(
        'Json contained a %s variable when a dict or list was expected'
        % type(data))
class TaggedValue(object):
    """A choice with a human-readable label and an underlying value.

    ``str()`` yields the label (what is displayed); equality compares the
    underlying value, so a TaggedValue compares equal to its raw value.
    """

    def __init__(self, label, value):
        self.label = label
        self.value = value

    def __str__(self):
        return self.label

    def __repr__(self):
        return self.value

    # Bug fix: the original defined __cmp__, which Python 3 ignores and
    # which returned a boolean instead of -1/0/1 anyway.  Implement the
    # intended value-based equality explicitly.
    def __eq__(self, other):
        if isinstance(other, TaggedValue):
            return self.value == other.value
        return self.value == other

    def __ne__(self, other):
        return not self.__eq__(other)

    # Defining __eq__ would otherwise make instances unhashable; keep the
    # original identity-based hashing behavior.
    __hash__ = object.__hash__
class Question(object):
    """Base class for all question kinds.

    ``message``, ``choices``, ``default``, ``ignore`` and ``validate`` may
    be literals, ``str.format``-style templates, or callables taking the
    current ``answers`` dict; they are resolved lazily via ``_solve``.
    """

    kind = 'base question'

    def __init__(self,
                 name,
                 message='',
                 choices=None,
                 default=None,
                 ignore=False,
                 validate=True):
        self.name = name
        self._message = message
        self._choices = choices or []
        self._default = default
        self._ignore = ignore
        self._validate = validate
        self.answers = {}

    @property
    def ignore(self):
        """Whether the question should be skipped, resolved against answers."""
        return bool(self._solve(self._ignore))

    @property
    def message(self):
        """The prompt text, resolved against the current answers."""
        return self._solve(self._message)

    @property
    def default(self):
        """The default answer, resolved against the current answers."""
        return self._solve(self._default)

    @property
    def choices_generator(self):
        """Yield choices; ``(label, value)`` pairs become TaggedValue."""
        for choice in self._solve(self._choices):
            is_pair = isinstance(choice, tuple) and len(choice) == 2
            yield TaggedValue(*choice) if is_pair else choice

    @property
    def choices(self):
        """The resolved choices as a list."""
        return list(self.choices_generator)

    def validate(self, current):
        """Raise errors.ValidationError unless *current* passes validation."""
        try:
            ok = self._solve(self._validate, current)
        except Exception:
            ok = False
        if not ok:
            raise errors.ValidationError(current)

    def _solve(self, prop, *args, **kwargs):
        # Callables receive the answers (plus any extra args); strings are
        # treated as format templates over the answers; anything else is
        # returned unchanged.
        if callable(prop):
            return prop(self.answers, *args, **kwargs)
        if isinstance(prop, str):
            return prop.format(**self.answers)
        return prop
class Text(Question):
    """Question of kind ``'text'``."""
    kind = 'text'
class Password(Question):
    """Question of kind ``'password'``."""
    kind = 'password'
class Confirm(Question):
    """Yes/no question of kind ``'confirm'``."""
    kind = 'confirm'

    def __init__(self, name, default=False, **kwargs):
        # Unlike the base class, the default answer is False instead of None.
        super(Confirm, self).__init__(name, default=default, **kwargs)
class List(Question):
    """Question of kind ``'list'``."""
    kind = 'list'
class Checkbox(Question):
    """Question of kind ``'checkbox'``."""
    kind = 'checkbox'
| [
"json.loads"
] | [((1173, 1198), 'json.loads', 'json.loads', (['question_json'], {}), '(question_json)\n', (1183, 1198), False, 'import json\n')] |
import os
import json
import string
from tkinter import filedialog, simpledialog
from tkinter import *
class CsvImporter(object):
    """Parse a translations CSV into per-language dictionaries.

    The first row is a header: column 0 holds the key column's title and
    every following column names a language.  Each subsequent row maps the
    key in column 0 to that key's translation in each language column.
    """

    def __init__(self):
        self.csv_data = None   # dict: language -> {key: translation}
        self.languages = []    # language codes taken from the header row

    def import_csv(self, csv_filename):
        """Read *csv_filename* and return ``{language: {key: value}}``.

        Also stores the result on ``self.csv_data`` and the header
        languages on ``self.languages``.
        """
        with open(csv_filename, 'r') as file:
            self.csv_data = {}
            for row_number, line in enumerate(file):
                # Create list of line items.
                line_items = [x.strip() for x in line.split(',')]
                if row_number == 0:
                    # Header row: every column after the first is a language.
                    self.languages = line_items[1:]
                    for language in self.languages:
                        self.csv_data[language] = {}
                else:
                    # Data row: key in column 0, one value per language.
                    # (Bug fix: the inner loop used to shadow the outer
                    # enumerate variable `key`.)
                    for column, language in enumerate(self.languages):
                        try:
                            self.csv_data[language][line_items[0]] = \
                                line_items[column + 1]
                        except IndexError:
                            # Row has fewer columns than languages; skip.
                            pass
        return self.csv_data
class JsonEditor(object):
    """Load, transform, and save JSON documents."""

    def import_json(self, json_filename):
        """Return the parsed contents of *json_filename*."""
        with open(json_filename) as file:
            return json.load(file)

    def export_new_json(self, output_filename, json_data):
        """Serialize *json_data* as JSON into *output_filename*."""
        # Bug fix: use a context manager so the handle is closed even if
        # serialization or the write raises (the original leaked it).
        with open(output_filename, "w") as f:
            f.write(json.dumps(json_data))

    def update_json(self, input_json, target_key, target_value, update_value):
        """Recursively replace values in *input_json*.

        Wherever a dict maps ``target_key`` to ``target_value``, the value
        is replaced with *update_value* (only if *update_value* is truthy).

        NOTE: mutates *input_json* in place and also returns it.
        """
        if isinstance(input_json, dict):
            for key, value in input_json.items():
                if key == target_key and value == target_value and update_value:
                    input_json[key] = update_value
                # Recurse into any nested list or dictionary.
                self.update_json(input_json[key], target_key, target_value,
                                 update_value)
        elif isinstance(input_json, list):
            for entity in input_json:
                self.update_json(entity, target_key, target_value,
                                 update_value)
        return input_json
if __name__ == '__main__':
    # Interactive driver: collect the input files and settings through Tk
    # dialogs, then write one translated JSON file per language in the CSV.
    root = Tk()
    root.csv_filename = filedialog.askopenfilename(
        title="Select CSV file with translations",
        filetypes=(("CSV Files", "*.csv"),)
    )
    root.json_filename = filedialog.askopenfilename(
        title="Select master JSON file to build tranlated JSON files",
        filetypes=(("JSON Files", "*.json"), ("All Files", "*.*"))
    )
    target_key = simpledialog.askstring(
        "Input",
        "What is the target key for the values we are replacing?",
        initialvalue="title"
    )
    base_output_filename = simpledialog.askstring(
        "Input",
        "What would you like the base file to be named?"
    )
    # Import CSV.
    csv = CsvImporter()
    csv_data = csv.import_csv(root.csv_filename)
    # Import JSON.
    make_json = JsonEditor()
    # Make changes per language.
    for language in csv_data:
        # Re-read the master JSON for each language so replacements from
        # one language do not leak into the next (update_json mutates).
        input_json = make_json.import_json(root.json_filename)
        for key, value in csv_data[language].items():
            updated_json = make_json.update_json(input_json, target_key, key, value)
        # Create filename per language.
        language_filename = base_output_filename + "_" + language + ".json"
        made_json = make_json.export_new_json(language_filename, updated_json)
    # Finished.
    print("Success!")
| [
"json.load",
"tkinter.simpledialog.askstring",
"json.dumps",
"tkinter.filedialog.askopenfilename"
] | [((2951, 3061), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'title': '"""Select CSV file with translations"""', 'filetypes': "(('CSV Files', '*.csv'),)"}), "(title='Select CSV file with translations',\n filetypes=(('CSV Files', '*.csv'),))\n", (2977, 3061), False, 'from tkinter import filedialog, simpledialog\n'), ((3110, 3269), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'title': '"""Select master JSON file to build tranlated JSON files"""', 'filetypes': "(('JSON Files', '*.json'), ('All Files', '*.*'))"}), "(title=\n 'Select master JSON file to build tranlated JSON files', filetypes=((\n 'JSON Files', '*.json'), ('All Files', '*.*')))\n", (3136, 3269), False, 'from tkinter import filedialog, simpledialog\n'), ((3298, 3419), 'tkinter.simpledialog.askstring', 'simpledialog.askstring', (['"""Input"""', '"""What is the target key for the values we are replacing?"""'], {'initialvalue': '"""title"""'}), "('Input',\n 'What is the target key for the values we are replacing?', initialvalue\n ='title')\n", (3320, 3419), False, 'from tkinter import filedialog, simpledialog\n'), ((3457, 3542), 'tkinter.simpledialog.askstring', 'simpledialog.askstring', (['"""Input"""', '"""What would you like the base file to be named?"""'], {}), "('Input',\n 'What would you like the base file to be named?')\n", (3479, 3542), False, 'from tkinter import filedialog, simpledialog\n'), ((1734, 1755), 'json.dumps', 'json.dumps', (['json_data'], {}), '(json_data)\n', (1744, 1755), False, 'import json\n'), ((1532, 1547), 'json.load', 'json.load', (['file'], {}), '(file)\n', (1541, 1547), False, 'import json\n')] |
import logic
import numpy as np
import gym
# Maps discrete gym action indices onto the move names understood by
# the ``logic`` module.
ACTION_MAP = {
    0: 'up',
    1: 'down',
    2: 'left',
    3: 'right'
}
class Env2048(gym.Env):
    """Gym environment for the 2048 puzzle, backed by the ``logic`` module.

    The observation is the n x n board one-hot encoded over tile exponents
    and scaled to uint8 (0/255); actions are 0=up, 1=down, 2=left, 3=right
    (string names are also accepted).
    """

    metadata = {'render.modes': ['human']}

    def __init__(self, n=4, max_idle=100, seed=None):
        """
        :param n: board side length (board is n x n)
        :param max_idle: max steps without a rewarding move before the
            episode is terminated
        :param seed: optional RNG seed
        """
        super(Env2048, self).__init__()
        self.n = n
        self.max_idle = max_idle
        self.action_map = ACTION_MAP
        # up, down, left, right
        self.action_space = gym.spaces.Discrete(4)
        self.observation_space = gym.spaces.Box(
            low=0, high=255,
            shape=(self.n, self.n, 2 ** n), dtype=np.uint8)
        self.eye = np.eye(2 ** n)  # lookup table for one-hot encoding
        self.reward_range = (float('-inf'), float('inf'))
        if seed is not None:
            self.seed(seed)

    def seed(self, seed):
        # NOTE: seeds the global NumPy RNG, not a per-environment RNG.
        np.random.seed(seed)

    def reset(self):
        """Start a new game and return the initial observation."""
        self.matrix = logic.new_game(self.n)
        # i = current step, reward_i = step index of the last useful move
        self.reward_i = self.i = 0
        self.total_reward = 0
        return self.obs

    @property
    def obs(self):
        """One-hot uint8 encoding of the board, shape (n, n, 2**n)."""
        m = np.array(self.matrix)
        m = np.clip(m, 1, float('inf'))       # 0,2,4,8,... -> 1,2,4,8,...
        m = np.log2(m).astype(np.int64)      # 1,2,4,...,2048 -> 0,1,2,...,11
        m = self.eye[m]                       # exponent -> one-hot vector
        m = m * 255
        m = m.astype(np.uint8)
        return m

    def step(self, action):
        """Apply one move; return (obs, reward, done, info)."""
        if isinstance(action, str) and action in ('up', 'down', 'left', 'right'):
            pass
        elif isinstance(action, (int, np.int64, np.int32)):
            # Bug fix: this branch was a plain `if`, so valid string
            # actions fell into the `else` below and hit a bare `raise`
            # outside any except clause (RuntimeError).
            action = self.action_map[int(action)]
        else:
            raise ValueError(
                'invalid action: {!r} ({})'.format(action, type(action)))
        old_score = np.sort(np.array(self.matrix).flatten())[::-1]
        old_matrix = str(self.matrix)
        if action == 'up':
            self.matrix, updated = logic.up(self.matrix)
        elif action == 'down':
            self.matrix, updated = logic.down(self.matrix)
        elif action == 'left':
            self.matrix, updated = logic.left(self.matrix)
        elif action == 'right':
            self.matrix, updated = logic.right(self.matrix)
        new_matrix = str(self.matrix)
        new_score = np.sort(np.array(self.matrix).flatten())[::-1]
        # Reward: growth of tile values at each sorted rank, scaled by 4.
        reward = np.sum((new_score - old_score) * (new_score >= old_score)) * 4
        reward = float(reward)
        self.total_reward += reward
        self.i += 1
        if updated:  # the board changed
            self.matrix = logic.add_two(self.matrix)
            if logic.game_state(self.matrix) == 'win':
                print('you win')
                return self.obs, 10000.0, True, {'i': self.i, 'ri': self.reward_i, 'tr': self.total_reward}
            elif logic.game_state(self.matrix) == 'lose':
                return self.obs, 100.0, True, {'i': self.i, 'ri': self.reward_i, 'tr': self.total_reward}
        idle = False
        if old_matrix == new_matrix:
            idle = True
        if idle:
            reward = -1
        else:
            self.reward_i = self.i
        if self.i - self.reward_i > self.max_idle:
            # Too many consecutive no-op moves: abort the episode.
            return self.obs, -100, True, {'i': self.i, 'ri': self.reward_i, 'tr': self.total_reward}
        return self.obs, reward, False, {'i': self.i, 'ri': self.reward_i, 'tr': self.total_reward}

    def render(self, mode='human'):
        pass

    def close(self):
        pass
def main():
    """Smoke-test the environment by playing up to 1000 random moves."""
    env = Env2048()
    observation = env.reset()
    print(observation)
    for _ in range(1000):
        action = np.random.choice(['right', 'left', 'up', 'down'])
        observation, reward, done, info = env.step(action)
        print(observation)
        print(reward, done, info)
        if done:
            break


if __name__ == '__main__':
    main()
| [
"numpy.eye",
"logic.left",
"numpy.random.choice",
"logic.game_state",
"gym.spaces.Discrete",
"gym.spaces.Box",
"logic.new_game",
"numpy.array",
"numpy.sum",
"logic.right",
"numpy.random.seed",
"logic.add_two",
"logic.up",
"numpy.log2",
"logic.down"
] | [((434, 456), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(4)'], {}), '(4)\n', (453, 456), False, 'import gym\n'), ((490, 569), 'gym.spaces.Box', 'gym.spaces.Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(self.n, self.n, 2 ** n)', 'dtype': 'np.uint8'}), '(low=0, high=255, shape=(self.n, self.n, 2 ** n), dtype=np.uint8)\n', (504, 569), False, 'import gym\n'), ((614, 628), 'numpy.eye', 'np.eye', (['(2 ** n)'], {}), '(2 ** n)\n', (620, 628), True, 'import numpy as np\n'), ((779, 799), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (793, 799), True, 'import numpy as np\n'), ((844, 866), 'logic.new_game', 'logic.new_game', (['self.n'], {}), '(self.n)\n', (858, 866), False, 'import logic\n'), ((1002, 1023), 'numpy.array', 'np.array', (['self.matrix'], {}), '(self.matrix)\n', (1010, 1023), True, 'import numpy as np\n'), ((1823, 1844), 'logic.up', 'logic.up', (['self.matrix'], {}), '(self.matrix)\n', (1831, 1844), False, 'import logic\n'), ((2240, 2298), 'numpy.sum', 'np.sum', (['((new_score - old_score) * (new_score >= old_score))'], {}), '((new_score - old_score) * (new_score >= old_score))\n', (2246, 2298), True, 'import numpy as np\n'), ((2450, 2476), 'logic.add_two', 'logic.add_two', (['self.matrix'], {}), '(self.matrix)\n', (2463, 2476), False, 'import logic\n'), ((3495, 3544), 'numpy.random.choice', 'np.random.choice', (["['right', 'left', 'up', 'down']"], {}), "(['right', 'left', 'up', 'down'])\n", (3511, 3544), True, 'import numpy as np\n'), ((1114, 1124), 'numpy.log2', 'np.log2', (['m'], {}), '(m)\n', (1121, 1124), True, 'import numpy as np\n'), ((1911, 1934), 'logic.down', 'logic.down', (['self.matrix'], {}), '(self.matrix)\n', (1921, 1934), False, 'import logic\n'), ((2493, 2522), 'logic.game_state', 'logic.game_state', (['self.matrix'], {}), '(self.matrix)\n', (2509, 2522), False, 'import logic\n'), ((2001, 2024), 'logic.left', 'logic.left', (['self.matrix'], {}), '(self.matrix)\n', (2011, 2024), False, 'import logic\n'), 
((2691, 2720), 'logic.game_state', 'logic.game_state', (['self.matrix'], {}), '(self.matrix)\n', (2707, 2720), False, 'import logic\n'), ((1646, 1667), 'numpy.array', 'np.array', (['self.matrix'], {}), '(self.matrix)\n', (1654, 1667), True, 'import numpy as np\n'), ((2092, 2116), 'logic.right', 'logic.right', (['self.matrix'], {}), '(self.matrix)\n', (2103, 2116), False, 'import logic\n'), ((2184, 2205), 'numpy.array', 'np.array', (['self.matrix'], {}), '(self.matrix)\n', (2192, 2205), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 13 12:55:47 2018
@name: CSVMachLearn.py
@description: 1D CNN using CSV vector for machine learning
@author: <NAME>
"""
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from sklearn.decomposition import PCA
import numpy as np
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from tensorflow import set_random_seed
tf.enable_eager_execution()  # TF 1.x: evaluate ops immediately
set_random_seed(0)           # reproducible weight initialization

nrds = 'S0'  # dataset tag; all paths below are derived from it

# ============================================================================
# Global parameters
# ============================================================================
total_dataset_fp = "D:\\AI_experiments\\CSV\\" + nrds + "\\DAT" + nrds + ".csv"
pathlog = "D:\\AI_experiments\\CSV\\" + nrds + "\\" + nrds + "pub.log"
pathimg = "D:\\AI_experiments\\CSV\\" + nrds + "\\IMG"
num_epochs = 1001        # number of epochs
lrate = 2e-5             # learning rate
test_procent = 0.2       # fraction of the dataset held out for testing
learn_batch_size = 32    # batch size

print("Local copy of the dataset file: {}".format(total_dataset_fp))
print("TensorFlow version: {}".format(tf.VERSION))
print("Eager execution: {}".format(tf.executing_eagerly()))
#==============================================================================
# Methods
#==============================================================================
def ChangeBatchSize(dataset, bsize):
    """Flatten *dataset* to individual elements and re-batch at *bsize*."""
    flattened = dataset.apply(tf.data.experimental.unbatch())
    return flattened.batch(batch_size=bsize)
def pack_features_vector(features, labels):
    """Pack the per-column feature tensors into a single (batch, f_size) array."""
    packed = tf.stack(list(features.values()), axis=1)
    return packed, labels
# Read the CSV header to learn the feature-vector length: the first line
# starts with "<tag>,<column count>,..." and the column count minus the
# label column gives the vector size.
with open(total_dataset_fp) as f:
    content = f.readlines()
grup = content[0].split(',')
print(grup[1])
f_size = int(grup[1]) - 1  # number of points in data vector
print("Vector size: " + str(f_size))

# Convolution-stack hyper-parameters (filter count / kernel size per block).
filtr1 = 32
filtr_size1 = 5
filtr2 = 32
filtr_size2 = 5
filtr3 = 64
filtr_size3 = 5
filtr4 = 64
filtr_size4 = 4
DenseLast = 4096
# NOTE(review): filtr5/filtr_size5 appear unused by create_model — confirm.
filtr5 = 512
filtr_size5 = 5
def create_model():
    """Build and compile the 1-D CNN classifier.

    Architecture: four Conv1D+MaxPool blocks, global max pooling, two dense
    layers, and a 3-unit logits output (one per class).  Relies on the
    module-level globals ``f_size``, ``filtr1..4``, ``filtr_size1..4`` and
    ``DenseLast``.
    """
    model = tf.keras.models.Sequential([
        # (batch, f_size) -> (batch, f_size, 1) so Conv1D can consume it
        tf.keras.layers.Reshape((f_size, 1), input_shape=(None, f_size), name='x'),
        tf.keras.layers.Conv1D(filters=filtr1, kernel_size=filtr_size1, strides=1, kernel_initializer='random_uniform', activation=tf.nn.relu, padding='same', name='Conv1'),
        tf.keras.layers.MaxPooling1D(pool_size=filtr_size1, strides=2, padding='same', name='pool1'),
        tf.keras.layers.Conv1D(filters=filtr2, kernel_size=filtr_size2, strides=1, padding='same', name='Conv2', activation=tf.nn.relu, kernel_initializer='random_uniform'),
        tf.keras.layers.MaxPooling1D(pool_size=filtr_size2, strides=2, padding='same', name='pool2'),
        tf.keras.layers.Conv1D(filters=filtr3, kernel_size=filtr_size3, strides=1, padding='same', name='Conv3', activation=tf.nn.relu, kernel_initializer='random_uniform'),
        tf.keras.layers.MaxPooling1D(pool_size=filtr_size3, strides=2, padding='same', name='pool3'),
        tf.keras.layers.Conv1D(filters=filtr4, kernel_size=filtr_size4, strides=1, padding='same', name='Conv4', activation=tf.nn.relu, kernel_initializer='random_uniform'),
        tf.keras.layers.MaxPooling1D(pool_size=filtr_size4, strides=2, padding='same', name='pool4'),
        tf.keras.layers.GlobalMaxPool1D(),  # collapse the time axis; width = size of last filter bank
        tf.keras.layers.Dense(DenseLast, activation=tf.nn.relu, name='fir'),  # input shape required
        tf.keras.layers.Dense(256, activation=tf.nn.relu, name='mod_up'),
        tf.keras.layers.Dense(3, name='y_pred'),  # output layer: raw logits, one per class
    ])
    model.compile(optimizer=tf.train.AdamOptimizer(),
                  loss=tf.keras.losses.sparse_categorical_crossentropy,
                  metrics=['accuracy'])
    return model
def loss(model, x, y):
    """Sparse softmax cross-entropy between labels *y* and ``model(x)`` logits."""
    logits = model(x)
    return tf.losses.sparse_softmax_cross_entropy(labels=y, logits=logits)
def grad(model, inputs, targets):
    """Return (loss, gradients w.r.t. trainable variables) for one batch."""
    with tf.GradientTape() as tape:
        loss_value = loss(model, inputs, targets)
    gradients = tape.gradient(loss_value, model.trainable_variables)
    return loss_value, gradients
mapcolor = ['red', 'green', 'blue']  # one plot color per class

# Column order in CSV file: f_size feature columns named "0".."f_size-1"
# followed by the integer class label in a column named "signal".
column_names = []
for a in range(0, f_size):
    column_names.append(str(a))
column_names.append('signal')
print(len(column_names))

feature_names = column_names[:-1]
label_name = column_names[-1]
# alternative label set: class_names = ['Left','Right','NONE']
class_names = ['LIP', 'JAW', 'NONE']

# Load the whole CSV as a single shuffled batch so it can be re-split below.
batch_size = 200000
total_dataset = tf.data.experimental.make_csv_dataset(
    total_dataset_fp,
    batch_size,
    column_names=column_names,
    label_name=label_name,
    num_epochs=1,
    shuffle=True)
features, labels = next(iter(total_dataset))
# NOTE(review): float(str(...)) — the str() conversion looks redundant; confirm.
setsize = float(str(labels.shape[0]))
ts_size = setsize * test_procent  # number of test samples
tr_size = setsize - ts_size       # number of training samples
print("Total_CSV_size: " + str(setsize))
print("Train_size: " + str(tr_size))
print("Test_size: " + str(ts_size))
total_dataset = total_dataset.map(pack_features_vector)
total_dataset = ChangeBatchSize(total_dataset, tr_size)
# ============================================================================
# Split dataset into train_dataset and test_dataset.
# ============================================================================
# Re-batching at tr_size yields two batches: the first (tr_size samples)
# becomes the training set and the remainder becomes the test set.
i = 0
for (parts, labels) in total_dataset:
    if (i == 0):
        k1 = parts
        l1 = labels
    else:
        k2 = parts
        l2 = labels
    i = i + 1
train_dataset = tf.data.Dataset.from_tensors((k1, l1))
train_dataset = ChangeBatchSize(train_dataset, learn_batch_size)
test_dataset = tf.data.Dataset.from_tensors((k2, l2))
test_dataset = ChangeBatchSize(test_dataset, ts_size)
# ============================================================================
# Create model object
# ============================================================================
model = create_model()
model.summary()
optimizer = tf.train.AdamOptimizer(learning_rate=lrate)
global_step = tf.train.get_or_create_global_step()
# Legend handles for per-class scatter plots (one color per class).
legend_elements = [Line2D([0], [0], marker='o', color='w', label=class_names[0], markerfacecolor='r', markersize=10),
                   Line2D([0], [0], marker='o', color='w', label=class_names[1], markerfacecolor='g', markersize=10),
                   Line2D([0], [0], marker='o', color='w', label=class_names[2], markerfacecolor='b', markersize=10)]
# keep results for plotting
train_loss_results = []
train_accuracy_results = []
np.set_printoptions(threshold=np.nan)
#==============================================================================
# Make machine learning process
#==============================================================================
old_loss=1000
for epoch in range(num_epochs):
epoch_loss_avg = tfe.metrics.Mean()
epoch_accuracy = tfe.metrics.Accuracy()
# Training loop - using batches of 32
for x, y in train_dataset:
# Optimize the model
#print(str(type(x)))
#print(str(x.shape))
loss_value, grads = grad(model, x, y)
optimizer.apply_gradients(zip(grads, model.variables),
global_step)
# Track progress
epoch_loss_avg(loss_value) # add current batch loss
# compare predicted label to actual label
epoch_accuracy(tf.argmax(model(x), axis=1, output_type=tf.int32), y)
# end epoch
train_loss_results.append(epoch_loss_avg.result())
train_accuracy_results.append(epoch_accuracy.result())
if epoch % 5 == 0:
test_accuracy = tfe.metrics.Accuracy()
for (x, y) in test_dataset:
logits = model(x)
prediction = tf.argmax(logits, axis=1, output_type=tf.int32)
test_accuracy(prediction, y)
X=logits.numpy()
Y=y.numpy()
PCA(copy=True, iterated_power='auto', n_components=2, random_state=None, svd_solver='auto', tol=0.0, whiten=False)
X = PCA(n_components=2).fit_transform(X)
arrcolor = []
for cl in Y:
arrcolor.append(mapcolor[cl])
plt.scatter(X[:, 0], X[:, 1], s=40, c=arrcolor)
#plt.show()
imgfile="{:s}\\epoch{:03d}.png".format(pathimg,epoch)
plt.title("{:.3%}".format(test_accuracy.result()))
plt.legend(handles=legend_elements, loc='upper right')
plt.savefig(imgfile)
plt.close()
new_loss=epoch_loss_avg.result()
accur=epoch_accuracy.result()
test_acc=test_accuracy.result()
msg="Epoch {:03d}: Loss: {:.6f}, Accuracy: {:.3%}, Test: {:.3%}".format(epoch,new_loss,accur,test_acc)
msg2 = "{0} {1:.6f} {2:.6f} {3:.6f} \n".format(epoch,accur,test_acc,new_loss)
print(msg)
if new_loss>old_loss:
break
file = open(pathlog,"a");
file.write(msg2)
file.close();
old_loss=epoch_loss_avg.result()
#==============================================================================
# Save trained model to disk
#==============================================================================
model.compile(optimizer=tf.train.AdamOptimizer(),
loss=tf.keras.losses.sparse_categorical_crossentropy,
metrics=['accuracy'])
filepath="csvsignal.h5"
tf.keras.models.save_model(
model,
filepath,
overwrite=True,
include_optimizer=True
)
print("Model csvsignal.h5 saved to disk")
| [
"tensorflow.enable_eager_execution",
"tensorflow.GradientTape",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.layers.MaxPooling1D",
"tensorflow.set_random_seed",
"matplotlib.lines.Line2D",
"tensorflow.keras.models.save_model",
"tensorflow.keras.layers.Reshape",
"sklearn.decomposition.PCA",
"t... | [((485, 512), 'tensorflow.enable_eager_execution', 'tf.enable_eager_execution', ([], {}), '()\n', (510, 512), True, 'import tensorflow as tf\n'), ((514, 532), 'tensorflow.set_random_seed', 'set_random_seed', (['(0)'], {}), '(0)\n', (529, 532), False, 'from tensorflow import set_random_seed\n'), ((5009, 5163), 'tensorflow.data.experimental.make_csv_dataset', 'tf.data.experimental.make_csv_dataset', (['total_dataset_fp', 'batch_size'], {'column_names': 'column_names', 'label_name': 'label_name', 'num_epochs': '(1)', 'shuffle': '(True)'}), '(total_dataset_fp, batch_size,\n column_names=column_names, label_name=label_name, num_epochs=1, shuffle\n =True)\n', (5046, 5163), True, 'import tensorflow as tf\n'), ((5951, 5989), 'tensorflow.data.Dataset.from_tensors', 'tf.data.Dataset.from_tensors', (['(k1, l1)'], {}), '((k1, l1))\n', (5979, 5989), True, 'import tensorflow as tf\n'), ((6074, 6112), 'tensorflow.data.Dataset.from_tensors', 'tf.data.Dataset.from_tensors', (['(k2, l2)'], {}), '((k2, l2))\n', (6102, 6112), True, 'import tensorflow as tf\n'), ((6411, 6454), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'lrate'}), '(learning_rate=lrate)\n', (6433, 6454), True, 'import tensorflow as tf\n'), ((6472, 6508), 'tensorflow.train.get_or_create_global_step', 'tf.train.get_or_create_global_step', ([], {}), '()\n', (6506, 6508), True, 'import tensorflow as tf\n'), ((6953, 6990), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.nan'}), '(threshold=np.nan)\n', (6972, 6990), True, 'import numpy as np\n'), ((9700, 9787), 'tensorflow.keras.models.save_model', 'tf.keras.models.save_model', (['model', 'filepath'], {'overwrite': '(True)', 'include_optimizer': '(True)'}), '(model, filepath, overwrite=True,\n include_optimizer=True)\n', (9726, 9787), True, 'import tensorflow as tf\n'), ((4065, 4124), 'tensorflow.losses.sparse_softmax_cross_entropy', 'tf.losses.sparse_softmax_cross_entropy', ([], {'labels': 'y', 'logits': 
'y_'}), '(labels=y, logits=y_)\n', (4103, 4124), True, 'import tensorflow as tf\n'), ((6533, 6634), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'marker': '"""o"""', 'color': '"""w"""', 'label': 'class_names[0]', 'markerfacecolor': '"""r"""', 'markersize': '(10)'}), "([0], [0], marker='o', color='w', label=class_names[0],\n markerfacecolor='r', markersize=10)\n", (6539, 6634), False, 'from matplotlib.lines import Line2D\n'), ((6651, 6752), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'marker': '"""o"""', 'color': '"""w"""', 'label': 'class_names[1]', 'markerfacecolor': '"""g"""', 'markersize': '(10)'}), "([0], [0], marker='o', color='w', label=class_names[1],\n markerfacecolor='g', markersize=10)\n", (6657, 6752), False, 'from matplotlib.lines import Line2D\n'), ((6769, 6870), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'marker': '"""o"""', 'color': '"""w"""', 'label': 'class_names[2]', 'markerfacecolor': '"""b"""', 'markersize': '(10)'}), "([0], [0], marker='o', color='w', label=class_names[2],\n markerfacecolor='b', markersize=10)\n", (6775, 6870), False, 'from matplotlib.lines import Line2D\n'), ((7256, 7274), 'tensorflow.contrib.eager.metrics.Mean', 'tfe.metrics.Mean', ([], {}), '()\n', (7272, 7274), True, 'import tensorflow.contrib.eager as tfe\n'), ((7295, 7317), 'tensorflow.contrib.eager.metrics.Accuracy', 'tfe.metrics.Accuracy', ([], {}), '()\n', (7315, 7317), True, 'import tensorflow.contrib.eager as tfe\n'), ((1376, 1398), 'tensorflow.executing_eagerly', 'tf.executing_eagerly', ([], {}), '()\n', (1396, 1398), True, 'import tensorflow as tf\n'), ((1640, 1670), 'tensorflow.data.experimental.unbatch', 'tf.data.experimental.unbatch', ([], {}), '()\n', (1668, 1670), True, 'import tensorflow as tf\n'), ((4172, 4189), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (4187, 4189), True, 'import tensorflow as tf\n'), ((8000, 8022), 'tensorflow.contrib.eager.metrics.Accuracy', 'tfe.metrics.Accuracy', ([], {}), '()\n', 
(8020, 8022), True, 'import tensorflow.contrib.eager as tfe\n'), ((9535, 9559), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {}), '()\n', (9557, 9559), True, 'import tensorflow as tf\n'), ((2351, 2425), 'tensorflow.keras.layers.Reshape', 'tf.keras.layers.Reshape', (['(f_size, 1)'], {'input_shape': '(None, f_size)', 'name': '"""x"""'}), "((f_size, 1), input_shape=(None, f_size), name='x')\n", (2374, 2425), True, 'import tensorflow as tf\n'), ((2429, 2602), 'tensorflow.keras.layers.Conv1D', 'tf.keras.layers.Conv1D', ([], {'filters': 'filtr1', 'kernel_size': 'filtr_size1', 'strides': '(1)', 'kernel_initializer': '"""random_uniform"""', 'activation': 'tf.nn.relu', 'padding': '"""same"""', 'name': '"""Conv1"""'}), "(filters=filtr1, kernel_size=filtr_size1, strides=1,\n kernel_initializer='random_uniform', activation=tf.nn.relu, padding=\n 'same', name='Conv1')\n", (2451, 2602), True, 'import tensorflow as tf\n'), ((2595, 2692), 'tensorflow.keras.layers.MaxPooling1D', 'tf.keras.layers.MaxPooling1D', ([], {'pool_size': 'filtr_size1', 'strides': '(2)', 'padding': '"""same"""', 'name': '"""pool1"""'}), "(pool_size=filtr_size1, strides=2, padding=\n 'same', name='pool1')\n", (2623, 2692), True, 'import tensorflow as tf\n'), ((2694, 2867), 'tensorflow.keras.layers.Conv1D', 'tf.keras.layers.Conv1D', ([], {'filters': 'filtr2', 'kernel_size': 'filtr_size2', 'strides': '(1)', 'padding': '"""same"""', 'name': '"""Conv2"""', 'activation': 'tf.nn.relu', 'kernel_initializer': '"""random_uniform"""'}), "(filters=filtr2, kernel_size=filtr_size2, strides=1,\n padding='same', name='Conv2', activation=tf.nn.relu, kernel_initializer\n ='random_uniform')\n", (2716, 2867), True, 'import tensorflow as tf\n'), ((2861, 2958), 'tensorflow.keras.layers.MaxPooling1D', 'tf.keras.layers.MaxPooling1D', ([], {'pool_size': 'filtr_size2', 'strides': '(2)', 'padding': '"""same"""', 'name': '"""pool2"""'}), "(pool_size=filtr_size2, strides=2, padding=\n 'same', name='pool2')\n", (2889, 
2958), True, 'import tensorflow as tf\n'), ((2966, 3139), 'tensorflow.keras.layers.Conv1D', 'tf.keras.layers.Conv1D', ([], {'filters': 'filtr3', 'kernel_size': 'filtr_size3', 'strides': '(1)', 'padding': '"""same"""', 'name': '"""Conv3"""', 'activation': 'tf.nn.relu', 'kernel_initializer': '"""random_uniform"""'}), "(filters=filtr3, kernel_size=filtr_size3, strides=1,\n padding='same', name='Conv3', activation=tf.nn.relu, kernel_initializer\n ='random_uniform')\n", (2988, 3139), True, 'import tensorflow as tf\n'), ((3133, 3230), 'tensorflow.keras.layers.MaxPooling1D', 'tf.keras.layers.MaxPooling1D', ([], {'pool_size': 'filtr_size3', 'strides': '(2)', 'padding': '"""same"""', 'name': '"""pool3"""'}), "(pool_size=filtr_size3, strides=2, padding=\n 'same', name='pool3')\n", (3161, 3230), True, 'import tensorflow as tf\n'), ((3232, 3405), 'tensorflow.keras.layers.Conv1D', 'tf.keras.layers.Conv1D', ([], {'filters': 'filtr4', 'kernel_size': 'filtr_size4', 'strides': '(1)', 'padding': '"""same"""', 'name': '"""Conv4"""', 'activation': 'tf.nn.relu', 'kernel_initializer': '"""random_uniform"""'}), "(filters=filtr4, kernel_size=filtr_size4, strides=1,\n padding='same', name='Conv4', activation=tf.nn.relu, kernel_initializer\n ='random_uniform')\n", (3254, 3405), True, 'import tensorflow as tf\n'), ((3399, 3496), 'tensorflow.keras.layers.MaxPooling1D', 'tf.keras.layers.MaxPooling1D', ([], {'pool_size': 'filtr_size4', 'strides': '(2)', 'padding': '"""same"""', 'name': '"""pool4"""'}), "(pool_size=filtr_size4, strides=2, padding=\n 'same', name='pool4')\n", (3427, 3496), True, 'import tensorflow as tf\n'), ((3500, 3533), 'tensorflow.keras.layers.GlobalMaxPool1D', 'tf.keras.layers.GlobalMaxPool1D', ([], {}), '()\n', (3531, 3533), True, 'import tensorflow as tf\n'), ((3566, 3633), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['DenseLast'], {'activation': 'tf.nn.relu', 'name': '"""fir"""'}), "(DenseLast, activation=tf.nn.relu, name='fir')\n", (3587, 3633), True, 
'import tensorflow as tf\n'), ((3663, 3727), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(256)'], {'activation': 'tf.nn.relu', 'name': '"""mod_up"""'}), "(256, activation=tf.nn.relu, name='mod_up')\n", (3684, 3727), True, 'import tensorflow as tf\n'), ((3735, 3774), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(3)'], {'name': '"""y_pred"""'}), "(3, name='y_pred')\n", (3756, 3774), True, 'import tensorflow as tf\n'), ((3828, 3852), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {}), '()\n', (3850, 3852), True, 'import tensorflow as tf\n'), ((8103, 8150), 'tensorflow.argmax', 'tf.argmax', (['logits'], {'axis': '(1)', 'output_type': 'tf.int32'}), '(logits, axis=1, output_type=tf.int32)\n', (8112, 8150), True, 'import tensorflow as tf\n'), ((8237, 8355), 'sklearn.decomposition.PCA', 'PCA', ([], {'copy': '(True)', 'iterated_power': '"""auto"""', 'n_components': '(2)', 'random_state': 'None', 'svd_solver': '"""auto"""', 'tol': '(0.0)', 'whiten': '(False)'}), "(copy=True, iterated_power='auto', n_components=2, random_state=None,\n svd_solver='auto', tol=0.0, whiten=False)\n", (8240, 8355), False, 'from sklearn.decomposition import PCA\n'), ((8503, 8550), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[:, 0]', 'X[:, 1]'], {'s': '(40)', 'c': 'arrcolor'}), '(X[:, 0], X[:, 1], s=40, c=arrcolor)\n', (8514, 8550), True, 'import matplotlib.pyplot as plt\n'), ((8714, 8768), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'handles': 'legend_elements', 'loc': '"""upper right"""'}), "(handles=legend_elements, loc='upper right')\n", (8724, 8768), True, 'import matplotlib.pyplot as plt\n'), ((8776, 8796), 'matplotlib.pyplot.savefig', 'plt.savefig', (['imgfile'], {}), '(imgfile)\n', (8787, 8796), True, 'import matplotlib.pyplot as plt\n'), ((8804, 8815), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8813, 8815), True, 'import matplotlib.pyplot as plt\n'), ((8363, 8382), 'sklearn.decomposition.PCA', 'PCA', ([], 
{'n_components': '(2)'}), '(n_components=2)\n', (8366, 8382), False, 'from sklearn.decomposition import PCA\n')] |
# -*- coding: utf-8 -*-
"""
Created on 2020/03/16
Feature selection: Relief-based feature selection algorithm.
------
@author: <NAME>
"""
import numpy as np
from sklearn import preprocessing
import os
from sklearn.externals import joblib
from el_classify_sensitive_person_train_validation import ClassifyFourKindOfPersonTrain
from eslearn.utils.lc_evaluation_model_performances import eval_performance
class ClassifyFourKindOfPersonTest():
"""
This class is used to testing classification model for 2 kind of sensitive person identification.
Parameters
----------
data_test_file: path str
Path of the dataset
label_test_file: path str
Path of the label
path_out :
Path to save results
is_feature_selection : bool
if perfrome feature selection.
is_showfig_finally: bool
If show figure after all iteration finished.
Returns
-------
Save all classification results and figures to local disk.
"""
def __init__(selftest,
data_test_file=None,
label_test_file=None,
data_train_file=None,
models_path=None,
path_out=None,
is_feature_selection=False,
is_showfig_finally=True):
selftest.data_test_file = data_test_file
selftest.label_test_file = label_test_file
selftest.data_train_file = data_train_file
selftest.path_out = path_out
selftest.models_path = models_path
selftest.is_feature_selection = is_feature_selection
selftest.is_showfig_finally = is_showfig_finally
def main_function(selftest):
"""
"""
print('Training model and testing...\n')
# load data and mask
mask_lassocv = joblib.load(os.path.join(selftest.path_out, 'mask_selected_features_lassocv.pkl'))
model_feature_selection = joblib.load(os.path.join(selftest.models_path, 'model_feature_selection.pkl'))
model_classification = joblib.load(os.path.join(selftest.models_path, 'model_classification.pkl'))
feature_test, selftest.label_test, feature_train = selftest._load_data()
# Age encoding
feature_test[:,2] = ClassifyFourKindOfPersonTrain().age_encodeing(feature_train[:,2], feature_test[:,2])
# Feature selection
if selftest.is_feature_selection:
feature_test = feature_test[:, mask_lassocv != 0]
# Testting
selftest.prediction, selftest.decision = selftest.testing(model_classification, feature_test)
# Evaluating classification performances
selftest.accuracy, selftest.sensitivity, selftest.specificity, selftest.AUC = eval_performance(selftest.label_test, selftest.prediction, selftest.decision,
accuracy_kfold=None, sensitivity_kfold=None, specificity_kfold=None, AUC_kfold=None,
verbose=1, is_showfig=0)
# Save results and fig to local path
selftest.save_results()
selftest.save_fig()
print("--" * 10 + "Done!" + "--" * 10 )
return selftest
def _load_data(selftest):
"""
Load data
"""
data_test = np.load(selftest.data_test_file)
label_test = np.load(selftest.label_test_file)
data_train = np.load(selftest.data_train_file)
return data_test, label_test, data_train
def testing(selftest, model, test_X):
predict = model.predict(test_X)
decision = model.decision_function(test_X)
return predict, decision
def save_results(selftest):
# Save performances and others
import pandas as pd
performances_to_save = np.array([selftest.accuracy, selftest.sensitivity, selftest.specificity, selftest.AUC]).reshape(1,4)
de_pred_label_to_save = np.vstack([selftest.decision.T, selftest.prediction.T, selftest.label_test.T]).T
performances_to_save = pd.DataFrame(performances_to_save, columns=[['Accuracy','Sensitivity', 'Specificity', 'AUC']])
de_pred_label_to_save = pd.DataFrame(de_pred_label_to_save, columns=[['Decision','Prediction', 'Sorted_Real_Label']])
performances_to_save.to_csv(os.path.join(selftest.path_out, 'test_Performances.txt'), index=False, header=True)
de_pred_label_to_save.to_csv(os.path.join(selftest.path_out, 'test_Decision_prediction_label.txt'), index=False, header=True)
def save_fig(selftest):
# Save ROC and Classification 2D figure
acc, sens, spec, auc = eval_performance(selftest.label_test, selftest.prediction, selftest.decision,
selftest.accuracy, selftest.sensitivity, selftest.specificity, selftest.AUC,
verbose=0, is_showfig=selftest.is_showfig_finally, is_savefig=1,
out_name=os.path.join(selftest.path_out, 'Classification_performances_test.pdf'),
legend1='Healthy', legend2='Unhealthy')
#
if __name__ == '__main__':
# =============================================================================
# All inputs
data_file = r'D:\workstation_b\Fundation\给黎超.xlsx'
path_out = r'D:\workstation_b\Fundation'
models_path = r'D:\workstation_b\Fundation'
# =============================================================================
selftest = ClassifyFourKindOfPersonTest(data_test_file=r'D:\workstation_b\Fundation\feature_test.npy',
label_test_file=r'D:\workstation_b\Fundation\label_test.npy',
data_train_file=r'D:\workstation_b\Fundation\feature_train.npy',
path_out=path_out,
models_path=models_path,
is_feature_selection=1)
selftest.main_function()
| [
"pandas.DataFrame",
"os.path.join",
"numpy.array",
"numpy.vstack",
"eslearn.utils.lc_evaluation_model_performances.eval_performance",
"el_classify_sensitive_person_train_validation.ClassifyFourKindOfPersonTrain",
"numpy.load"
] | [((2744, 2940), 'eslearn.utils.lc_evaluation_model_performances.eval_performance', 'eval_performance', (['selftest.label_test', 'selftest.prediction', 'selftest.decision'], {'accuracy_kfold': 'None', 'sensitivity_kfold': 'None', 'specificity_kfold': 'None', 'AUC_kfold': 'None', 'verbose': '(1)', 'is_showfig': '(0)'}), '(selftest.label_test, selftest.prediction, selftest.\n decision, accuracy_kfold=None, sensitivity_kfold=None,\n specificity_kfold=None, AUC_kfold=None, verbose=1, is_showfig=0)\n', (2760, 2940), False, 'from eslearn.utils.lc_evaluation_model_performances import eval_performance\n'), ((3243, 3275), 'numpy.load', 'np.load', (['selftest.data_test_file'], {}), '(selftest.data_test_file)\n', (3250, 3275), True, 'import numpy as np\n'), ((3297, 3330), 'numpy.load', 'np.load', (['selftest.label_test_file'], {}), '(selftest.label_test_file)\n', (3304, 3330), True, 'import numpy as np\n'), ((3352, 3385), 'numpy.load', 'np.load', (['selftest.data_train_file'], {}), '(selftest.data_train_file)\n', (3359, 3385), True, 'import numpy as np\n'), ((3979, 4078), 'pandas.DataFrame', 'pd.DataFrame', (['performances_to_save'], {'columns': "[['Accuracy', 'Sensitivity', 'Specificity', 'AUC']]"}), "(performances_to_save, columns=[['Accuracy', 'Sensitivity',\n 'Specificity', 'AUC']])\n", (3991, 4078), True, 'import pandas as pd\n'), ((4106, 4204), 'pandas.DataFrame', 'pd.DataFrame', (['de_pred_label_to_save'], {'columns': "[['Decision', 'Prediction', 'Sorted_Real_Label']]"}), "(de_pred_label_to_save, columns=[['Decision', 'Prediction',\n 'Sorted_Real_Label']])\n", (4118, 4204), True, 'import pandas as pd\n'), ((1827, 1896), 'os.path.join', 'os.path.join', (['selftest.path_out', '"""mask_selected_features_lassocv.pkl"""'], {}), "(selftest.path_out, 'mask_selected_features_lassocv.pkl')\n", (1839, 1896), False, 'import os\n'), ((1944, 2009), 'os.path.join', 'os.path.join', (['selftest.models_path', '"""model_feature_selection.pkl"""'], {}), "(selftest.models_path, 
'model_feature_selection.pkl')\n", (1956, 2009), False, 'import os\n'), ((2054, 2116), 'os.path.join', 'os.path.join', (['selftest.models_path', '"""model_classification.pkl"""'], {}), "(selftest.models_path, 'model_classification.pkl')\n", (2066, 2116), False, 'import os\n'), ((3867, 3945), 'numpy.vstack', 'np.vstack', (['[selftest.decision.T, selftest.prediction.T, selftest.label_test.T]'], {}), '([selftest.decision.T, selftest.prediction.T, selftest.label_test.T])\n', (3876, 3945), True, 'import numpy as np\n'), ((4245, 4301), 'os.path.join', 'os.path.join', (['selftest.path_out', '"""test_Performances.txt"""'], {}), "(selftest.path_out, 'test_Performances.txt')\n", (4257, 4301), False, 'import os\n'), ((4366, 4435), 'os.path.join', 'os.path.join', (['selftest.path_out', '"""test_Decision_prediction_label.txt"""'], {}), "(selftest.path_out, 'test_Decision_prediction_label.txt')\n", (4378, 4435), False, 'import os\n'), ((2253, 2284), 'el_classify_sensitive_person_train_validation.ClassifyFourKindOfPersonTrain', 'ClassifyFourKindOfPersonTrain', ([], {}), '()\n', (2282, 2284), False, 'from el_classify_sensitive_person_train_validation import ClassifyFourKindOfPersonTrain\n'), ((3734, 3825), 'numpy.array', 'np.array', (['[selftest.accuracy, selftest.sensitivity, selftest.specificity, selftest.AUC]'], {}), '([selftest.accuracy, selftest.sensitivity, selftest.specificity,\n selftest.AUC])\n', (3742, 3825), True, 'import numpy as np\n'), ((4954, 5025), 'os.path.join', 'os.path.join', (['selftest.path_out', '"""Classification_performances_test.pdf"""'], {}), "(selftest.path_out, 'Classification_performances_test.pdf')\n", (4966, 5025), False, 'import os\n')] |
import random
cor = {
'fim':'\033[m',
'amarelo':'\033[1;033m',
'vermelho':'\033[1;031m',
'vermelhof':'\033[7;031m',
'azul':'\033[1;034m',
'verde':'\033[1;32m',
'verdef':'\033[7;32m',
'branco':'\033[1;030m'
}
print('''
Escolha uma das opções abaixo:
\t {}1{} {}PEDRA{}:
\t {}2{} {}PAPEL{}:
\t {}3{} {}TESOURA{}:'''.format(
cor['vermelho'], cor['fim'], cor['azul'], cor['fim'],
cor['vermelho'], cor['fim'], cor['azul'], cor['fim'],
cor['vermelho'], cor['fim'], cor['azul'], cor['fim']
))
eu = int(input('\t '))
if eu == 1:
me = 'PEDRA'
elif eu == 2:
me = 'PAPEL'
else:
me = 'TESOURA'
pc = ['PEDRA', 'PAPEL', 'TESOURA']
random.shuffle(pc)
if eu < 1 or eu > 3:
print('\n\t\t{}ESCOLHA UM VALOR VÁLIDO{}\n'.format(cor['vermelho'], cor['fim']))
elif eu == 1 and pc[0] == 'PEDRA' or eu == 2 and pc[0] == 'PAPEL' or eu == 3 and pc[0] == 'TESOURA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('{} EMPATE, JOGUE OUTRA VEZ {}\n'.format(cor['vermelhof'], cor['fim']))
elif eu == 1 and pc[0] == 'PAPEL':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PAPEL {}EMBRULHA{} PEDRA\n'.format(cor['amarelo'], cor['fim']))
elif eu == 1 and pc[0] == 'PAPEL':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PEDRA {}QUEBRA{} TESOURA\n'.format(cor['amarelo'], cor['fim']))
elif eu == 2 and pc[0] == 'PEDRA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PAPEL {}EMBRULHA{} PEDRA\n'.format(cor['amarelo'], cor['fim']))
elif eu == 2 and pc[0] == 'TESOURA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('TESOURA {}CORTA{} PAPEL\n'.format(cor['amarelo'], cor['fim']))
elif eu == 3 and pc[0] == 'PEDRA':
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('PEDRA {}QUEBRA{} TESOURA\n'.format(cor['amarelo'], cor['fim']))
else:
print('{}EU{}: {}\t\t{}PC{}: {}'.format(cor['vermelho'], cor['fim'], me, cor['vermelho'], cor['fim'], pc[0]))
print('TESOURA {}CORTA{} PAPEL\n'.format(cor['amarelo'], cor['fim']))
| [
"random.shuffle"
] | [((718, 736), 'random.shuffle', 'random.shuffle', (['pc'], {}), '(pc)\n', (732, 736), False, 'import random\n')] |
from pandas.core.algorithms import mode
import torch
import torch.nn as nn
from albumentations import Compose,Resize,Normalize
from albumentations.pytorch import ToTensorV2
import wandb
import time
import torchvision
import torch.nn.functional as F
import torch.optim as optim
from torch.cuda.amp import autocast,GradScaler
import os
import numpy as np
from tqdm import tqdm
from callbacks import EarlyStopping
import pandas as pd
from torch.utils.data import Dataset, DataLoader
import cv2
import torch.nn.functional as F
import random
from build_model import Deformed_Darknet53
torch.manual_seed(2021)
np.random.seed(2021)
random.seed(2021)
torch.backends.cudnn.benchmark = True
torch.backends.cudnn.deterministic = True
DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"
TOTAL_EPOCHS = 100
scaler = GradScaler()
early_stop = EarlyStopping()
wandb.init(project='deformed-darknet',entity='tensorthug',name='new-darknet-256x256_32')
print("***** Loading the Model in {} *****".format(DEVICE))
Model = Deformed_Darknet53().to(DEVICE)
print("Model Shipped to {}".format(DEVICE))
data = pd.read_csv("data.csv")
train_loss_fn = nn.BCEWithLogitsLoss()
val_loss_fn = nn.BCEWithLogitsLoss()
optim = torch.optim.Adam(Model.parameters())
wandb.watch(Model)
class dog_cat(Dataset):
def __init__(self,df,mode="train",folds=0,transforms=None):
super(dog_cat,self).__init__()
self.df = df
self.mode = mode
self.folds = folds
self.transforms = transforms
if self.mode == "train":
self.data = self.df[self.df.folds != self.folds].reset_index(drop=True)
else:
self.data = self.df[self.df.folds == self.folds].reset_index(drop=True)
def __len__(self):
return len(self.data)
def __getitem__(self,idx):
img = cv2.imread(self.data.loc[idx,"Paths"])
label = self.data.loc[idx,'Labels']
if self.transforms is not None:
image = self.transforms(image=img)['image']
return image,label
def train_loop(epoch,dataloader,model,loss_fn,optim,device=DEVICE):
model.train()
epoch_loss = 0
epoch_acc = 0
#start_time = time.time()
pbar = tqdm(enumerate(dataloader),total=len(dataloader))
for i,(img,label) in pbar:
optim.zero_grad()
img = img.to(DEVICE).float()
label = label.to(DEVICE).float()
#LOAD_TIME = time.time() - start_time
with autocast():
yhat = model(img)
#Loss Calculation
train_loss = loss_fn(input = yhat.flatten(), target = label)
out = (yhat.flatten().sigmoid() > 0.5).float()
correct = (label == out).float().sum()
scaler.scale(train_loss).backward()
scaler.step(optim)
scaler.update()
epoch_loss += train_loss.item()
epoch_acc += correct.item() / out.shape[0]
train_epoch_loss = epoch_loss / len(dataloader)
train_epoch_acc = epoch_acc / len(dataloader)
wandb.log({"Training_Loss":train_epoch_loss})
wandb.log({"Training_Acc":train_epoch_acc})
#print(f"Epoch:{epoch}/{TOTAL_EPOCHS} Epoch Loss:{epoch_loss / len(dataloader):.4f} Epoch Acc:{epoch_acc / len(dataloader):.4f}")
return train_epoch_loss,train_epoch_acc
def val_loop(epoch,dataloader,model,loss_fn,device = DEVICE):
model.eval()
val_epoch_loss = 0
val_epoch_acc = 0
pbar = tqdm(enumerate(dataloader),total=len(dataloader))
with torch.no_grad():
for i,(img,label) in pbar:
img = img.to(device).float()
label = label.to(device).float()
yhat = model(img)
val_loss = loss_fn(input=yhat.flatten(),target=label)
out = (yhat.flatten().sigmoid()>0.5).float()
correct = (label == out).float().sum()
val_epoch_loss += val_loss.item()
val_epoch_acc += correct.item() / out.shape[0]
val_lossd = val_epoch_loss / len(dataloader)
val_accd = val_epoch_acc / len(dataloader)
wandb.log({"Val_Loss":val_lossd,"Epoch":epoch})
wandb.log({"Val_Acc":val_accd/len(dataloader),"Epoch":epoch})
return val_lossd,val_accd
if __name__ == "__main__":
train_per_epoch_loss,train_per_epoch_acc = [],[]
val_per_epoch_loss,val_per_epoch_acc = [],[]
train = dog_cat(data,transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()]))
val = dog_cat(data,mode='val',transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()]))
train_load = DataLoader(train,batch_size=32,shuffle=True,num_workers=4)
val_load = DataLoader(val,batch_size=32,num_workers=4)
for e in range(TOTAL_EPOCHS):
train_loss,train_acc = train_loop(e,train_load,Model,train_loss_fn,optim)
val_loss,val_acc = val_loop(e,val_load,Model,val_loss_fn)
train_per_epoch_loss.append(train_loss)
train_per_epoch_acc.append(train_acc)
val_per_epoch_loss.append(val_loss)
val_per_epoch_acc.append(val_acc)
print(f"TrainLoss:{train_loss:.4f} TrainAcc:{train_acc:.4f}")
print(f"ValLoss:{val_loss:.4f} ValAcc:{val_acc:.4f}")
early_stop(Model,val_loss)
if early_stop.early_stop:
break
| [
"wandb.log",
"pandas.read_csv",
"wandb.init",
"torch.cuda.is_available",
"torch.cuda.amp.GradScaler",
"torch.cuda.amp.autocast",
"numpy.random.seed",
"build_model.Deformed_Darknet53",
"albumentations.Normalize",
"callbacks.EarlyStopping",
"torch.nn.BCEWithLogitsLoss",
"torch.optim.zero_grad",
... | [((585, 608), 'torch.manual_seed', 'torch.manual_seed', (['(2021)'], {}), '(2021)\n', (602, 608), False, 'import torch\n'), ((609, 629), 'numpy.random.seed', 'np.random.seed', (['(2021)'], {}), '(2021)\n', (623, 629), True, 'import numpy as np\n'), ((630, 647), 'random.seed', 'random.seed', (['(2021)'], {}), '(2021)\n', (641, 647), False, 'import random\n'), ((814, 826), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {}), '()\n', (824, 826), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((840, 855), 'callbacks.EarlyStopping', 'EarlyStopping', ([], {}), '()\n', (853, 855), False, 'from callbacks import EarlyStopping\n'), ((856, 951), 'wandb.init', 'wandb.init', ([], {'project': '"""deformed-darknet"""', 'entity': '"""tensorthug"""', 'name': '"""new-darknet-256x256_32"""'}), "(project='deformed-darknet', entity='tensorthug', name=\n 'new-darknet-256x256_32')\n", (866, 951), False, 'import wandb\n'), ((1102, 1125), 'pandas.read_csv', 'pd.read_csv', (['"""data.csv"""'], {}), "('data.csv')\n", (1113, 1125), True, 'import pandas as pd\n'), ((1143, 1165), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (1163, 1165), True, 'import torch.nn as nn\n'), ((1180, 1202), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (1200, 1202), True, 'import torch.nn as nn\n'), ((1250, 1268), 'wandb.watch', 'wandb.watch', (['Model'], {}), '(Model)\n', (1261, 1268), False, 'import wandb\n'), ((749, 774), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (772, 774), False, 'import torch\n'), ((3052, 3098), 'wandb.log', 'wandb.log', (["{'Training_Loss': train_epoch_loss}"], {}), "({'Training_Loss': train_epoch_loss})\n", (3061, 3098), False, 'import wandb\n'), ((3102, 3146), 'wandb.log', 'wandb.log', (["{'Training_Acc': train_epoch_acc}"], {}), "({'Training_Acc': train_epoch_acc})\n", (3111, 3146), False, 'import wandb\n'), ((4616, 4677), 'torch.utils.data.DataLoader', 'DataLoader', (['train'], 
{'batch_size': '(32)', 'shuffle': '(True)', 'num_workers': '(4)'}), '(train, batch_size=32, shuffle=True, num_workers=4)\n', (4626, 4677), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((4690, 4735), 'torch.utils.data.DataLoader', 'DataLoader', (['val'], {'batch_size': '(32)', 'num_workers': '(4)'}), '(val, batch_size=32, num_workers=4)\n', (4700, 4735), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((1017, 1037), 'build_model.Deformed_Darknet53', 'Deformed_Darknet53', ([], {}), '()\n', (1035, 1037), False, 'from build_model import Deformed_Darknet53\n'), ((1853, 1892), 'cv2.imread', 'cv2.imread', (["self.data.loc[idx, 'Paths']"], {}), "(self.data.loc[idx, 'Paths'])\n", (1863, 1892), False, 'import cv2\n'), ((2325, 2342), 'torch.optim.zero_grad', 'optim.zero_grad', ([], {}), '()\n', (2340, 2342), True, 'import torch.optim as optim\n'), ((3534, 3549), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3547, 3549), False, 'import torch\n'), ((4106, 4156), 'wandb.log', 'wandb.log', (["{'Val_Loss': val_lossd, 'Epoch': epoch}"], {}), "({'Val_Loss': val_lossd, 'Epoch': epoch})\n", (4115, 4156), False, 'import wandb\n'), ((2491, 2501), 'torch.cuda.amp.autocast', 'autocast', ([], {}), '()\n', (2499, 2501), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((4456, 4472), 'albumentations.Resize', 'Resize', (['(256)', '(256)'], {}), '(256, 256)\n', (4462, 4472), False, 'from albumentations import Compose, Resize, Normalize\n'), ((4472, 4483), 'albumentations.Normalize', 'Normalize', ([], {}), '()\n', (4481, 4483), False, 'from albumentations import Compose, Resize, Normalize\n'), ((4484, 4496), 'albumentations.pytorch.ToTensorV2', 'ToTensorV2', ([], {}), '()\n', (4494, 4496), False, 'from albumentations.pytorch import ToTensorV2\n'), ((4554, 4570), 'albumentations.Resize', 'Resize', (['(256)', '(256)'], {}), '(256, 256)\n', (4560, 4570), False, 'from albumentations import Compose, Resize, Normalize\n'), ((4570, 4581), 
'albumentations.Normalize', 'Normalize', ([], {}), '()\n', (4579, 4581), False, 'from albumentations import Compose, Resize, Normalize\n'), ((4582, 4594), 'albumentations.pytorch.ToTensorV2', 'ToTensorV2', ([], {}), '()\n', (4592, 4594), False, 'from albumentations.pytorch import ToTensorV2\n')] |
import conftest
from asaprog import pac_encode
from asaprog.util import *
if __name__ == "__main__":
    # Build a sample CHK_DEVICE packet and print its encoded bytes.
    packet = {
        'command': asaProgCommand.CHK_DEVICE.value,
        'data': b'test',
    }
    encoded = pac_encode(packet)
    print(encoded)
    # Last byte of the encoded packet (e.g. checksum/terminator).
    print(encoded[-1])
| [
"asaprog.pac_encode"
] | [((206, 221), 'asaprog.pac_encode', 'pac_encode', (['pac'], {}), '(pac)\n', (216, 221), False, 'from asaprog import pac_encode\n')] |
# -*- coding: UTF=8 -*-
__author__ = '<NAME>'
import glob
from appwebshare.scripts import config
def get_file_list():
    """Return the names of all files in ``config.DIR``.

    ``glob.glob`` yields paths that still carry the directory prefix;
    the prefix is stripped so only bare file names remain.

    :return: list of file names without the directory prefix
    """
    # Comprehension replaces the original append loop (same result).
    without_dir = [path.replace(config.DIR, "")
                   for path in glob.glob(config.DIR + '*.*')]
return without_dir | [
"glob.glob"
] | [((153, 182), 'glob.glob', 'glob.glob', (["(config.DIR + '*.*')"], {}), "(config.DIR + '*.*')\n", (162, 182), False, 'import glob\n')] |
import pytest
import checkout_sdk
from checkout_sdk.environment import Environment
from checkout_sdk.exception import CheckoutArgumentException
def test_should_create_four_sdk():
    """FourSdk builds a usable client for both sandbox and production.

    Fix: the original built the sandbox SDK but discarded the result
    without any assertion, so that half of the test verified nothing.
    """
    sandbox_sdk = checkout_sdk.FourSdk() \
        .secret_key('<KEY>') \
        .public_key('<KEY>') \
        .environment(Environment.sandbox()) \
        .build()
    # The sandbox build must also yield a working SDK instance.
    assert sandbox_sdk is not None
    assert sandbox_sdk.tokens is not None

    sdk = checkout_sdk.FourSdk() \
        .secret_key('<KEY>') \
        .public_key('<KEY>') \
        .environment(Environment.production()) \
        .build()
    assert sdk is not None
    assert sdk.tokens is not None
def test_should_fail_create_four_sdk():
    """Building a FourSdk with an incomplete key pair must raise."""
    # Secret key alone (public key missing) is rejected.
    with pytest.raises(CheckoutArgumentException):
        (checkout_sdk.FourSdk()
         .secret_key('<KEY>')
         .environment(Environment.sandbox())
         .build())
    # Public key alone (secret key missing) is rejected.
    with pytest.raises(CheckoutArgumentException):
        (checkout_sdk.FourSdk()
         .public_key('<KEY>')
         .environment(Environment.sandbox())
         .build())
| [
"checkout_sdk.environment.Environment.production",
"checkout_sdk.environment.Environment.sandbox",
"checkout_sdk.FourSdk",
"pytest.raises"
] | [((613, 653), 'pytest.raises', 'pytest.raises', (['CheckoutArgumentException'], {}), '(CheckoutArgumentException)\n', (626, 653), False, 'import pytest\n'), ((804, 844), 'pytest.raises', 'pytest.raises', (['CheckoutArgumentException'], {}), '(CheckoutArgumentException)\n', (817, 844), False, 'import pytest\n'), ((294, 315), 'checkout_sdk.environment.Environment.sandbox', 'Environment.sandbox', ([], {}), '()\n', (313, 315), False, 'from checkout_sdk.environment import Environment\n'), ((455, 479), 'checkout_sdk.environment.Environment.production', 'Environment.production', ([], {}), '()\n', (477, 479), False, 'from checkout_sdk.environment import Environment\n'), ((748, 769), 'checkout_sdk.environment.Environment.sandbox', 'Environment.sandbox', ([], {}), '()\n', (767, 769), False, 'from checkout_sdk.environment import Environment\n'), ((939, 960), 'checkout_sdk.environment.Environment.sandbox', 'Environment.sandbox', ([], {}), '()\n', (958, 960), False, 'from checkout_sdk.environment import Environment\n'), ((663, 685), 'checkout_sdk.FourSdk', 'checkout_sdk.FourSdk', ([], {}), '()\n', (683, 685), False, 'import checkout_sdk\n'), ((854, 876), 'checkout_sdk.FourSdk', 'checkout_sdk.FourSdk', ([], {}), '()\n', (874, 876), False, 'import checkout_sdk\n'), ((186, 208), 'checkout_sdk.FourSdk', 'checkout_sdk.FourSdk', ([], {}), '()\n', (206, 208), False, 'import checkout_sdk\n'), ((347, 369), 'checkout_sdk.FourSdk', 'checkout_sdk.FourSdk', ([], {}), '()\n', (367, 369), False, 'import checkout_sdk\n')] |
import os.path as op
from urllib.request import urlretrieve
import matplotlib
import numpy as np
from numpy.testing import assert_allclose
import pytest
import hnn_core
from hnn_core import read_params, read_dipole, average_dipoles
from hnn_core import Network, jones_2009_model
from hnn_core.viz import plot_dipole
from hnn_core.dipole import Dipole, simulate_dipole, _rmse
from hnn_core.parallel_backends import requires_mpi4py, requires_psutil
matplotlib.use('agg')
def test_dipole(tmpdir, run_hnn_core_fixture):
    """Test dipole object.

    Exercises scaling/smoothing, write/read round-trips, trial averaging
    (including an n-of-1 list and a zero dipole), the single-column
    experimental dipole format, and the deprecated ``postproc`` path.
    """
    hnn_core_root = op.dirname(hnn_core.__file__)
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    dpl_out_fname = tmpdir.join('dpl1.txt')
    params = read_params(params_fname)
    # Build a synthetic dipole from random data so no simulation is needed.
    times = np.arange(0, 6000 * params['dt'], params['dt'])
    data = np.random.random((6000, 3))
    dipole = Dipole(times, data)
    dipole._baseline_renormalize(params['N_pyr_x'], params['N_pyr_y'])
    dipole._convert_fAm_to_nAm()
    # test smoothing and scaling
    dipole_raw = dipole.copy()
    dipole.scale(params['dipole_scalefctr'])
    dipole.smooth(window_len=params['dipole_smooth_win'])
    # Smoothing/scaling must actually have changed the data ...
    with pytest.raises(AssertionError):
        assert_allclose(dipole.data['agg'], dipole_raw.data['agg'])
    # ... and must equal scale+smooth applied to the raw copy.
    assert_allclose(dipole.data['agg'],
                    (params['dipole_scalefctr'] * dipole_raw.smooth(
                        params['dipole_smooth_win']).data['agg']))
    dipole.plot(show=False)
    plot_dipole([dipole, dipole], show=False)
    # Test IO: a write/read round-trip must preserve times and data.
    dipole.write(dpl_out_fname)
    dipole_read = read_dipole(dpl_out_fname)
    assert_allclose(dipole_read.times, dipole.times, rtol=0, atol=0.00051)
    for dpl_key in dipole.data.keys():
        assert_allclose(dipole_read.data[dpl_key],
                        dipole.data[dpl_key], rtol=0, atol=0.000051)
    # average two identical dipole objects
    dipole_avg = average_dipoles([dipole, dipole_read])
    for dpl_key in dipole_avg.data.keys():
        assert_allclose(dipole_read.data[dpl_key],
                        dipole_avg.data[dpl_key], rtol=0, atol=0.000051)
    # Re-averaging an already-averaged dipole must be rejected.
    with pytest.raises(ValueError, match="Dipole at index 0 was already an "
                       "average of 2 trials"):
        dipole_avg = average_dipoles([dipole_avg, dipole_read])
    # average an n_of_1 dipole list
    single_dpl_avg = average_dipoles([dipole])
    for dpl_key in single_dpl_avg.data.keys():
        assert_allclose(
            dipole_read.data[dpl_key],
            single_dpl_avg.data[dpl_key],
            rtol=0,
            atol=0.000051)
    # average dipole list with one dipole object and a zero dipole object
    n_times = len(dipole_read.data['agg'])
    dpl_null = Dipole(np.zeros(n_times, ), np.zeros((n_times, 3)))
    dpl_1 = [dipole, dpl_null]
    dpl_avg = average_dipoles(dpl_1)
    for dpl_key in dpl_avg.data.keys():
        # Averaging with a zero dipole halves the original data.
        assert_allclose(dpl_1[0].data[dpl_key] / 2., dpl_avg.data[dpl_key])
    # Test experimental dipole (single data column)
    dipole_exp = Dipole(times, data[:, 1])
    dipole_exp.write(dpl_out_fname)
    dipole_exp_read = read_dipole(dpl_out_fname)
    assert_allclose(dipole_exp.data['agg'], dipole_exp_read.data['agg'],
                    rtol=1e-2)
    dipole_exp_avg = average_dipoles([dipole_exp, dipole_exp])
    assert_allclose(dipole_exp.data['agg'], dipole_exp_avg.data['agg'])
    # XXX all below to be deprecated in 0.3
    dpls_raw, net = run_hnn_core_fixture(backend='joblib', n_jobs=1,
                                         reduced=True, record_isoma=True,
                                         record_vsoma=True)
    # test deprecation of postproc
    with pytest.warns(DeprecationWarning,
                      match='The postproc-argument is deprecated'):
        dpls, _ = run_hnn_core_fixture(backend='joblib', n_jobs=1,
                                     reduced=True, record_isoma=True,
                                     record_vsoma=True, postproc=True)
    # Raw and post-processed dipoles differ until _post_proc is applied ...
    with pytest.raises(AssertionError):
        assert_allclose(dpls[0].data['agg'], dpls_raw[0].data['agg'])
    dpls_raw[0]._post_proc(net._params['dipole_smooth_win'],
                           net._params['dipole_scalefctr'])
    # ... after which both must agree.
    assert_allclose(dpls_raw[0].data['agg'], dpls[0].data['agg'])
def test_dipole_simulation():
    """Test data produced from simulate_dipole() call.

    Checks argument validation of simulate_dipole(), that Network.copy()
    drops simulation results, that Dipole.copy() is exact, and that a
    connection-less Network warns when simulated.
    """
    hnn_core_root = op.dirname(hnn_core.__file__)
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    params = read_params(params_fname)
    # Shrink the network and move the drives early to keep the run fast.
    params.update({'N_pyr_x': 3,
                   'N_pyr_y': 3,
                   'dipole_smooth_win': 5,
                   't_evprox_1': 5,
                   't_evdist_1': 10,
                   't_evprox_2': 20})
    net = jones_2009_model(params, add_drives_from_params=True)
    # Invalid arguments must raise with informative messages.
    with pytest.raises(ValueError, match="Invalid number of simulations: 0"):
        simulate_dipole(net, tstop=25., n_trials=0)
    with pytest.raises(TypeError, match="record_vsoma must be bool, got int"):
        simulate_dipole(net, tstop=25., n_trials=1, record_vsoma=0)
    with pytest.raises(TypeError, match="record_isoma must be bool, got int"):
        simulate_dipole(net, tstop=25., n_trials=1, record_vsoma=False,
                        record_isoma=0)
    # test Network.copy() returns 'bare' network after simulating
    dpl = simulate_dipole(net, tstop=25., n_trials=1)[0]
    net_copy = net.copy()
    assert len(net_copy.external_drives['evprox1']['events']) == 0
    # test that Dipole.copy() returns the expected exact copy
    assert_allclose(dpl.data['agg'], dpl.copy().data['agg'])
    with pytest.warns(UserWarning, match='No connections'):
        net = Network(params)
        # warning triggered on simulate_dipole()
        simulate_dipole(net, tstop=0.1, n_trials=1)
        # Smoke test for raster plot with no spikes
        net.cell_response.plot_spikes_raster()
@requires_mpi4py
@requires_psutil
def test_cell_response_backends(run_hnn_core_fixture):
    """Test cell_response outputs across backends.

    Runs the same reduced simulation under the joblib and MPI backends
    and checks that the recorded somatic voltages/currents agree exactly,
    that spikes fall inside the supra-threshold voltage window, and that
    drive event times reappear in the spike output.
    """
    # reduced simulation has n_trials=2
    trial_idx, n_trials, gid = 0, 2, 7
    _, joblib_net = run_hnn_core_fixture(backend='joblib', n_jobs=1,
                                         reduced=True, record_isoma=True,
                                         record_vsoma=True)
    _, mpi_net = run_hnn_core_fixture(backend='mpi', n_procs=2, reduced=True,
                                      record_isoma=True, record_vsoma=True)
    n_times = len(joblib_net.cell_response.times)
    # Recordings must exist for every trial with one sample per time step.
    assert len(joblib_net.cell_response.vsoma) == n_trials
    assert len(joblib_net.cell_response.isoma) == n_trials
    assert len(joblib_net.cell_response.vsoma[trial_idx][gid]) == n_times
    assert len(joblib_net.cell_response.isoma[
        trial_idx][gid]['soma_gabaa']) == n_times
    assert len(mpi_net.cell_response.vsoma) == n_trials
    assert len(mpi_net.cell_response.isoma) == n_trials
    assert len(mpi_net.cell_response.vsoma[trial_idx][gid]) == n_times
    assert len(mpi_net.cell_response.isoma[
        trial_idx][gid]['soma_gabaa']) == n_times
    # Both backends must produce identical recordings.
    assert mpi_net.cell_response.vsoma == joblib_net.cell_response.vsoma
    assert mpi_net.cell_response.isoma == joblib_net.cell_response.isoma
    # Test if spike time falls within depolarization window above v_thresh
    v_thresh = 0.0
    times = np.array(joblib_net.cell_response.times)
    spike_times = np.array(joblib_net.cell_response.spike_times[trial_idx])
    spike_gids = np.array(joblib_net.cell_response.spike_gids[trial_idx])
    vsoma = np.array(joblib_net.cell_response.vsoma[trial_idx][gid])
    v_mask = vsoma > v_thresh
    assert np.all([spike_times[spike_gids == gid] > times[v_mask][0],
                   spike_times[spike_gids == gid] < times[v_mask][-1]])
    # test that event times before and after simulation are the same
    for drive_name, drive in joblib_net.external_drives.items():
        gid_ran = joblib_net.gid_ranges[drive_name]
        for idx_drive, event_times in enumerate(drive['events'][trial_idx]):
            # Spikes recorded for this artificial drive cell must match
            # the drive's own event times.
            net_ets = [spike_times[i] for i, g in enumerate(spike_gids) if
                       g == gid_ran[idx_drive]]
            assert_allclose(np.array(event_times), np.array(net_ets))
def test_rmse():
    """Check that _rmse recovers a known constant offset between dipoles."""
    fname = 'yes_trial_S1_ERP_all_avg.txt'
    data_url = ('https://raw.githubusercontent.com/jonescompneurolab/hnn/'
                'master/data/MEG_detection_data/yes_trial_S1_ERP_all_avg.txt')
    # Download the reference MEG trace once; reuse it on later runs.
    if not op.exists(fname):
        urlretrieve(data_url, fname)
    extdata = np.loadtxt(fname)
    sample_times = extdata[:, 0]
    trace = extdata[:, 1]
    # Reference dipole: the same trace replicated over all three columns.
    exp_dpl = Dipole(times=sample_times,
                     data=np.column_stack((trace, trace, trace)))
    params_fname = op.join(op.dirname(hnn_core.__file__),
                           'param', 'default.json')
    params = read_params(params_fname)
    expected_rmse = 0.1
    # Test dipole: the reference shifted by a constant, so the RMSE
    # against the reference equals that constant exactly.
    shifted = trace + expected_rmse
    test_dpl = Dipole(times=sample_times,
                      data=np.column_stack((shifted, shifted, shifted)))
    avg_rmse = _rmse(test_dpl, exp_dpl, tstop=params['tstop'])
    assert_allclose(avg_rmse, expected_rmse)
| [
"hnn_core.jones_2009_model",
"hnn_core.dipole.Dipole",
"numpy.array",
"numpy.loadtxt",
"numpy.arange",
"os.path.exists",
"hnn_core.dipole.simulate_dipole",
"urllib.request.urlretrieve",
"numpy.random.random",
"numpy.testing.assert_allclose",
"hnn_core.average_dipoles",
"hnn_core.dipole._rmse",... | [((450, 471), 'matplotlib.use', 'matplotlib.use', (['"""agg"""'], {}), "('agg')\n", (464, 471), False, 'import matplotlib\n'), ((571, 600), 'os.path.dirname', 'op.dirname', (['hnn_core.__file__'], {}), '(hnn_core.__file__)\n', (581, 600), True, 'import os.path as op\n'), ((620, 667), 'os.path.join', 'op.join', (['hnn_core_root', '"""param"""', '"""default.json"""'], {}), "(hnn_core_root, 'param', 'default.json')\n", (627, 667), True, 'import os.path as op\n'), ((725, 750), 'hnn_core.read_params', 'read_params', (['params_fname'], {}), '(params_fname)\n', (736, 750), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((763, 810), 'numpy.arange', 'np.arange', (['(0)', "(6000 * params['dt'])", "params['dt']"], {}), "(0, 6000 * params['dt'], params['dt'])\n", (772, 810), True, 'import numpy as np\n'), ((822, 849), 'numpy.random.random', 'np.random.random', (['(6000, 3)'], {}), '((6000, 3))\n', (838, 849), True, 'import numpy as np\n'), ((863, 882), 'hnn_core.dipole.Dipole', 'Dipole', (['times', 'data'], {}), '(times, data)\n', (869, 882), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((1472, 1513), 'hnn_core.viz.plot_dipole', 'plot_dipole', (['[dipole, dipole]'], {'show': '(False)'}), '([dipole, dipole], show=False)\n', (1483, 1513), False, 'from hnn_core.viz import plot_dipole\n'), ((1579, 1605), 'hnn_core.read_dipole', 'read_dipole', (['dpl_out_fname'], {}), '(dpl_out_fname)\n', (1590, 1605), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((1610, 1680), 'numpy.testing.assert_allclose', 'assert_allclose', (['dipole_read.times', 'dipole.times'], {'rtol': '(0)', 'atol': '(0.00051)'}), '(dipole_read.times, dipole.times, rtol=0, atol=0.00051)\n', (1625, 1680), False, 'from numpy.testing import assert_allclose\n'), ((1901, 1939), 'hnn_core.average_dipoles', 'average_dipoles', (['[dipole, dipole_read]'], {}), '([dipole, dipole_read])\n', (1916, 1939), False, 'from 
hnn_core import read_params, read_dipole, average_dipoles\n'), ((2354, 2379), 'hnn_core.average_dipoles', 'average_dipoles', (['[dipole]'], {}), '([dipole])\n', (2369, 2379), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((2810, 2832), 'hnn_core.average_dipoles', 'average_dipoles', (['dpl_1'], {}), '(dpl_1)\n', (2825, 2832), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((2998, 3023), 'hnn_core.dipole.Dipole', 'Dipole', (['times', 'data[:, 1]'], {}), '(times, data[:, 1])\n', (3004, 3023), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((3082, 3108), 'hnn_core.read_dipole', 'read_dipole', (['dpl_out_fname'], {}), '(dpl_out_fname)\n', (3093, 3108), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((3113, 3192), 'numpy.testing.assert_allclose', 'assert_allclose', (["dipole_exp.data['agg']", "dipole_exp_read.data['agg']"], {'rtol': '(0.01)'}), "(dipole_exp.data['agg'], dipole_exp_read.data['agg'], rtol=0.01)\n", (3128, 3192), False, 'from numpy.testing import assert_allclose\n'), ((3234, 3275), 'hnn_core.average_dipoles', 'average_dipoles', (['[dipole_exp, dipole_exp]'], {}), '([dipole_exp, dipole_exp])\n', (3249, 3275), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((3280, 3347), 'numpy.testing.assert_allclose', 'assert_allclose', (["dipole_exp.data['agg']", "dipole_exp_avg.data['agg']"], {}), "(dipole_exp.data['agg'], dipole_exp_avg.data['agg'])\n", (3295, 3347), False, 'from numpy.testing import assert_allclose\n'), ((4189, 4250), 'numpy.testing.assert_allclose', 'assert_allclose', (["dpls_raw[0].data['agg']", "dpls[0].data['agg']"], {}), "(dpls_raw[0].data['agg'], dpls[0].data['agg'])\n", (4204, 4250), False, 'from numpy.testing import assert_allclose\n'), ((4361, 4390), 'os.path.dirname', 'op.dirname', (['hnn_core.__file__'], {}), '(hnn_core.__file__)\n', (4371, 4390), True, 'import os.path as op\n'), ((4410, 4457), 
'os.path.join', 'op.join', (['hnn_core_root', '"""param"""', '"""default.json"""'], {}), "(hnn_core_root, 'param', 'default.json')\n", (4417, 4457), True, 'import os.path as op\n'), ((4471, 4496), 'hnn_core.read_params', 'read_params', (['params_fname'], {}), '(params_fname)\n', (4482, 4496), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((4727, 4780), 'hnn_core.jones_2009_model', 'jones_2009_model', (['params'], {'add_drives_from_params': '(True)'}), '(params, add_drives_from_params=True)\n', (4743, 4780), False, 'from hnn_core import Network, jones_2009_model\n'), ((7349, 7389), 'numpy.array', 'np.array', (['joblib_net.cell_response.times'], {}), '(joblib_net.cell_response.times)\n', (7357, 7389), True, 'import numpy as np\n'), ((7408, 7465), 'numpy.array', 'np.array', (['joblib_net.cell_response.spike_times[trial_idx]'], {}), '(joblib_net.cell_response.spike_times[trial_idx])\n', (7416, 7465), True, 'import numpy as np\n'), ((7483, 7539), 'numpy.array', 'np.array', (['joblib_net.cell_response.spike_gids[trial_idx]'], {}), '(joblib_net.cell_response.spike_gids[trial_idx])\n', (7491, 7539), True, 'import numpy as np\n'), ((7552, 7608), 'numpy.array', 'np.array', (['joblib_net.cell_response.vsoma[trial_idx][gid]'], {}), '(joblib_net.cell_response.vsoma[trial_idx][gid])\n', (7560, 7608), True, 'import numpy as np\n'), ((7651, 7767), 'numpy.all', 'np.all', (['[spike_times[spike_gids == gid] > times[v_mask][0], spike_times[spike_gids ==\n gid] < times[v_mask][-1]]'], {}), '([spike_times[spike_gids == gid] > times[v_mask][0], spike_times[\n spike_gids == gid] < times[v_mask][-1]])\n', (7657, 7767), True, 'import numpy as np\n'), ((8583, 8625), 'numpy.loadtxt', 'np.loadtxt', (['"""yes_trial_S1_ERP_all_avg.txt"""'], {}), "('yes_trial_S1_ERP_all_avg.txt')\n", (8593, 8625), True, 'import numpy as np\n'), ((8641, 8729), 'hnn_core.dipole.Dipole', 'Dipole', ([], {'times': 'extdata[:, 0]', 'data': 'np.c_[extdata[:, 1], extdata[:, 1], extdata[:, 
1]]'}), '(times=extdata[:, 0], data=np.c_[extdata[:, 1], extdata[:, 1],\n extdata[:, 1]])\n', (8647, 8729), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((8826, 8873), 'os.path.join', 'op.join', (['hnn_core_root', '"""param"""', '"""default.json"""'], {}), "(hnn_core_root, 'param', 'default.json')\n", (8833, 8873), True, 'import os.path as op\n'), ((8887, 8912), 'hnn_core.read_params', 'read_params', (['params_fname'], {}), '(params_fname)\n', (8898, 8912), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((8953, 9090), 'hnn_core.dipole.Dipole', 'Dipole', ([], {'times': 'extdata[:, 0]', 'data': 'np.c_[extdata[:, 1] + expected_rmse, extdata[:, 1] + expected_rmse, extdata\n [:, 1] + expected_rmse]'}), '(times=extdata[:, 0], data=np.c_[extdata[:, 1] + expected_rmse, \n extdata[:, 1] + expected_rmse, extdata[:, 1] + expected_rmse])\n', (8959, 9090), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((9189, 9236), 'hnn_core.dipole._rmse', '_rmse', (['test_dpl', 'exp_dpl'], {'tstop': "params['tstop']"}), "(test_dpl, exp_dpl, tstop=params['tstop'])\n", (9194, 9236), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((9242, 9282), 'numpy.testing.assert_allclose', 'assert_allclose', (['avg_rmse', 'expected_rmse'], {}), '(avg_rmse, expected_rmse)\n', (9257, 9282), False, 'from numpy.testing import assert_allclose\n'), ((1164, 1193), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1177, 1193), False, 'import pytest\n'), ((1203, 1262), 'numpy.testing.assert_allclose', 'assert_allclose', (["dipole.data['agg']", "dipole_raw.data['agg']"], {}), "(dipole.data['agg'], dipole_raw.data['agg'])\n", (1218, 1262), False, 'from numpy.testing import assert_allclose\n'), ((1728, 1818), 'numpy.testing.assert_allclose', 'assert_allclose', (['dipole_read.data[dpl_key]', 'dipole.data[dpl_key]'], {'rtol': '(0)', 'atol': '(5.1e-05)'}), '(dipole_read.data[dpl_key], 
dipole.data[dpl_key], rtol=0,\n atol=5.1e-05)\n', (1743, 1818), False, 'from numpy.testing import assert_allclose\n'), ((1991, 2085), 'numpy.testing.assert_allclose', 'assert_allclose', (['dipole_read.data[dpl_key]', 'dipole_avg.data[dpl_key]'], {'rtol': '(0)', 'atol': '(5.1e-05)'}), '(dipole_read.data[dpl_key], dipole_avg.data[dpl_key], rtol=0,\n atol=5.1e-05)\n', (2006, 2085), False, 'from numpy.testing import assert_allclose\n'), ((2117, 2209), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Dipole at index 0 was already an average of 2 trials"""'}), "(ValueError, match=\n 'Dipole at index 0 was already an average of 2 trials')\n", (2130, 2209), False, 'import pytest\n'), ((2253, 2295), 'hnn_core.average_dipoles', 'average_dipoles', (['[dipole_avg, dipole_read]'], {}), '([dipole_avg, dipole_read])\n', (2268, 2295), False, 'from hnn_core import read_params, read_dipole, average_dipoles\n'), ((2435, 2533), 'numpy.testing.assert_allclose', 'assert_allclose', (['dipole_read.data[dpl_key]', 'single_dpl_avg.data[dpl_key]'], {'rtol': '(0)', 'atol': '(5.1e-05)'}), '(dipole_read.data[dpl_key], single_dpl_avg.data[dpl_key],\n rtol=0, atol=5.1e-05)\n', (2450, 2533), False, 'from numpy.testing import assert_allclose\n'), ((2720, 2737), 'numpy.zeros', 'np.zeros', (['n_times'], {}), '(n_times)\n', (2728, 2737), True, 'import numpy as np\n'), ((2741, 2763), 'numpy.zeros', 'np.zeros', (['(n_times, 3)'], {}), '((n_times, 3))\n', (2749, 2763), True, 'import numpy as np\n'), ((2881, 2949), 'numpy.testing.assert_allclose', 'assert_allclose', (['(dpl_1[0].data[dpl_key] / 2.0)', 'dpl_avg.data[dpl_key]'], {}), '(dpl_1[0].data[dpl_key] / 2.0, dpl_avg.data[dpl_key])\n', (2896, 2949), False, 'from numpy.testing import assert_allclose\n'), ((3640, 3717), 'pytest.warns', 'pytest.warns', (['DeprecationWarning'], {'match': '"""The postproc-argument is deprecated"""'}), "(DeprecationWarning, match='The postproc-argument is deprecated')\n", (3652, 3717), False, 'import 
pytest\n'), ((3962, 3991), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (3975, 3991), False, 'import pytest\n'), ((4001, 4062), 'numpy.testing.assert_allclose', 'assert_allclose', (["dpls[0].data['agg']", "dpls_raw[0].data['agg']"], {}), "(dpls[0].data['agg'], dpls_raw[0].data['agg'])\n", (4016, 4062), False, 'from numpy.testing import assert_allclose\n'), ((4790, 4857), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Invalid number of simulations: 0"""'}), "(ValueError, match='Invalid number of simulations: 0')\n", (4803, 4857), False, 'import pytest\n'), ((4867, 4911), 'hnn_core.dipole.simulate_dipole', 'simulate_dipole', (['net'], {'tstop': '(25.0)', 'n_trials': '(0)'}), '(net, tstop=25.0, n_trials=0)\n', (4882, 4911), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((4920, 4988), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""record_vsoma must be bool, got int"""'}), "(TypeError, match='record_vsoma must be bool, got int')\n", (4933, 4988), False, 'import pytest\n'), ((4998, 5058), 'hnn_core.dipole.simulate_dipole', 'simulate_dipole', (['net'], {'tstop': '(25.0)', 'n_trials': '(1)', 'record_vsoma': '(0)'}), '(net, tstop=25.0, n_trials=1, record_vsoma=0)\n', (5013, 5058), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((5067, 5135), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""record_isoma must be bool, got int"""'}), "(TypeError, match='record_isoma must be bool, got int')\n", (5080, 5135), False, 'import pytest\n'), ((5145, 5230), 'hnn_core.dipole.simulate_dipole', 'simulate_dipole', (['net'], {'tstop': '(25.0)', 'n_trials': '(1)', 'record_vsoma': '(False)', 'record_isoma': '(0)'}), '(net, tstop=25.0, n_trials=1, record_vsoma=False, record_isoma=0\n )\n', (5160, 5230), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((5326, 5370), 'hnn_core.dipole.simulate_dipole', 'simulate_dipole', (['net'], 
{'tstop': '(25.0)', 'n_trials': '(1)'}), '(net, tstop=25.0, n_trials=1)\n', (5341, 5370), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((5600, 5649), 'pytest.warns', 'pytest.warns', (['UserWarning'], {'match': '"""No connections"""'}), "(UserWarning, match='No connections')\n", (5612, 5649), False, 'import pytest\n'), ((5665, 5680), 'hnn_core.Network', 'Network', (['params'], {}), '(params)\n', (5672, 5680), False, 'from hnn_core import Network, jones_2009_model\n'), ((5738, 5781), 'hnn_core.dipole.simulate_dipole', 'simulate_dipole', (['net'], {'tstop': '(0.1)', 'n_trials': '(1)'}), '(net, tstop=0.1, n_trials=1)\n', (5753, 5781), False, 'from hnn_core.dipole import Dipole, simulate_dipole, _rmse\n'), ((8464, 8505), 'os.path.exists', 'op.exists', (['"""yes_trial_S1_ERP_all_avg.txt"""'], {}), "('yes_trial_S1_ERP_all_avg.txt')\n", (8473, 8505), True, 'import os.path as op\n'), ((8515, 8568), 'urllib.request.urlretrieve', 'urlretrieve', (['data_url', '"""yes_trial_S1_ERP_all_avg.txt"""'], {}), "(data_url, 'yes_trial_S1_ERP_all_avg.txt')\n", (8526, 8568), False, 'from urllib.request import urlretrieve\n'), ((8776, 8805), 'os.path.dirname', 'op.dirname', (['hnn_core.__file__'], {}), '(hnn_core.__file__)\n', (8786, 8805), True, 'import os.path as op\n'), ((8197, 8218), 'numpy.array', 'np.array', (['event_times'], {}), '(event_times)\n', (8205, 8218), True, 'import numpy as np\n'), ((8220, 8237), 'numpy.array', 'np.array', (['net_ets'], {}), '(net_ets)\n', (8228, 8237), True, 'import numpy as np\n')] |
from ..models import EntityOnServer, AccessToken, Organisation, Server, ServerUser, Key, KeyFetchEvent, AuditNote, AuditEvent, LoginAttempt
from django.template import Context, Template
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from uuid import uuid4
from datetime import datetime
import json
@csrf_exempt
def get_keys(request):
    """
    Get Keys API - used in conjunction with the v2.1 client

    Validates the posted access token, records the login attempt for
    auditing, resolves the requested public key, and returns it as
    ``ssh-rsa <key>`` (or an empty body when no valid key is found).
    """
    # input is something similar to:
    # {
    #    "server_id":
    #    "access_token": {
    #        id:""
    #        value:""
    #    },
    #    username: ""
    #    origin_ip: ""
    #    key_fp: ""
    #    key_type: ""
    # }
    data = json.loads(request.body)
    # 1. Decide if acceptable request
    token = AccessToken.get_validated_token(data["access_token"]["id"], data["access_token"]["value"])
    # Validate access_token
    # FIXME: refactor all this code to prevent data leakage through errors
    server = Server.objects.filter(active=True).filter(org=token.org).filter(public_id=data["server_id"]).get()
    # Audit trail: record every key-fetch attempt before resolving the key.
    la = LoginAttempt()
    la.username = data['username']
    la.key_fp = data['key_fp']
    la.remote_ip = data['origin_ip']
    la.server_ip = request.META['REMOTE_ADDR']
    la.public_id = str(uuid4())
    la.server = server
    la.audit_type = AuditEvent.TYPE_KEYFETCH
    la.audit_status = AuditEvent.STATUS_OPEN
    la.reported_at = datetime.now()
    la.save()
    # 2. pull key data
    key = None
    server_user = ServerUser.objects.filter(server=server).filter(name=data["username"])
    target_key = Key.objects.filter(key_fingerprint=data["key_fp"]).get()
    cont = True
    # NOTE(review): server_user is a QuerySet, so `== 0` is always False and
    # this guard never fires — probably meant `not server_user.exists()`.
    if server_user == 0:
        # login attempt
        cont = False
    # NOTE(review): target_key is a Key instance (.get() already raised if
    # no match existed), so `== 0` is always False here as well.
    if target_key == 0:
        cont = False
    if cont:
        # look for EntityOnServer to match
        try:
            target_eos = EntityOnServer.objects.filter(server_user=server_user).filter(named_key=target_key).get()
            #print("EOS %s" % target_eos )
            key = target_key
        except Exception:
            # FIXME: do a nicer exception
            # Fallback: accept the key if its owner has any entity on this
            # server user; otherwise fail hard.
            targets = EntityOnServer.objects.filter(server_user=server_user).filter(entity=target_key.owner)
            if len(targets) > 0:
                key = target_key
            else:
                raise Exception("Boom")
        else:
            # try succeeded: re-resolve the key scoped to the matched entity
            key = Key.objects.filter(owner=target_eos.entity).filter(id=target_key.id).get()
        # Only return keys that are active and match the requested fingerprint.
        if key.active and key.key_fingerprint == data["key_fp"]:
            pass
        else:
            key = None
    # Key should now be a Key object
    #print ("--> %s" % key)
    output = ""
    if key:
        sub_template = Template("ssh-rsa {{ key.key }}")
        c = Context({"key":key})
        output = sub_template.render(c)
return HttpResponse(output) | [
"json.loads",
"django.template.Template",
"django.http.HttpResponse",
"uuid.uuid4",
"datetime.datetime.now",
"django.template.Context"
] | [((701, 725), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (711, 725), False, 'import json\n'), ((1429, 1443), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1441, 1443), False, 'from datetime import datetime\n'), ((2860, 2880), 'django.http.HttpResponse', 'HttpResponse', (['output'], {}), '(output)\n', (2872, 2880), False, 'from django.http import HttpResponse\n'), ((1286, 1293), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1291, 1293), False, 'from uuid import uuid4\n'), ((2737, 2770), 'django.template.Template', 'Template', (['"""ssh-rsa {{ key.key }}"""'], {}), "('ssh-rsa {{ key.key }}')\n", (2745, 2770), False, 'from django.template import Context, Template\n'), ((2787, 2808), 'django.template.Context', 'Context', (["{'key': key}"], {}), "({'key': key})\n", (2794, 2808), False, 'from django.template import Context, Template\n')] |
from typing import List
from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set
from reader import dump_file_hex_with_locs
class Translator:
    """Translates *.mpy files into hex dumps and decoded opcodes."""

    def __init__(self, file: str):
        """Create a translator bound to the given file path."""
        self.file = file

    def get_file_hex(self):
        """Return a full hex dump of the file, annotated with addresses."""
        return dump_file_hex_with_locs(self.file)

    def get_file_hex_at(self, _from: str, _to: str):
        """Return the bytes of the hex dump between two addresses.

        :param _from: start address
        :param _to: end address
        :return: bytes between the two addresses
        """
        full_dump = self.get_file_hex()
        return parse_bytes(full_dump, _from, _to)

    def get_file(self):
        """Return the file path this translator reads from."""
        return self.file

    def get_magic(self) -> str:
        """Return the file's magic number (first eight bytes, joined)."""
        first_line = self.get_all_bytes()[0]
        return "".join(first_line[:8])

    def get_all_bytes(self):
        """Return every byte of the file."""
        return get_bytes(self.get_file_hex().split("\n"))

    def get_split_bytes(self) -> List[List[str]]:
        """Return the file's bytes grouped per line, header skipped."""
        per_line = split_bytes_from_lines(self.get_all_bytes())
        # Drop the first four bytes of the opening line (file header).
        per_line[0] = per_line[0][4:]
        return per_line

    def get_bytes_at(self, _from: str, _to: str) -> List[List[str]]:
        """Return the per-line bytes between two addresses.

        :param _from: start address
        :param _to: end address
        :return: bytes grouped per line
        """
        return split_bytes_from_lines(self.get_file_hex_at(_from, _to))

    def get_instruction_set(self) -> List[str]:
        """Return the file's decoded instruction set."""
        per_line = self.get_split_bytes()
        # Offset of 8 total: skip four more bytes so decoding starts at
        # the first BC_BASE_RESERVED.
        body = self._flatten([per_line[0][4:], per_line[1]])
        return wrap_parsed_set(parse_instruction_set(body))

    def get_instructions_at(self, _from: str, _to: str) -> List[str]:
        """Return the instructions decoded between two addresses.

        :param _from: start address
        :param _to: end address
        :return: decoded instructions
        """
        body = self._flatten(self.get_bytes_at(_from, _to))
        return wrap_parsed_set(parse_instruction_set(body))

    @staticmethod
    def _flatten(_list):
        """Flatten a list of lists into one flat list."""
        return [item for sublist in _list for item in sublist]
| [
"parser.parse_instruction_set",
"parser.wrap_parsed_set",
"reader.dump_file_hex_with_locs"
] | [((565, 599), 'reader.dump_file_hex_with_locs', 'dump_file_hex_with_locs', (['self.file'], {}), '(self.file)\n', (588, 599), False, 'from reader import dump_file_hex_with_locs\n'), ((2269, 2298), 'parser.parse_instruction_set', 'parse_instruction_set', (['_bytes'], {}), '(_bytes)\n', (2290, 2298), False, 'from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set\n'), ((2314, 2335), 'parser.wrap_parsed_set', 'wrap_parsed_set', (['_set'], {}), '(_set)\n', (2329, 2335), False, 'from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set\n'), ((2654, 2683), 'parser.parse_instruction_set', 'parse_instruction_set', (['_bytes'], {}), '(_bytes)\n', (2675, 2683), False, 'from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set\n'), ((2699, 2720), 'parser.wrap_parsed_set', 'wrap_parsed_set', (['_set'], {}), '(_set)\n', (2714, 2720), False, 'from parser import parse_bytes, split_bytes_from_lines, get_bytes, parse_instruction_set, wrap_parsed_set\n')] |
import json
import boto3 # Amazon S3 client library
# AWS handles created once at module load (reused across Lambda invocations).
s3 = boto3.resource('s3')
dynamodb = boto3.resource('dynamodb')
# Table holding per-problem metadata, including 'testcaseCount'.
problems_table = dynamodb.Table('codebreaker-problems')
# Bucket storing uploaded testcase files under a '<problemName>/' prefix.
bucket = s3.Bucket('codebreaker-testdata')
def lambda_handler(event, context):
    """Count a problem's testcase objects in S3 and persist the count to DynamoDB.

    The S3 listing under "<problemName>/" contains two objects per testcase
    (the value written to DynamoDB is half the raw object count).

    :param event: Lambda event dict; must contain 'problemName'.
    :param context: Lambda context object (unused).
    :return: dict with 'statusCode' 200 and the raw object count
             under 'testcaseCount' (same value the original returned).
    """
    problem_name = event['problemName']
    # Count every object under the problem's prefix without keeping a
    # manual counter variable.
    testcase_count = sum(
        1 for _ in bucket.objects.filter(Prefix="{0}/".format(problem_name))
    )
    print(testcase_count)
    problems_table.update_item(
        Key={'problemName': problem_name},
        # Plain string: the original used an f-string with no placeholders.
        UpdateExpression='set #b=:a',
        # Two S3 objects per testcase, hence the division by 2.
        ExpressionAttributeValues={':a': int(testcase_count / 2)},
        # 'testcaseCount' aliased through #b to avoid reserved-word issues.
        ExpressionAttributeNames={'#b': 'testcaseCount'}
    )
    return {
        'statusCode': 200,
        'testcaseCount': testcase_count
    }
| [
"boto3.resource"
] | [((57, 77), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (71, 77), False, 'import boto3\n'), ((89, 115), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (103, 115), False, 'import boto3\n')] |
#!/usr/bin/env python
"""
Southern California Earthquake Center Broadband Platform
Copyright 2010-2017 Southern California Earthquake Center
These are acceptance tests for the broadband platforms
$Id: AcceptTests.py 1795 2017-02-09 16:23:34Z fsilva $
"""
from __future__ import division, print_function
# Import Python modules
import os
import new
import sys
import shutil
import optparse
import unittest
# Import Broadband modules
import bband_utils
import seqnum
import cmp_bbp
from install_cfg import InstallCfg
def find_tests(test, rerun):
    """
    Discover acceptance-test XML files in the accept_inputs directory and
    attach one dynamically generated test method per file to the
    BBPAcceptanceTests class.

    test  -- optional substring; when given, only XML files whose name
             contains it are selected
    rerun -- when True, the resume file is deleted so every test runs again

    NOTE(review): this function relies on Python-2-only constructs
    (raw_input, new.instancemethod) despite the __future__ imports.
    """
    install = InstallCfg()
    resume = True
    accept_test_inputs = "accept_inputs"
    accept_test_refs = "accept_refs"
    input_dir = os.path.join(install.A_TEST_REF_DIR, accept_test_inputs)
    if not os.path.exists(input_dir):
        # These are expected to be in the dist
        print("Acceptance test inputs dir %s does not exist, aborting" %
              (input_dir))
        sys.exit()
    # Create list of test XML files
    files = os.listdir(input_dir)
    wfext = ".xml"
    # First we find all the tests
    test_files = []
    for testfile in files:
        if testfile.endswith(wfext):
            # Don't add SDSU tests on Mac OS X
            if sys.platform == 'darwin' and testfile.find("SDSU") >= 0:
                if test is None or (test is not None and testfile.find(test) >= 0):
                    print("*** Mac OS X detected: skipping test %s." %
                          (testfile))
                continue
            if test is None:
                test_files.append(testfile)
            else:
                if testfile.find(test) >= 0:
                    test_files.append(testfile)
    # The resume file records one completed test filename per line.
    resume_file = os.path.join(install.A_OUT_LOG_DIR, "resume.txt")
    resume_list = ""
    if rerun:
        os.remove(resume_file)
    # Check for already completed tests if not rerunning
    if resume == True and rerun == False:
        if os.path.exists(resume_file):
            resume_fp = open(resume_file, 'r')
            resume_list = resume_fp.read().splitlines()
            completed_test_count = len(resume_list)
            print("==> Completed Tests : %d" % (completed_test_count))
            resume_fp.close()
            if ((test is None) and
                (completed_test_count >= len(test_files))):
                print("All the acceptance tests have passed previously!")
                # raw_input is Python 2 only.
                proceed = raw_input("Would you like to re-run "
                                    "all the acceptance tests? (y/n)")
                if str.lower(proceed) == 'y':
                    os.remove(resume_file)
                    resume_list = ""
                else:
                    sys.exit(0)
    # Create unittest test case for each file
    for xml_file in test_files:
        # Skip test if we ran it already
        if xml_file in resume_list:
            print("==> Skipping %s" % (xml_file))
            continue
        file_base = xml_file[0:xml_file.find(wfext)]
        # pieces = file_base.split('-')
        # Adjust tolerance depending on test mode
        tolerance = 0.03
        #This defines a method that we're going to add to the
        #BBPAcceptanceTests class. The keyword binding has to
        #be done b/c Python is storing pointers to 'file' and 'file_base'
        #so w/o the keywords, 'file' and 'file_base' in the function will
        #point to the final values
        def permutation_test(self, file_base=file_base, xml_file=xml_file):
            # Runs one full broadband simulation for this XML file and
            # compares every output against the stored reference files.
            input_dir = os.path.join(self.install.A_TEST_REF_DIR,
                                     accept_test_inputs)
            log_dir = os.path.join(self.install.A_OUT_LOG_DIR,
                                   "acceptance_test_logs")
            sim_id = int(seqnum.get_seq_num())
            self.file_base = file_base
            self.log_file = os.path.join(log_dir, "%s.log" % (self.file_base))
            self.input_file = os.path.join(input_dir, xml_file)
            cmd = ("%s/run_bbp.py -x %s -s %d -l %s" %
                   (self.install.A_COMP_DIR,
                    self.input_file, sim_id, self.log_file))
            rc = bband_utils.runprog(cmd, False)
            self.failIf(rc != 0, "Acceptance test failed to execute")
            ref_file_dir = os.path.join(self.install.A_TEST_REF_DIR,
                                        accept_test_refs,
                                        self.file_base)
            agree = True
            for ref_file in os.listdir(ref_file_dir):
                if os.path.isfile(os.path.join(ref_file_dir, ref_file)):
                    test_file = os.path.join(self.install.A_OUT_DATA_DIR,
                                             str(sim_id),
                                             ("%d.%s" % (sim_id, ref_file)))
                    a_ref_file = os.path.join(ref_file_dir, ref_file)
                    compare_result = cmp_bbp.cmp_bbp(a_ref_file, test_file,
                                                    tolerance=tolerance)
                    errmsg = ("Output file "
                              "%s does not match reference file: %s" %
                              (test_file, a_ref_file))
                    self.failIf(compare_result != 0, errmsg)
                    if compare_result != 0:
                        agree = False
            if agree == True:
                # Write success to the resume file
                resume_fp = open(os.path.join(install.A_OUT_LOG_DIR,
                                             "resume.txt"), 'a')
                resume_fp.write("%s\n" % xml_file)
                resume_fp.flush()
                resume_fp.close()
            sys.stdout.flush()
            sys.stderr.flush()
        # We create a method object which is an instance method for
        # BBPAcceptanceTests which executes the code in
        # testPermutation
        # (new.instancemethod is Python 2 only.)
        method = new.instancemethod(permutation_test,
                                    None, BBPAcceptanceTests)
        # We give the method a new name in BBPAcceptanceTests
        # which contains the xml file being run
        setattr(BBPAcceptanceTests, "test_%s" % file_base, method)
class BBPAcceptanceTests(unittest.TestCase):
    """Container class; the actual test methods are attached dynamically by
    find_tests(), one per acceptance-test XML file."""

    def setUp(self):
        """Stage the required input files into the user run directory and
        make sure the acceptance-test log directory exists."""
        self.install = InstallCfg()
        accept_test_inputs = "accept_inputs"
        self.resume = True
        run_dir = self.install.A_USER_DATA_DIR
        # Create run directory, in case it doesn't exist
        bband_utils.mkdirs([run_dir], print_cmd=False)
        # Copy each required input file only when it is missing from the
        # run directory (the original repeated this block four times).
        for input_file in ["northridge_3_sta.stl",
                           "northridge_eq_gp.src",
                           "northridge_eq_ucsb.src",
                           "northridge_eq_song.src"]:
            if not os.path.exists(os.path.join(run_dir, input_file)):
                src_path = os.path.join(self.install.A_TEST_REF_DIR,
                                        accept_test_inputs,
                                        input_file)
                shutil.copy2(src_path, run_dir)
        # Make sure the per-test log directory exists.
        log_dir = os.path.join(self.install.A_OUT_LOG_DIR,
                               "acceptance_test_logs")
        if not os.path.exists(log_dir):
            bband_utils.mkdirs([log_dir])
if __name__ == '__main__':
    # Parse options
    # NOTE(review): optparse is deprecated in Python 3 in favor of argparse,
    # but this module otherwise targets Python 2 (see find_tests).
    parser = optparse.OptionParser()
    parser.add_option("-t", "--test",
                      dest="test",
                      help="Execute specific test",
                      metavar="TEST")
    parser.add_option("-r", "--rerun",
                      action="store_true",
                      dest="rerun",
                      help="Rerun tests already completed")
    (options, args) = parser.parse_args()
    # Normalize the parsed options into plain locals for find_tests().
    if options.test is not None:
        test = options.test
    else:
        test = None
    # options.rerun defaults to None and is True only when -r was given.
    if options.rerun is not None:
        rerun = True
    else:
        rerun = False
    # Attach one test method per discovered XML file, then run them all.
    find_tests(test, rerun)
    suite = unittest.TestLoader().loadTestsFromTestCase(BBPAcceptanceTests)
    print("==> Number of tests to run: %d" % suite.countTestCases())
    unittest.TextTestRunner(verbosity=2).run(suite)
| [
"os.path.exists",
"install_cfg.InstallCfg",
"os.listdir",
"bband_utils.runprog",
"bband_utils.mkdirs",
"seqnum.get_seq_num",
"shutil.copy2",
"sys.stderr.flush",
"os.path.join",
"optparse.OptionParser",
"new.instancemethod",
"cmp_bbp.cmp_bbp",
"sys.exit",
"sys.stdout.flush",
"unittest.Tex... | [((654, 666), 'install_cfg.InstallCfg', 'InstallCfg', ([], {}), '()\n', (664, 666), False, 'from install_cfg import InstallCfg\n'), ((781, 837), 'os.path.join', 'os.path.join', (['install.A_TEST_REF_DIR', 'accept_test_inputs'], {}), '(install.A_TEST_REF_DIR, accept_test_inputs)\n', (793, 837), False, 'import os\n'), ((1091, 1112), 'os.listdir', 'os.listdir', (['input_dir'], {}), '(input_dir)\n', (1101, 1112), False, 'import os\n'), ((1791, 1840), 'os.path.join', 'os.path.join', (['install.A_OUT_LOG_DIR', '"""resume.txt"""'], {}), "(install.A_OUT_LOG_DIR, 'resume.txt')\n", (1803, 1840), False, 'import os\n'), ((8169, 8192), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (8190, 8192), False, 'import optparse\n'), ((849, 874), 'os.path.exists', 'os.path.exists', (['input_dir'], {}), '(input_dir)\n', (863, 874), False, 'import os\n'), ((1031, 1041), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1039, 1041), False, 'import sys\n'), ((1884, 1906), 'os.remove', 'os.remove', (['resume_file'], {}), '(resume_file)\n', (1893, 1906), False, 'import os\n'), ((2017, 2044), 'os.path.exists', 'os.path.exists', (['resume_file'], {}), '(resume_file)\n', (2031, 2044), False, 'import os\n'), ((5965, 6027), 'new.instancemethod', 'new.instancemethod', (['permutation_test', 'None', 'BBPAcceptanceTests'], {}), '(permutation_test, None, BBPAcceptanceTests)\n', (5983, 6027), False, 'import new\n'), ((6332, 6344), 'install_cfg.InstallCfg', 'InstallCfg', ([], {}), '()\n', (6342, 6344), False, 'from install_cfg import InstallCfg\n'), ((6552, 6598), 'bband_utils.mkdirs', 'bband_utils.mkdirs', (['[run_dir]'], {'print_cmd': '(False)'}), '([run_dir], print_cmd=False)\n', (6570, 6598), False, 'import bband_utils\n'), ((3592, 3653), 'os.path.join', 'os.path.join', (['self.install.A_TEST_REF_DIR', 'accept_test_inputs'], {}), '(self.install.A_TEST_REF_DIR, accept_test_inputs)\n', (3604, 3653), False, 'import os\n'), ((3713, 3777), 'os.path.join', 'os.path.join', 
(['self.install.A_OUT_LOG_DIR', '"""acceptance_test_logs"""'], {}), "(self.install.A_OUT_LOG_DIR, 'acceptance_test_logs')\n", (3725, 3777), False, 'import os\n'), ((3927, 3975), 'os.path.join', 'os.path.join', (['log_dir', "('%s.log' % self.file_base)"], {}), "(log_dir, '%s.log' % self.file_base)\n", (3939, 3975), False, 'import os\n'), ((4008, 4041), 'os.path.join', 'os.path.join', (['input_dir', 'xml_file'], {}), '(input_dir, xml_file)\n', (4020, 4041), False, 'import os\n'), ((4220, 4251), 'bband_utils.runprog', 'bband_utils.runprog', (['cmd', '(False)'], {}), '(cmd, False)\n', (4239, 4251), False, 'import bband_utils\n'), ((4349, 4424), 'os.path.join', 'os.path.join', (['self.install.A_TEST_REF_DIR', 'accept_test_refs', 'self.file_base'], {}), '(self.install.A_TEST_REF_DIR, accept_test_refs, self.file_base)\n', (4361, 4424), False, 'import os\n'), ((4558, 4582), 'os.listdir', 'os.listdir', (['ref_file_dir'], {}), '(ref_file_dir)\n', (4568, 4582), False, 'import os\n'), ((5747, 5765), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5763, 5765), False, 'import sys\n'), ((5778, 5796), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (5794, 5796), False, 'import sys\n'), ((6701, 6790), 'os.path.join', 'os.path.join', (['self.install.A_TEST_REF_DIR', 'accept_test_inputs', '"""northridge_3_sta.stl"""'], {}), "(self.install.A_TEST_REF_DIR, accept_test_inputs,\n 'northridge_3_sta.stl')\n", (6713, 6790), False, 'import os\n'), ((6871, 6902), 'shutil.copy2', 'shutil.copy2', (['src_path', 'run_dir'], {}), '(src_path, run_dir)\n', (6883, 6902), False, 'import shutil\n'), ((7005, 7094), 'os.path.join', 'os.path.join', (['self.install.A_TEST_REF_DIR', 'accept_test_inputs', '"""northridge_eq_gp.src"""'], {}), "(self.install.A_TEST_REF_DIR, accept_test_inputs,\n 'northridge_eq_gp.src')\n", (7017, 7094), False, 'import os\n'), ((7175, 7206), 'shutil.copy2', 'shutil.copy2', (['src_path', 'run_dir'], {}), '(src_path, run_dir)\n', (7187, 7206), False, 'import 
shutil\n'), ((7311, 7402), 'os.path.join', 'os.path.join', (['self.install.A_TEST_REF_DIR', 'accept_test_inputs', '"""northridge_eq_ucsb.src"""'], {}), "(self.install.A_TEST_REF_DIR, accept_test_inputs,\n 'northridge_eq_ucsb.src')\n", (7323, 7402), False, 'import os\n'), ((7483, 7514), 'shutil.copy2', 'shutil.copy2', (['src_path', 'run_dir'], {}), '(src_path, run_dir)\n', (7495, 7514), False, 'import shutil\n'), ((7619, 7710), 'os.path.join', 'os.path.join', (['self.install.A_TEST_REF_DIR', 'accept_test_inputs', '"""northridge_eq_song.src"""'], {}), "(self.install.A_TEST_REF_DIR, accept_test_inputs,\n 'northridge_eq_song.src')\n", (7631, 7710), False, 'import os\n'), ((7791, 7822), 'shutil.copy2', 'shutil.copy2', (['src_path', 'run_dir'], {}), '(src_path, run_dir)\n', (7803, 7822), False, 'import shutil\n'), ((8797, 8818), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (8816, 8818), False, 'import unittest\n'), ((8934, 8970), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (8957, 8970), False, 'import unittest\n'), ((3838, 3858), 'seqnum.get_seq_num', 'seqnum.get_seq_num', ([], {}), '()\n', (3856, 3858), False, 'import seqnum\n'), ((6630, 6675), 'os.path.join', 'os.path.join', (['run_dir', '"""northridge_3_sta.stl"""'], {}), "(run_dir, 'northridge_3_sta.stl')\n", (6642, 6675), False, 'import os\n'), ((6934, 6979), 'os.path.join', 'os.path.join', (['run_dir', '"""northridge_eq_gp.src"""'], {}), "(run_dir, 'northridge_eq_gp.src')\n", (6946, 6979), False, 'import os\n'), ((7238, 7285), 'os.path.join', 'os.path.join', (['run_dir', '"""northridge_eq_ucsb.src"""'], {}), "(run_dir, 'northridge_eq_ucsb.src')\n", (7250, 7285), False, 'import os\n'), ((7546, 7593), 'os.path.join', 'os.path.join', (['run_dir', '"""northridge_eq_song.src"""'], {}), "(run_dir, 'northridge_eq_song.src')\n", (7558, 7593), False, 'import os\n'), ((7854, 7918), 'os.path.join', 'os.path.join', (['self.install.A_OUT_LOG_DIR', 
'"""acceptance_test_logs"""'], {}), "(self.install.A_OUT_LOG_DIR, 'acceptance_test_logs')\n", (7866, 7918), False, 'import os\n'), ((2672, 2694), 'os.remove', 'os.remove', (['resume_file'], {}), '(resume_file)\n', (2681, 2694), False, 'import os\n'), ((2774, 2785), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2782, 2785), False, 'import sys\n'), ((4618, 4654), 'os.path.join', 'os.path.join', (['ref_file_dir', 'ref_file'], {}), '(ref_file_dir, ref_file)\n', (4630, 4654), False, 'import os\n'), ((4899, 4935), 'os.path.join', 'os.path.join', (['ref_file_dir', 'ref_file'], {}), '(ref_file_dir, ref_file)\n', (4911, 4935), False, 'import os\n'), ((4973, 5032), 'cmp_bbp.cmp_bbp', 'cmp_bbp.cmp_bbp', (['a_ref_file', 'test_file'], {'tolerance': 'tolerance'}), '(a_ref_file, test_file, tolerance=tolerance)\n', (4988, 5032), False, 'import cmp_bbp\n'), ((5514, 5563), 'os.path.join', 'os.path.join', (['install.A_OUT_LOG_DIR', '"""resume.txt"""'], {}), "(install.A_OUT_LOG_DIR, 'resume.txt')\n", (5526, 5563), False, 'import os\n'), ((7996, 8060), 'os.path.join', 'os.path.join', (['self.install.A_OUT_LOG_DIR', '"""acceptance_test_logs"""'], {}), "(self.install.A_OUT_LOG_DIR, 'acceptance_test_logs')\n", (8008, 8060), False, 'import os\n')] |
import torch
import torch.nn as nn
import torchvision.models as models
from torch.nn.utils.rnn import pack_padded_sequence as pack
from torch.nn.utils.rnn import pad_packed_sequence as unpack
from torch.autograd import Variable
class EncoderCNN(nn.Module):
    """Image encoder: a pretrained ResNet-152 backbone (with its final fc
    classifier removed) followed by a trainable linear projection into the
    embedding space and batch normalization on the result."""

    def __init__(self, embed_size):
        """Load the pretrained ResNet-152 and replace top fc layer."""
        super(EncoderCNN, self).__init__()
        backbone = models.resnet152(pretrained=True)
        # Keep every layer except the final fully-connected classifier.
        feature_layers = list(backbone.children())[:-1]
        self.resnet = nn.Sequential(*feature_layers)
        self.linear = nn.Linear(backbone.fc.in_features, embed_size)
        self.bn = nn.BatchNorm1d(embed_size, momentum=0.01)
        self.init_weights()

    def init_weights(self):
        """Initialize the projection layer's weights."""
        self.linear.weight.data.normal_(0.0, 0.02)
        self.linear.bias.data.fill_(0)

    def forward(self, images):
        """Extract the image feature vectors."""
        backbone_out = self.resnet(images)
        # Re-wrapping .data detaches the features from the graph, so the
        # ResNet backbone itself is not fine-tuned (legacy-PyTorch idiom).
        detached = Variable(backbone_out.data)
        flat = detached.view(detached.size(0), -1)
        return self.bn(self.linear(flat))
class LayoutEncoder(nn.Module):
    """Encodes a layout (sequence of labeled, located boxes) into a single
    vector: per-step label embedding + linear location encoding are summed
    and fed through an LSTM; the last valid hidden state is returned."""

    def __init__(self, layout_encoding_size, hidden_size, vocab_size, num_layers):
        """Set the hyper-parameters and build the layers."""
        super(LayoutEncoder, self).__init__()
        self.label_encoder = nn.Embedding(vocab_size, layout_encoding_size)
        # Each location is a 4-vector (view(-1, 4) in forward).
        self.location_encoder = nn.Linear(4, layout_encoding_size)
        self.lstm = nn.LSTM(layout_encoding_size, hidden_size, num_layers, batch_first=True)
        self.init_weights()

    def init_weights(self):
        """Initialize weights."""
        self.label_encoder.weight.data.uniform_(-0.1, 0.1)
        self.location_encoder.weight.data.uniform_(-0.1, 0.1)
        self.location_encoder.bias.data.fill_(0)

    def forward(self, label_seqs, location_seqs, lengths):
        """Encode padded label/location sequences of the given lengths and
        return one hidden vector per batch item, in the original batch order.

        NOTE(review): pack_padded_sequence requires descending lengths, hence
        the sort below; reverse_batch_idx undoes the sort at the end.
        """
        # sort label sequences and location sequences in batch dimension according to length
        batch_idx = sorted(range(len(lengths)), key=lambda k: lengths[k], reverse=True)
        reverse_batch_idx = torch.LongTensor([batch_idx.index(i) for i in range(len(batch_idx))])
        lens_sorted = sorted(lengths, reverse=True)
        label_seqs_sorted = torch.index_select(label_seqs, 0, torch.LongTensor(batch_idx))
        location_seqs_sorted = torch.index_select(location_seqs, 0, torch.LongTensor(batch_idx))
        # assert torch.equal(torch.index_select(label_seqs_sorted, 0, reverse_batch_idx), label_seqs)
        # assert torch.equal(torch.index_select(location_seqs_sorted, 0, reverse_batch_idx), location_seqs)
        if torch.cuda.is_available():
            reverse_batch_idx = reverse_batch_idx.cuda()
            label_seqs_sorted = label_seqs_sorted.cuda()
            location_seqs_sorted = location_seqs_sorted.cuda()
        # create Variables (legacy PyTorch; inputs need no gradients)
        label_seqs_sorted_var = Variable(label_seqs_sorted, requires_grad=False)
        location_seqs_sorted_var = Variable(location_seqs_sorted, requires_grad=False)
        # encode label sequences
        label_encoding = self.label_encoder(label_seqs_sorted_var)
        # encode location sequences
        location_seqs_sorted_var = location_seqs_sorted_var.view(-1, 4)
        location_encoding = self.location_encoder(location_seqs_sorted_var)
        location_encoding = location_encoding.view(label_encoding.size(0), -1, location_encoding.size(1))
        # layout encoding - batch_size x max_seq_len x embed_size
        layout_encoding = label_encoding + location_encoding
        packed = pack(layout_encoding, lens_sorted, batch_first=True)
        hiddens, _ = self.lstm(packed)
        # unpack hiddens and get last hidden vector
        hiddens_unpack = unpack(hiddens, batch_first=True)[0]  # batch_size x max_seq_len x embed_size
        # Build per-row gather indices pointing at position length-1
        # (the last non-padded timestep of each sequence).
        last_hidden_idx = torch.zeros(hiddens_unpack.size(0), 1, hiddens_unpack.size(2)).long()
        for i in range(hiddens_unpack.size(0)):
            last_hidden_idx[i, 0, :] = lens_sorted[i] - 1
        if torch.cuda.is_available():
            last_hidden_idx = last_hidden_idx.cuda()
        last_hidden = torch.gather(hiddens_unpack, 1, Variable(last_hidden_idx, requires_grad=False)) # batch_size x 1 x embed_size
        last_hidden = torch.squeeze(last_hidden, 1) # batch_size x embed_size
        # convert back to original batch order
        last_hidden = torch.index_select(last_hidden, 0, Variable(reverse_batch_idx, requires_grad=False))
        return last_hidden
class DecoderRNN(nn.Module):
    """LSTM caption decoder: embeds tokens, prepends the image feature as
    the first input step, and projects hidden states to vocabulary logits."""

    def __init__(self, embed_size, hidden_size, vocab_size, num_layers):
        """Set the hyper-parameters and build the layers."""
        super(DecoderRNN, self).__init__()
        self.embed = nn.Embedding(vocab_size, embed_size)
        self.lstm = nn.LSTM(embed_size, hidden_size, num_layers, batch_first=True)
        self.linear = nn.Linear(hidden_size, vocab_size)
        self.init_weights()

    def init_weights(self):
        """Initialize weights."""
        self.embed.weight.data.uniform_(-0.1, 0.1)
        self.linear.weight.data.uniform_(-0.1, 0.1)
        self.linear.bias.data.fill_(0)

    def forward(self, features, captions, lengths):
        """Decode image feature vectors and generates captions."""
        embeddings = self.embed(captions)
        # Prepend the image feature as the first timestep of each sequence.
        embeddings = torch.cat((features.unsqueeze(1), embeddings), 1)
        packed = pack(embeddings, lengths, batch_first=True)
        hiddens, _ = self.lstm(packed)
        # hiddens[0] is the packed data tensor; project it to vocab logits.
        outputs = self.linear(hiddens[0])
        return outputs

    def sample(self, features, states=None):
        """Samples captions for given image features (Greedy search)."""
        sampled_ids = []
        inputs = features.unsqueeze(1)
        for i in range(20):                                      # maximum sampling length
            hiddens, states = self.lstm(inputs, states)          # (batch_size, 1, hidden_size),
            outputs = self.linear(hiddens.squeeze(1))            # (batch_size, vocab_size)
            # Greedy step: take the highest-scoring token id.
            predicted = outputs.max(1)[1]
            sampled_ids.append(predicted)
            # Feed the chosen token back in as the next input.
            inputs = self.embed(predicted)
        sampled_ids = torch.cat(sampled_ids, 1)                  # (batch_size, 20)
return sampled_ids.squeeze() | [
"torch.nn.Sequential",
"torch.nn.LSTM",
"torch.LongTensor",
"torch.nn.BatchNorm1d",
"torch.cat",
"torchvision.models.resnet152",
"torch.cuda.is_available",
"torch.nn.utils.rnn.pack_padded_sequence",
"torch.nn.Linear",
"torch.squeeze",
"torch.autograd.Variable",
"torch.nn.utils.rnn.pad_packed_s... | [((426, 459), 'torchvision.models.resnet152', 'models.resnet152', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (442, 459), True, 'import torchvision.models as models\n'), ((562, 585), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (575, 585), True, 'import torch.nn as nn\n'), ((608, 652), 'torch.nn.Linear', 'nn.Linear', (['resnet.fc.in_features', 'embed_size'], {}), '(resnet.fc.in_features, embed_size)\n', (617, 652), True, 'import torch.nn as nn\n'), ((671, 712), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['embed_size'], {'momentum': '(0.01)'}), '(embed_size, momentum=0.01)\n', (685, 712), True, 'import torch.nn as nn\n'), ((1053, 1076), 'torch.autograd.Variable', 'Variable', (['features.data'], {}), '(features.data)\n', (1061, 1076), False, 'from torch.autograd import Variable\n'), ((1459, 1505), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'layout_encoding_size'], {}), '(vocab_size, layout_encoding_size)\n', (1471, 1505), True, 'import torch.nn as nn\n'), ((1538, 1572), 'torch.nn.Linear', 'nn.Linear', (['(4)', 'layout_encoding_size'], {}), '(4, layout_encoding_size)\n', (1547, 1572), True, 'import torch.nn as nn\n'), ((1593, 1665), 'torch.nn.LSTM', 'nn.LSTM', (['layout_encoding_size', 'hidden_size', 'num_layers'], {'batch_first': '(True)'}), '(layout_encoding_size, hidden_size, num_layers, batch_first=True)\n', (1600, 1665), True, 'import torch.nn as nn\n'), ((2730, 2755), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2753, 2755), False, 'import torch\n'), ((2994, 3042), 'torch.autograd.Variable', 'Variable', (['label_seqs_sorted'], {'requires_grad': '(False)'}), '(label_seqs_sorted, requires_grad=False)\n', (3002, 3042), False, 'from torch.autograd import Variable\n'), ((3078, 3129), 'torch.autograd.Variable', 'Variable', (['location_seqs_sorted'], {'requires_grad': '(False)'}), '(location_seqs_sorted, requires_grad=False)\n', (3086, 3129), False, 
'from torch.autograd import Variable\n'), ((3667, 3719), 'torch.nn.utils.rnn.pack_padded_sequence', 'pack', (['layout_encoding', 'lens_sorted'], {'batch_first': '(True)'}), '(layout_encoding, lens_sorted, batch_first=True)\n', (3671, 3719), True, 'from torch.nn.utils.rnn import pack_padded_sequence as pack\n'), ((4128, 4153), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4151, 4153), False, 'import torch\n'), ((4363, 4392), 'torch.squeeze', 'torch.squeeze', (['last_hidden', '(1)'], {}), '(last_hidden, 1)\n', (4376, 4392), False, 'import torch\n'), ((4840, 4876), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embed_size'], {}), '(vocab_size, embed_size)\n', (4852, 4876), True, 'import torch.nn as nn\n'), ((4897, 4959), 'torch.nn.LSTM', 'nn.LSTM', (['embed_size', 'hidden_size', 'num_layers'], {'batch_first': '(True)'}), '(embed_size, hidden_size, num_layers, batch_first=True)\n', (4904, 4959), True, 'import torch.nn as nn\n'), ((4982, 5016), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'vocab_size'], {}), '(hidden_size, vocab_size)\n', (4991, 5016), True, 'import torch.nn as nn\n'), ((5512, 5555), 'torch.nn.utils.rnn.pack_padded_sequence', 'pack', (['embeddings', 'lengths'], {'batch_first': '(True)'}), '(embeddings, lengths, batch_first=True)\n', (5516, 5555), True, 'from torch.nn.utils.rnn import pack_padded_sequence as pack\n'), ((6277, 6302), 'torch.cat', 'torch.cat', (['sampled_ids', '(1)'], {}), '(sampled_ids, 1)\n', (6286, 6302), False, 'import torch\n'), ((2381, 2408), 'torch.LongTensor', 'torch.LongTensor', (['batch_idx'], {}), '(batch_idx)\n', (2397, 2408), False, 'import torch\n'), ((2478, 2505), 'torch.LongTensor', 'torch.LongTensor', (['batch_idx'], {}), '(batch_idx)\n', (2494, 2505), False, 'import torch\n'), ((3837, 3870), 'torch.nn.utils.rnn.pad_packed_sequence', 'unpack', (['hiddens'], {'batch_first': '(True)'}), '(hiddens, batch_first=True)\n', (3843, 3870), True, 'from torch.nn.utils.rnn import 
pad_packed_sequence as unpack\n'), ((4262, 4308), 'torch.autograd.Variable', 'Variable', (['last_hidden_idx'], {'requires_grad': '(False)'}), '(last_hidden_idx, requires_grad=False)\n', (4270, 4308), False, 'from torch.autograd import Variable\n'), ((4525, 4573), 'torch.autograd.Variable', 'Variable', (['reverse_batch_idx'], {'requires_grad': '(False)'}), '(reverse_batch_idx, requires_grad=False)\n', (4533, 4573), False, 'from torch.autograd import Variable\n')] |
"""
Unit tests for the searcher module. Those tests mock the Entrez class
and do not make any sort of HTTP request.
"""
# pylint: disable=redefined-outer-name
import io
from pathlib import Path
from Bio import Entrez
from dbvirus_searcher import Searcher
def test_searcher_initialization(searcher):
    """
    Verify that the fixture-provided searcher and a custom-db searcher
    are constructed with the expected parameters.
    """
    assert isinstance(searcher, Searcher)
    assert searcher.db == "sra"
    other = Searcher("<EMAIL>", db="other_db")
    assert other.db == "other_db"
def test_searcher_searches_sra(searcher: Searcher, mocker):
    """
    When given a valid search string, the searcher must delegate to
    Biopython's Entrez.esearch with the expected arguments.
    """
    # esearch must return a buffer, so hand the mock an in-memory stream.
    mocker.patch("Bio.Entrez.esearch")
    Entrez.esearch.return_value = io.StringIO("{}")
    query = '"Homo sapiens"[Organism]'
    searcher.search(query)
    # pylint: disable=no-member
    Entrez.esearch.assert_called_with("sra", query, retmax=10, retmode="json")
def test_searcher_configurer_entrez():
    """
    Constructing a Searcher must propagate the e-mail and API key to the
    global Entrez configuration.
    """
    Searcher(email="<EMAIL>", api_key="3141516")
    expected_email, expected_key = "<EMAIL>", "3141516"
    assert Entrez.email == expected_email
    assert Entrez.api_key == expected_key
def test_searcher_returns_dictionary(searcher: Searcher, mocker):
    """
    The search() call must hand back the SRA resultset as a parsed dict.
    """
    mocker.patch("Bio.Entrez.esearch")
    Entrez.esearch.return_value = io.StringIO("{}")
    resultset = searcher.search("Human", max_results=3)
    assert isinstance(resultset, dict)
def test_fetch_result(searcher: Searcher, mocker):
    """
    Fetching by Entrez UID must call efetch and return the parsed data
    as a non-empty dict.
    """
    mocker.patch("Bio.Entrez.efetch")
    sample_path = Path(__file__).parent.absolute().joinpath("sample_efetch_result.xml")
    Entrez.efetch.return_value = open(sample_path)
    data = searcher.fetch("8801091")
    # pylint: disable=no-member
    Entrez.efetch.assert_called()
    assert data
    assert isinstance(data, dict)
| [
"dbvirus_searcher.Searcher",
"pathlib.Path",
"Bio.Entrez.esearch.assert_called_with",
"Bio.Entrez.efetch.assert_called",
"io.StringIO"
] | [((459, 493), 'dbvirus_searcher.Searcher', 'Searcher', (['"""<EMAIL>"""'], {'db': '"""other_db"""'}), "('<EMAIL>', db='other_db')\n", (467, 493), False, 'from dbvirus_searcher import Searcher\n'), ((911, 928), 'io.StringIO', 'io.StringIO', (['"""{}"""'], {}), "('{}')\n", (922, 928), False, 'import io\n'), ((1015, 1115), 'Bio.Entrez.esearch.assert_called_with', 'Entrez.esearch.assert_called_with', (['"""sra"""', '""""Homo sapiens"[Organism]"""'], {'retmax': '(10)', 'retmode': '"""json"""'}), '(\'sra\', \'"Homo sapiens"[Organism]\', retmax\n =10, retmode=\'json\')\n', (1048, 1115), False, 'from Bio import Entrez\n'), ((1289, 1333), 'dbvirus_searcher.Searcher', 'Searcher', ([], {'email': '"""<EMAIL>"""', 'api_key': '"""3141516"""'}), "(email='<EMAIL>', api_key='3141516')\n", (1297, 1333), False, 'from dbvirus_searcher import Searcher\n'), ((1628, 1645), 'io.StringIO', 'io.StringIO', (['"""{}"""'], {}), "('{}')\n", (1639, 1645), False, 'import io\n'), ((2108, 2137), 'Bio.Entrez.efetch.assert_called', 'Entrez.efetch.assert_called', ([], {}), '()\n', (2135, 2137), False, 'from Bio import Entrez\n'), ((1957, 1971), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1961, 1971), False, 'from pathlib import Path\n')] |
#!/usr/bin/env python
"""Command line interface for the compact ACME library."""
import acme_lib
import argparse
import sys
import textwrap
def _gen_account_key(account_key, key_length, algorithm):
    """Generate a new ACME account key and write it to *account_key*."""
    new_key = acme_lib.create_key(key_length=key_length, algorithm=algorithm)
    acme_lib.write_file(account_key, new_key)
def _gen_cert_key(key, key_length, algorithm):
    """Generate a new certificate private key and write it to *key*."""
    new_key = acme_lib.create_key(key_length=key_length, algorithm=algorithm)
    acme_lib.write_file(key, new_key)
def _gen_csr(domains, key, csr, must_staple):
    """Generate a CSR for the comma-separated *domains* and write it to *csr*."""
    # Place the OpenSSL config next to the CSR, swapping a .csr suffix for .cnf.
    base = csr[:-4] if csr.endswith('.csr') else csr
    config_filename = base + '.cnf'
    sys.stderr.write('Writing OpenSSL config to {0}.\n'.format(config_filename))
    csr_data = acme_lib.generate_csr(key, config_filename, domains.split(','),
                                    must_staple=must_staple)
    acme_lib.write_file(csr, csr_data)
def _print_csr(csr):
    """Print a human-readable dump of the CSR to stdout."""
    text = acme_lib.get_csr_as_text(csr)
    sys.stdout.write(text + '\n')
def _get_root(root_url, cert):
    """Download the CA root certificate; print it, or store it at *cert*."""
    pem = acme_lib.download_certificate(root_url) + '\n'
    if cert is None:
        sys.stdout.write(pem)
    else:
        acme_lib.write_file(cert, pem)
        sys.stderr.write("Stored root certificate at '{0}'.\n".format(cert))
def _get_intermediate(intermediate_url, cert):
    """Download the CA intermediate certificate; print it, or store it at *cert*."""
    pem = acme_lib.download_certificate(intermediate_url) + '\n'
    if cert is None:
        sys.stdout.write(pem)
    else:
        acme_lib.write_file(cert, pem)
        sys.stderr.write("Stored intermediate certificate at '{0}'.\n".format(cert))
def _get_certificate(account_key, csr, acme_dir, CA, cert, email):
    """Run the full single-machine ACME flow: prepare challenges, publish
    them under *acme_dir*, get the CSR signed by *CA*, then print the
    certificate or store it at *cert*. Challenge files are always removed,
    even on failure."""
    sys.stderr.write("Preparing challenges...")
    state = acme_lib.get_challenges(account_key, csr, CA, email_address=email)
    sys.stderr.write(" ok\n")
    try:
        sys.stderr.write("Writing and verifying challenges...")
        acme_lib.write_challenges(state, acme_dir)
        acme_lib.verify_challenges(state)
        sys.stderr.write(" ok\n")
        sys.stderr.write("Notifying CA of challenges...")
        acme_lib.notify_challenges(state)
        sys.stderr.write(" ok\n")
        sys.stderr.write("Verifying domains...\n")
        # The callback reports each successfully verified domain as it happens.
        result = acme_lib.check_challenges(state, csr, lambda domain: sys.stderr.write("Verified domain {0}!\n".format(domain)))
        sys.stderr.write("Certificate is signed!\n")
        if cert is None:
            sys.stdout.write(result)
        else:
            acme_lib.write_file(cert, result)
            sys.stderr.write("Stored certificate at '{0}'.\n".format(cert))
    finally:
        # Clean up the challenge files regardless of success or failure.
        acme_lib.remove_challenges(state, acme_dir)
def _get_certificate_part1(statefile, account_key, csr, acme_dir, CA, email):
    """First half of the split certificate flow: prepare and publish the
    challenges, then serialize the ACME state to *statefile* so part 2 can
    finish on another machine."""
    log = sys.stderr.write
    log("Preparing challenges...")
    state = acme_lib.get_challenges(account_key, csr, CA, email_address=email)
    log(" ok\n")
    log("Writing challenges...")
    acme_lib.write_challenges(state, acme_dir)
    log(" ok\n")
    log("Serializing state...")
    with open(statefile, "w") as state_fp:
        state_fp.write(acme_lib.serialize_state(state))
    log(" ok\n")
def _get_certificate_part2(statefile, csr, cert):
    """Second half of a split run: reload the state serialized by part 1,
    verify the (already uploaded) challenges and obtain the certificate.

    The certificate is written to *cert*, or printed on stdout when *cert*
    is None. Progress messages go to stderr.
    """
    sys.stderr.write("Deserializing state...")
    with open(statefile, "r") as sf:
        state = acme_lib.deserialize_state(sf.read())
    sys.stderr.write(" ok\n")
    sys.stderr.write("Verifying challenges...")
    acme_lib.verify_challenges(state)
    sys.stderr.write(" ok\n")
    sys.stderr.write("Notifying CA of challenges...")
    acme_lib.notify_challenges(state)
    sys.stderr.write(" ok\n")
    sys.stderr.write("Verifying domains...\n")
    result = acme_lib.check_challenges(state, csr, lambda domain: sys.stderr.write("Verified domain {0}!\n".format(domain)))
    sys.stderr.write("Certificate is signed!\n")
    if cert is None:
        sys.stdout.write(result)
    else:
        acme_lib.write_file(cert, result)
        sys.stderr.write("Stored certificate at '{0}'.\n".format(cert))
if __name__ == "__main__":
    # Command-line entry point: build the parser, validate the requested
    # sub-command's options and dispatch to the matching handler above.
    try:
        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=textwrap.dedent("""\
            This script automates the process of getting a signed TLS certificate from
            Let's Encrypt using the ACME protocol. It can both be run from the server
            and from another machine (when splitting the process up in two steps).
            The script needs to have access to your private account key, so PLEASE READ
            THROUGH IT! It's only 265+569 lines (including docstrings), so it won't
            take too long.
            ===Example Usage: Creating Letsencrypt account key, private key for certificate and CSR===
            python acme_compact.py gen-account-key --account-key /path/to/account.key
            python acme_compact.py gen-key --key /path/to/domain.key
            python acme_compact.py gen-csr --key /path/to/domain.key --csr /path/to/domain.csr --domains example.com,www.example.com
            ===================
            Note that the email address does not have to be specified.
            Also note that by default, RSA keys are generated. If you want ECC keys,
            please specify "--algorithm <alg>" with <alg> being "p-256" or "p-384".
            ===Example Usage: Creating certifiate from CSR on server===
            python acme_compact.py get-certificate --account-key /path/to/account.key --email <EMAIL> --csr /path/to/domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ --cert /path/to/signed.crt 2>> /var/log/acme_compact.log
            ===================
            ===Example Usage: Creating certifiate from CSR from another machine===
            python acme_compact.py get-certificate-part-1 --account-key /path/to/account.key --email <EMAIL> --csr /path/to/domain.csr --statefile /path/to/state.json --acme-dir /tmp/acme-challenge/ 2>> /var/log/acme_compact.log
            ... copy files from /tmp/acme-challenge/ into /usr/share/nginx/html/.well-known/acme-challenge/ on the web server ...
            python acme_compact.py get-certificate-part-2 --csr /path/to/domain.csr --statefile /path/to/state.json --cert /path/to/signed.crt 2>> /var/log/acme_compact.log
            ===================
            ===Example Usage: Combining signed certificate with intermediate certificate===
            python acme_compact.py get-intermediate --cert /path/to/domain-intermediate.crt
            cat /path/to/signed.crt /path/to/domain-intermediate.crt > /path/to/signed-with-intermediate.crt
            ===================
            """)
        )
        # Sub-command registry. For each command: human-readable help text,
        # the argparse dest names it requires / optionally accepts, and the
        # handler function to invoke with those values as keyword arguments.
        commands = {
            'gen-account-key': {
                'help': 'Generates an account key.',
                'requires': ["account_key"],
                'optional': ["key_length", "algorithm"],
                'command': _gen_account_key,
            },
            'gen-key': {
                'help': 'Generates a certificate key.',
                'requires': ["key"],
                'optional': ["key_length", "algorithm"],
                'command': _gen_cert_key,
            },
            'gen-csr': {
                'help': 'Generates a certificate signing request (CSR). Under *nix, use /dev/stdin after --key to provide key via stdin.',
                'requires': ["domains", "key", "csr"],
                'optional': ["must_staple"],
                'command': _gen_csr,
            },
            'print-csr': {
                'help': 'Prints the given certificate signing request (CSR) in human-readable form.',
                'requires': ["csr"],
                'optional': [],
                'command': _print_csr,
            },
            'get-root': {
                'help': 'Retrieves the root certificate from the CA server and prints it to stdout (if --cert is not specified).',
                'requires': [],
                'optional': ["root_url", "cert"],
                'command': _get_root,
            },
            'get-intermediate': {
                'help': 'Retrieves the intermediate certificate from the CA server and prints it to stdout (if --cert is not specified).',
                'requires': [],
                'optional': ["intermediate_url", "cert"],
                'command': _get_intermediate,
            },
            'get-certificate': {
                'help': 'Given a CSR and an account key, retrieves a certificate and prints it to stdout (if --cert is not specified).',
                'requires': ["account_key", "csr", "acme_dir"],
                'optional': ["CA", "cert", "email"],
                'command': _get_certificate,
            },
            'get-certificate-part-1': {
                'help': 'Given a CSR and an account key, prepares retrieving a certificate. The generated challenge files must be manually uploaded to their respective positions.',
                'requires': ["account_key", "csr", "acme_dir", "statefile"],
                'optional': ["CA", "email"],
                'command': _get_certificate_part1,
            },
            'get-certificate-part-2': {
                'help': 'Assuming that get-certificate-part-1 ran through and the challenges were uploaded, retrieves a certificate and prints it to stdout (if --cert is not specified).',
                'requires': ["csr", "statefile"],
                'optional': ["cert"],
                'command': _get_certificate_part2,
            },
        }
        parser.add_argument("command", type=str, nargs='?', help="must be one of {0}".format(', '.join('"{0}"'.format(command) for command in sorted(commands.keys()))))
        parser.add_argument("--account-key", required=False, help="path to your Let's Encrypt account private key")
        parser.add_argument("--algorithm", required=False, default="rsa", help="the algorithm to use (rsa, ...)") # FIXME
        parser.add_argument("--key-length", type=int, default=4096, required=False, help="key length for private keys")
        parser.add_argument("--key", required=False, help="path to your certificate's private key")
        parser.add_argument("--csr", required=False, help="path to your certificate signing request")
        parser.add_argument("--acme-dir", required=False, help="path to the .well-known/acme-challenge/ directory")
        parser.add_argument("--CA", required=False, default=None, help="CA to use (default: {0})".format(acme_lib.default_ca))
        parser.add_argument("--use-staging-CA", required=False, default=False, action='store_true', help="Use Let's Encrypt staging CA")
        parser.add_argument("--statefile", required=False, default=None, help="state file for two-part run")
        parser.add_argument("-d", "--domains", required=False, default=None, help="a comma-separated list of domain names")
        parser.add_argument("--cert", required=False, help="file name to store certificate into (otherwise it is printed on stdout)")
        parser.add_argument("--email", required=False, help="email address (will be associated with account)")
        parser.add_argument("--intermediate-url", required=False, default=acme_lib.default_intermediate_url, help="URL for the intermediate certificate (default: {0})".format(acme_lib.default_intermediate_url))
        parser.add_argument("--root-url", required=False, default=acme_lib.default_root_url, help="URL for the root certificate (default: {0})".format(acme_lib.default_root_url))
        parser.add_argument("--must-staple", required=False, default=False, action='store_true', help="request must staple extension for certificate")
        args = parser.parse_args()
        if args.command is None:
            # No command given: print a detailed per-command usage summary
            # on stderr and exit with an error status.
            sys.stderr.write("Command must be one of {1}. More information on the available commands:\n\n".format(args.command, ', '.join('"{0}"'.format(command) for command in sorted(commands.keys()))))
            for command in sorted(commands.keys()):
                cmd = commands[command]
                sys.stderr.write('  {0}:\n'.format(command))
                sys.stderr.write('{0}\n'.format(textwrap.indent(cmd['help'], prefix='    ')))
                if cmd['requires']:
                    sys.stderr.write('    Mandatory options: {0}\n'.format(', '.join(['--{0}'.format(opt.replace('_', '-')) for opt in cmd['requires']])))
                if cmd['optional']:
                    sys.stderr.write('    Optional options: {0}\n'.format(', '.join(['--{0}'.format(opt.replace('_', '-')) for opt in cmd['optional']])))
            sys.exit(-1)
        elif args.command not in commands:
            sys.stderr.write("Unknown command '{0}'! Command must be one of {1}.\n".format(args.command, ', '.join('"{0}"'.format(command) for command in sorted(commands.keys()))))
            sys.exit(-1)
        else:
            cmd = commands[args.command]
            accepted = set()
            values = {}
            # --use-staging-CA is a shorthand that must not be combined with
            # an explicit --CA value.
            if args.__dict__['use_staging_CA']:
                if args.__dict__['CA'] is not None:
                    sys.stderr.write("Cannot specify both '--use-staging-CA' and provide '--CA'!\n")
                    sys.exit(-1)
                args.__dict__['CA'] = acme_lib.staging_ca
            # Collect mandatory options (error out if missing) ...
            for req in cmd['requires']:
                accepted.add(req)
                if args.__dict__[req] is None:
                    sys.stderr.write("Command '{0}' requires that option '{1}' is set!\n".format(args.command, req))
                    sys.exit(-1)
                values[req] = args.__dict__[req]
            # ... and optional ones (passed through as-is, possibly None).
            for opt in cmd['optional']:
                accepted.add(opt)
                values[opt] = args.__dict__[opt]
            # Warn about options that were explicitly set on the command
            # line but are not used by the selected command.
            for opt in args.__dict__:
                if opt == 'command':
                    continue
                if args.__dict__[opt] is not parser.get_default(opt):
                    if opt not in accepted:
                        sys.stderr.write("Warning: option '{0}' is ignored for this command.\n".format(opt))
            # --CA defaults to None above so the staging check can detect an
            # explicit value; substitute the real default here.
            if 'CA' in values and values['CA'] is None:
                values['CA'] = acme_lib.default_ca
            cmd['command'](**values)
    except Exception as e:
        sys.stderr.write("Error occured: {0}\n".format(str(e)))
        sys.exit(-2)
| [
"acme_lib.write_file",
"acme_lib.notify_challenges",
"textwrap.dedent",
"sys.exit",
"acme_lib.get_challenges",
"acme_lib.download_certificate",
"textwrap.indent",
"sys.stderr.write",
"acme_lib.create_key",
"acme_lib.get_csr_as_text",
"acme_lib.serialize_state",
"acme_lib.remove_challenges",
... | [((211, 274), 'acme_lib.create_key', 'acme_lib.create_key', ([], {'key_length': 'key_length', 'algorithm': 'algorithm'}), '(key_length=key_length, algorithm=algorithm)\n', (230, 274), False, 'import acme_lib\n'), ((279, 316), 'acme_lib.write_file', 'acme_lib.write_file', (['account_key', 'key'], {}), '(account_key, key)\n', (298, 316), False, 'import acme_lib\n'), ((380, 443), 'acme_lib.create_key', 'acme_lib.create_key', ([], {'key_length': 'key_length', 'algorithm': 'algorithm'}), '(key_length=key_length, algorithm=algorithm)\n', (399, 443), False, 'import acme_lib\n'), ((448, 481), 'acme_lib.write_file', 'acme_lib.write_file', (['key', 'the_key'], {}), '(key, the_key)\n', (467, 481), False, 'import acme_lib\n'), ((840, 873), 'acme_lib.write_file', 'acme_lib.write_file', (['csr', 'the_csr'], {}), '(csr, the_csr)\n', (859, 873), False, 'import acme_lib\n'), ((998, 1037), 'acme_lib.download_certificate', 'acme_lib.download_certificate', (['root_url'], {}), '(root_url)\n', (1027, 1037), False, 'import acme_lib\n'), ((1285, 1332), 'acme_lib.download_certificate', 'acme_lib.download_certificate', (['intermediate_url'], {}), '(intermediate_url)\n', (1314, 1332), False, 'import acme_lib\n'), ((1603, 1646), 'sys.stderr.write', 'sys.stderr.write', (['"""Preparing challenges..."""'], {}), "('Preparing challenges...')\n", (1619, 1646), False, 'import sys\n'), ((1659, 1725), 'acme_lib.get_challenges', 'acme_lib.get_challenges', (['account_key', 'csr', 'CA'], {'email_address': 'email'}), '(account_key, csr, CA, email_address=email)\n', (1682, 1725), False, 'import acme_lib\n'), ((1730, 1755), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (1746, 1755), False, 'import sys\n'), ((2670, 2713), 'sys.stderr.write', 'sys.stderr.write', (['"""Preparing challenges..."""'], {}), "('Preparing challenges...')\n", (2686, 2713), False, 'import sys\n'), ((2726, 2792), 'acme_lib.get_challenges', 'acme_lib.get_challenges', (['account_key', 'csr', 'CA'], 
{'email_address': 'email'}), '(account_key, csr, CA, email_address=email)\n', (2749, 2792), False, 'import acme_lib\n'), ((2797, 2822), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (2813, 2822), False, 'import sys\n'), ((2827, 2868), 'sys.stderr.write', 'sys.stderr.write', (['"""Writing challenges..."""'], {}), "('Writing challenges...')\n", (2843, 2868), False, 'import sys\n'), ((2873, 2915), 'acme_lib.write_challenges', 'acme_lib.write_challenges', (['state', 'acme_dir'], {}), '(state, acme_dir)\n', (2898, 2915), False, 'import acme_lib\n'), ((2920, 2945), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (2936, 2945), False, 'import sys\n'), ((2950, 2990), 'sys.stderr.write', 'sys.stderr.write', (['"""Serializing state..."""'], {}), "('Serializing state...')\n", (2966, 2990), False, 'import sys\n'), ((3082, 3107), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (3098, 3107), False, 'import sys\n'), ((3164, 3206), 'sys.stderr.write', 'sys.stderr.write', (['"""Deserializing state..."""'], {}), "('Deserializing state...')\n", (3180, 3206), False, 'import sys\n'), ((3302, 3327), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (3318, 3327), False, 'import sys\n'), ((3332, 3375), 'sys.stderr.write', 'sys.stderr.write', (['"""Verifying challenges..."""'], {}), "('Verifying challenges...')\n", (3348, 3375), False, 'import sys\n'), ((3380, 3413), 'acme_lib.verify_challenges', 'acme_lib.verify_challenges', (['state'], {}), '(state)\n', (3406, 3413), False, 'import acme_lib\n'), ((3418, 3443), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (3434, 3443), False, 'import sys\n'), ((3448, 3497), 'sys.stderr.write', 'sys.stderr.write', (['"""Notifying CA of challenges..."""'], {}), "('Notifying CA of challenges...')\n", (3464, 3497), False, 'import sys\n'), ((3502, 3535), 'acme_lib.notify_challenges', 
'acme_lib.notify_challenges', (['state'], {}), '(state)\n', (3528, 3535), False, 'import acme_lib\n'), ((3540, 3565), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (3556, 3565), False, 'import sys\n'), ((3570, 3612), 'sys.stderr.write', 'sys.stderr.write', (['"""Verifying domains...\n"""'], {}), "('Verifying domains...\\n')\n", (3586, 3612), False, 'import sys\n'), ((3742, 3786), 'sys.stderr.write', 'sys.stderr.write', (['"""Certificate is signed!\n"""'], {}), "('Certificate is signed!\\n')\n", (3758, 3786), False, 'import sys\n'), ((1067, 1094), 'sys.stdout.write', 'sys.stdout.write', (["(ic + '\\n')"], {}), "(ic + '\\n')\n", (1083, 1094), False, 'import sys\n'), ((1113, 1149), 'acme_lib.write_file', 'acme_lib.write_file', (['cert', "(ic + '\\n')"], {}), "(cert, ic + '\\n')\n", (1132, 1149), False, 'import acme_lib\n'), ((1362, 1389), 'sys.stdout.write', 'sys.stdout.write', (["(ic + '\\n')"], {}), "(ic + '\\n')\n", (1378, 1389), False, 'import sys\n'), ((1408, 1444), 'acme_lib.write_file', 'acme_lib.write_file', (['cert', "(ic + '\\n')"], {}), "(cert, ic + '\\n')\n", (1427, 1444), False, 'import acme_lib\n'), ((1773, 1828), 'sys.stderr.write', 'sys.stderr.write', (['"""Writing and verifying challenges..."""'], {}), "('Writing and verifying challenges...')\n", (1789, 1828), False, 'import sys\n'), ((1837, 1879), 'acme_lib.write_challenges', 'acme_lib.write_challenges', (['state', 'acme_dir'], {}), '(state, acme_dir)\n', (1862, 1879), False, 'import acme_lib\n'), ((1888, 1921), 'acme_lib.verify_challenges', 'acme_lib.verify_challenges', (['state'], {}), '(state)\n', (1914, 1921), False, 'import acme_lib\n'), ((1930, 1955), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (1946, 1955), False, 'import sys\n'), ((1964, 2013), 'sys.stderr.write', 'sys.stderr.write', (['"""Notifying CA of challenges..."""'], {}), "('Notifying CA of challenges...')\n", (1980, 2013), False, 'import sys\n'), ((2022, 2055), 
'acme_lib.notify_challenges', 'acme_lib.notify_challenges', (['state'], {}), '(state)\n', (2048, 2055), False, 'import acme_lib\n'), ((2064, 2089), 'sys.stderr.write', 'sys.stderr.write', (['""" ok\n"""'], {}), "(' ok\\n')\n", (2080, 2089), False, 'import sys\n'), ((2098, 2140), 'sys.stderr.write', 'sys.stderr.write', (['"""Verifying domains...\n"""'], {}), "('Verifying domains...\\n')\n", (2114, 2140), False, 'import sys\n'), ((2278, 2322), 'sys.stderr.write', 'sys.stderr.write', (['"""Certificate is signed!\n"""'], {}), "('Certificate is signed!\\n')\n", (2294, 2322), False, 'import sys\n'), ((2542, 2585), 'acme_lib.remove_challenges', 'acme_lib.remove_challenges', (['state', 'acme_dir'], {}), '(state, acme_dir)\n', (2568, 2585), False, 'import acme_lib\n'), ((3816, 3840), 'sys.stdout.write', 'sys.stdout.write', (['result'], {}), '(result)\n', (3832, 3840), False, 'import sys\n'), ((3859, 3892), 'acme_lib.write_file', 'acme_lib.write_file', (['cert', 'result'], {}), '(cert, result)\n', (3878, 3892), False, 'import acme_lib\n'), ((918, 947), 'acme_lib.get_csr_as_text', 'acme_lib.get_csr_as_text', (['csr'], {}), '(csr)\n', (942, 947), False, 'import acme_lib\n'), ((2360, 2384), 'sys.stdout.write', 'sys.stdout.write', (['result'], {}), '(result)\n', (2376, 2384), False, 'import sys\n'), ((2411, 2444), 'acme_lib.write_file', 'acme_lib.write_file', (['cert', 'result'], {}), '(cert, result)\n', (2430, 2444), False, 'import acme_lib\n'), ((3045, 3076), 'acme_lib.serialize_state', 'acme_lib.serialize_state', (['state'], {}), '(state)\n', (3069, 3076), False, 'import acme_lib\n'), ((12611, 12623), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (12619, 12623), False, 'import sys\n'), ((14286, 14298), 'sys.exit', 'sys.exit', (['(-2)'], {}), '(-2)\n', (14294, 14298), False, 'import sys\n'), ((4135, 6733), 'textwrap.dedent', 'textwrap.dedent', (['""" This script automates the process of getting a signed TLS certificate from\n Let\'s Encrypt using the ACME protocol. 
It can both be run from the server\n and from another machine (when splitting the process up in two steps).\n The script needs to have access to your private account key, so PLEASE READ\n THROUGH IT! It\'s only 265+569 lines (including docstrings), so it won\'t\n take too long.\n\n ===Example Usage: Creating Letsencrypt account key, private key for certificate and CSR===\n python acme_compact.py gen-account-key --account-key /path/to/account.key\n python acme_compact.py gen-key --key /path/to/domain.key\n python acme_compact.py gen-csr --key /path/to/domain.key --csr /path/to/domain.csr --domains example.com,www.example.com\n ===================\n Note that the email address does not have to be specified.\n\n Also note that by default, RSA keys are generated. If you want ECC keys,\n please specify "--algorithm <alg>" with <alg> being "p-256" or "p-384".\n\n ===Example Usage: Creating certifiate from CSR on server===\n python acme_compact.py get-certificate --account-key /path/to/account.key --email <EMAIL> --csr /path/to/domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ --cert /path/to/signed.crt 2>> /var/log/acme_compact.log\n ===================\n\n ===Example Usage: Creating certifiate from CSR from another machine===\n python acme_compact.py get-certificate-part-1 --account-key /path/to/account.key --email <EMAIL> --csr /path/to/domain.csr --statefile /path/to/state.json --acme-dir /tmp/acme-challenge/ 2>> /var/log/acme_compact.log\n ... 
copy files from /tmp/acme-challenge/ into /usr/share/nginx/html/.well-known/acme-challenge/ on the web server ...\n python acme_compact.py get-certificate-part-2 --csr /path/to/domain.csr --statefile /path/to/state.json --cert /path/to/signed.crt 2>> /var/log/acme_compact.log\n ===================\n\n ===Example Usage: Combining signed certificate with intermediate certificate===\n python acme_compact.py get-intermediate --cert /path/to/domain-intermediate.crt\n cat /path/to/signed.crt /path/to/domain-intermediate.crt > /path/to/signed-with-intermediate.crt\n ===================\n """'], {}), '(\n """ This script automates the process of getting a signed TLS certificate from\n Let\'s Encrypt using the ACME protocol. It can both be run from the server\n and from another machine (when splitting the process up in two steps).\n The script needs to have access to your private account key, so PLEASE READ\n THROUGH IT! It\'s only 265+569 lines (including docstrings), so it won\'t\n take too long.\n\n ===Example Usage: Creating Letsencrypt account key, private key for certificate and CSR===\n python acme_compact.py gen-account-key --account-key /path/to/account.key\n python acme_compact.py gen-key --key /path/to/domain.key\n python acme_compact.py gen-csr --key /path/to/domain.key --csr /path/to/domain.csr --domains example.com,www.example.com\n ===================\n Note that the email address does not have to be specified.\n\n Also note that by default, RSA keys are generated. 
If you want ECC keys,\n please specify "--algorithm <alg>" with <alg> being "p-256" or "p-384".\n\n ===Example Usage: Creating certifiate from CSR on server===\n python acme_compact.py get-certificate --account-key /path/to/account.key --email <EMAIL> --csr /path/to/domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ --cert /path/to/signed.crt 2>> /var/log/acme_compact.log\n ===================\n\n ===Example Usage: Creating certifiate from CSR from another machine===\n python acme_compact.py get-certificate-part-1 --account-key /path/to/account.key --email <EMAIL> --csr /path/to/domain.csr --statefile /path/to/state.json --acme-dir /tmp/acme-challenge/ 2>> /var/log/acme_compact.log\n ... copy files from /tmp/acme-challenge/ into /usr/share/nginx/html/.well-known/acme-challenge/ on the web server ...\n python acme_compact.py get-certificate-part-2 --csr /path/to/domain.csr --statefile /path/to/state.json --cert /path/to/signed.crt 2>> /var/log/acme_compact.log\n ===================\n\n ===Example Usage: Combining signed certificate with intermediate certificate===\n python acme_compact.py get-intermediate --cert /path/to/domain-intermediate.crt\n cat /path/to/signed.crt /path/to/domain-intermediate.crt > /path/to/signed-with-intermediate.crt\n ===================\n """\n )\n', (4150, 6733), False, 'import textwrap\n'), ((12860, 12872), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (12868, 12872), False, 'import sys\n'), ((12172, 12215), 'textwrap.indent', 'textwrap.indent', (["cmd['help']"], {'prefix': '""" """'}), "(cmd['help'], prefix=' ')\n", (12187, 12215), False, 'import textwrap\n'), ((13101, 13186), 'sys.stderr.write', 'sys.stderr.write', (['"""Cannot specify both \'--use-staging-CA\' and provide \'--CA\'!\n"""'], {}), '("Cannot specify both \'--use-staging-CA\' and provide \'--CA\'!\\n"\n )\n', (13117, 13186), False, 'import sys\n'), ((13202, 13214), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (13210, 13214), False, 'import 
sys\n'), ((13531, 13543), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (13539, 13543), False, 'import sys\n')] |
"""
VPC stack for running ConsoleMe on ECS
"""
import urllib.request
from aws_cdk import (
aws_ec2 as ec2,
core as cdk
)
class VPCStack(cdk.NestedStack):
    """Nested stack providing the VPC and security groups for running
    ConsoleMe on ECS."""

    def __init__(self, scope: cdk.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Look up the deploying machine's public IP so that HTTPS ingress on
        # the load balancer can be restricted to it.
        # NOTE(review): this performs a live network call during synthesis.
        response = urllib.request.urlopen('http://checkip.amazonaws.com')
        my_ip_cidr = response.read().decode('utf-8').strip() + '/32'
        # Two-AZ VPC hosting the ECS service and the Redis cache.
        vpc = ec2.Vpc(self, 'Vpc', max_azs=2)
        lb_sg = ec2.SecurityGroup(
            self,
            'LBSG',
            vpc=vpc,
            description='Consoleme ECS service load balancer security group',
            allow_all_outbound=True,
        )
        lb_sg.add_ingress_rule(
            peer=ec2.Peer.ipv4(cidr_ip=my_ip_cidr),
            connection=ec2.Port.tcp(port=443),
            description='Allow HTTPS traffic',
        )
        cache_sg = ec2.SecurityGroup(
            self,
            'ECSG',
            vpc=vpc,
            description='Consoleme Redis security group',
            allow_all_outbound=True,
        )
        # Only the ConsoleMe containers (behind lb_sg) may reach Redis.
        cache_sg.connections.allow_from(
            lb_sg,
            port_range=ec2.Port.tcp(port=6379),
            description='Allow ingress from ConsoleMe containers',
        )
        self.vpc = vpc
        self.redis_sg = cache_sg
        self.consoleme_sg = lb_sg
| [
"aws_cdk.aws_ec2.SecurityGroup",
"aws_cdk.aws_ec2.Vpc",
"aws_cdk.aws_ec2.Port.tcp",
"aws_cdk.aws_ec2.Peer.ipv4"
] | [((395, 426), 'aws_cdk.aws_ec2.Vpc', 'ec2.Vpc', (['self', '"""Vpc"""'], {'max_azs': '(2)'}), "(self, 'Vpc', max_azs=2)\n", (402, 426), True, 'from aws_cdk import aws_ec2 as ec2, core as cdk\n'), ((485, 625), 'aws_cdk.aws_ec2.SecurityGroup', 'ec2.SecurityGroup', (['self', '"""LBSG"""'], {'vpc': 'vpc', 'description': '"""Consoleme ECS service load balancer security group"""', 'allow_all_outbound': '(True)'}), "(self, 'LBSG', vpc=vpc, description=\n 'Consoleme ECS service load balancer security group',\n allow_all_outbound=True)\n", (502, 625), True, 'from aws_cdk import aws_ec2 as ec2, core as cdk\n'), ((1091, 1207), 'aws_cdk.aws_ec2.SecurityGroup', 'ec2.SecurityGroup', (['self', '"""ECSG"""'], {'vpc': 'vpc', 'description': '"""Consoleme Redis security group"""', 'allow_all_outbound': '(True)'}), "(self, 'ECSG', vpc=vpc, description=\n 'Consoleme Redis security group', allow_all_outbound=True)\n", (1108, 1207), True, 'from aws_cdk import aws_ec2 as ec2, core as cdk\n'), ((933, 966), 'aws_cdk.aws_ec2.Peer.ipv4', 'ec2.Peer.ipv4', ([], {'cidr_ip': 'my_ip_cidr'}), '(cidr_ip=my_ip_cidr)\n', (946, 966), True, 'from aws_cdk import aws_ec2 as ec2, core as cdk\n'), ((991, 1013), 'aws_cdk.aws_ec2.Port.tcp', 'ec2.Port.tcp', ([], {'port': '(443)'}), '(port=443)\n', (1003, 1013), True, 'from aws_cdk import aws_ec2 as ec2, core as cdk\n'), ((1339, 1362), 'aws_cdk.aws_ec2.Port.tcp', 'ec2.Port.tcp', ([], {'port': '(6379)'}), '(port=6379)\n', (1351, 1362), True, 'from aws_cdk import aws_ec2 as ec2, core as cdk\n')] |
from torch import nn, optim
import torch
import model
import torch.nn.utils
import utils
import argparse
# Run on GPU when available, otherwise fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Hyperparameters for the coRNN training experiment.
parser = argparse.ArgumentParser(description='training parameters')
parser.add_argument('--n_hid', type=int, default=128,
                    help='hidden size of recurrent net')
parser.add_argument('--T', type=int, default=100,
                    help='length of sequences')
parser.add_argument('--max_steps', type=int, default=60000,
                    help='max learning steps')
parser.add_argument('--log_interval', type=int, default=100,
                    help='log interval')
parser.add_argument('--batch', type=int, default=50,
                    help='batch size')
parser.add_argument('--batch_test', type=int, default=1000,
                    help='size of test set')
parser.add_argument('--lr', type=float, default=2e-2,
                    help='learning rate')
parser.add_argument('--dt',type=float, default=6e-2,
                    help='step size <dt> of the coRNN')
parser.add_argument('--gamma',type=float, default=66,
                    help='y controle parameter <gamma> of the coRNN')
parser.add_argument('--epsilon',type=float, default = 15,
                    help='z controle parameter <epsilon> of the coRNN')
args = parser.parse_args()
# Task dimensions: 2 input channels, 1 scalar output per sequence.
n_inp = 2
n_out = 1
# NOTE(review): this rebinds `model` from the imported module to a network
# instance, shadowing the `model` module for the rest of the script.
model = model.coRNN(n_inp, args.n_hid, n_out, args.dt, args.gamma, args.epsilon).to(device)
objective = nn.MSELoss()
optimizer = optim.Adam(model.parameters(), lr=args.lr)
def test():
    """Compute the MSE of the model on a freshly sampled test batch."""
    model.eval()
    with torch.no_grad():
        inputs, targets = utils.get_batch(args.T, args.batch_test)
        targets = targets.unsqueeze(1)
        predictions = model(inputs.to(device))
        mse = objective(predictions, targets.to(device))
    return mse.item()
def train():
    """Train the model for ``args.max_steps`` optimization steps.

    Evaluates on a held-out batch every ``args.log_interval`` steps
    (restoring training mode afterwards) and collects the test MSEs.

    Returns:
        list[float]: the test MSE recorded at each evaluation point.
    """
    test_mse = []
    for step in range(args.max_steps):
        data, label = utils.get_batch(args.T, args.batch)
        label = label.unsqueeze(1)
        optimizer.zero_grad()
        out = model(data.to(device))
        loss = objective(out, label.to(device))
        loss.backward()
        optimizer.step()
        # Honor the --log_interval flag instead of a hard-coded 100, and
        # skip the uninformative evaluation at step 0.
        if step % args.log_interval == 0 and step != 0:
            mse_error = test()
            print('Test MSE: {:.6f}'.format(mse_error))
            test_mse.append(mse_error)
            # test() switched the model to eval mode; switch back.
            model.train()
    # Previously the collected MSEs were discarded; return them so callers
    # can inspect or plot the learning curve (backward-compatible: the
    # return value was unused before).
    return test_mse
if __name__ == '__main__':
    # Run training only when executed as a script, not on import.
    train()
| [
"model.train",
"utils.get_batch",
"model.parameters",
"argparse.ArgumentParser",
"torch.nn.MSELoss",
"torch.no_grad",
"torch.cuda.is_available",
"model.coRNN",
"model.eval"
] | [((186, 244), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""training parameters"""'}), "(description='training parameters')\n", (209, 244), False, 'import argparse\n'), ((1476, 1488), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (1486, 1488), False, 'from torch import nn, optim\n'), ((1512, 1530), 'model.parameters', 'model.parameters', ([], {}), '()\n', (1528, 1530), False, 'import model\n'), ((1561, 1573), 'model.eval', 'model.eval', ([], {}), '()\n', (1571, 1573), False, 'import model\n'), ((137, 162), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (160, 162), False, 'import torch\n'), ((1378, 1450), 'model.coRNN', 'model.coRNN', (['n_inp', 'args.n_hid', 'n_out', 'args.dt', 'args.gamma', 'args.epsilon'], {}), '(n_inp, args.n_hid, n_out, args.dt, args.gamma, args.epsilon)\n', (1389, 1450), False, 'import model\n'), ((1583, 1598), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1596, 1598), False, 'import torch\n'), ((1622, 1662), 'utils.get_batch', 'utils.get_batch', (['args.T', 'args.batch_test'], {}), '(args.T, args.batch_test)\n', (1637, 1662), False, 'import utils\n'), ((1897, 1932), 'utils.get_batch', 'utils.get_batch', (['args.T', 'args.batch'], {}), '(args.T, args.batch)\n', (1912, 1932), False, 'import utils\n'), ((2302, 2315), 'model.train', 'model.train', ([], {}), '()\n', (2313, 2315), False, 'import model\n')] |
import time
import asyncio
import sortedcontainers
from hailtop.utils import retry_transient_errors
class K8sCache:
    """Read-through, time-based cache for Kubernetes secrets and service
    accounts.

    Entries older than ``refresh_time`` seconds are re-fetched, and the
    oldest entry is evicted once ``max_size`` entries are cached. A
    per-key asyncio lock collapses concurrent reads of the same object
    into a single API call.
    """

    def __init__(self, client, refresh_time, max_size=100):
        # client: object exposing read_namespaced_secret /
        # read_namespaced_service_account coroutine methods (presumably a
        # kubernetes_asyncio CoreV1Api — TODO confirm against callers).
        self.client = client
        self.refresh_time = refresh_time
        self.max_size = max_size
        # (name, namespace) -> (object, fetch time)
        self.secrets = {}
        # ids ordered by fetch time, so index 0 is always the oldest entry.
        self.secret_ids = sortedcontainers.SortedSet(
            key=lambda id: self.secrets[id][1])
        self.secret_locks = {}
        self.service_accounts = {}
        self.service_account_ids = sortedcontainers.SortedSet(
            key=lambda id: self.service_accounts[id][1])
        self.service_account_locks = {}

    async def _read_cached(self, cache, ids, locks, fetcher, name, namespace, timeout):
        """Shared read-through logic for both object kinds.

        Returns the cached object when fresh; otherwise fetches it via
        *fetcher*, stores it with the current timestamp and evicts the
        oldest entry if the cache is full.
        """
        id = (name, namespace)
        lock = locks.get(id)
        if lock is None:
            lock = asyncio.Lock()
            locks[id] = lock
        async with lock:
            obj, time_updated = cache.get(id, (None, None))
            if time_updated and time.time() < time_updated + self.refresh_time:
                return obj
            if len(cache) == self.max_size:
                head_id = ids.pop(0)
                del cache[head_id]
            obj = await retry_transient_errors(
                fetcher,
                name,
                namespace,
                _request_timeout=timeout)
            # On a refresh, remove the id from the sorted set BEFORE the
            # stored timestamp changes: the set is keyed on that timestamp,
            # so mutating it while the id is a member corrupts the ordering.
            if id in cache:
                ids.discard(id)
            cache[id] = (obj, time.time())
            ids.add(id)
            # pop() rather than del: a second waiter whose entry went stale
            # (or was evicted) can reach this line after the first waiter
            # already removed the lock, which would raise KeyError with del.
            locks.pop(id, None)
            return obj

    async def read_secret(self, name, namespace, timeout):
        """Return the secret *name* in *namespace*, fetching it via the API
        at most once per refresh interval."""
        return await self._read_cached(
            self.secrets, self.secret_ids, self.secret_locks,
            self.client.read_namespaced_secret, name, namespace, timeout)

    async def read_service_account(self, name, namespace, timeout):
        """Return the service account *name* in *namespace*, fetching it via
        the API at most once per refresh interval."""
        return await self._read_cached(
            self.service_accounts, self.service_account_ids,
            self.service_account_locks,
            self.client.read_namespaced_service_account, name, namespace, timeout)
| [
"time.time",
"sortedcontainers.SortedSet",
"asyncio.Lock",
"hailtop.utils.retry_transient_errors"
] | [((335, 397), 'sortedcontainers.SortedSet', 'sortedcontainers.SortedSet', ([], {'key': '(lambda id: self.secrets[id][1])'}), '(key=lambda id: self.secrets[id][1])\n', (361, 397), False, 'import sortedcontainers\n'), ((513, 584), 'sortedcontainers.SortedSet', 'sortedcontainers.SortedSet', ([], {'key': '(lambda id: self.service_accounts[id][1])'}), '(key=lambda id: self.service_accounts[id][1])\n', (539, 584), False, 'import sortedcontainers\n'), ((815, 829), 'asyncio.Lock', 'asyncio.Lock', ([], {}), '()\n', (827, 829), False, 'import asyncio\n'), ((1765, 1779), 'asyncio.Lock', 'asyncio.Lock', ([], {}), '()\n', (1777, 1779), False, 'import asyncio\n'), ((1248, 1353), 'hailtop.utils.retry_transient_errors', 'retry_transient_errors', (['self.client.read_namespaced_secret', 'name', 'namespace'], {'_request_timeout': 'timeout'}), '(self.client.read_namespaced_secret, name, namespace,\n _request_timeout=timeout)\n', (1270, 1353), False, 'from hailtop.utils import retry_transient_errors\n'), ((1456, 1467), 'time.time', 'time.time', ([], {}), '()\n', (1465, 1467), False, 'import time\n'), ((2231, 2345), 'hailtop.utils.retry_transient_errors', 'retry_transient_errors', (['self.client.read_namespaced_service_account', 'name', 'namespace'], {'_request_timeout': 'timeout'}), '(self.client.read_namespaced_service_account, name,\n namespace, _request_timeout=timeout)\n', (2253, 2345), False, 'from hailtop.utils import retry_transient_errors\n'), ((2453, 2464), 'time.time', 'time.time', ([], {}), '()\n', (2462, 2464), False, 'import time\n'), ((999, 1010), 'time.time', 'time.time', ([], {}), '()\n', (1008, 1010), False, 'import time\n'), ((1963, 1974), 'time.time', 'time.time', ([], {}), '()\n', (1972, 1974), False, 'import time\n')] |
# Copyright (C) 2020 GreenWaves Technologies, SAS
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import argparse
from copy import deepcopy
from cmd2 import Cmd2ArgumentParser, with_argparser
from interpreter.nntool_shell_base import NNToolShellBase
from quantization.qtype import QType
from utils.node_id import NodeId
from graph.types import ImageFormatParameters, NNEdge, TransposeParameters
from graph.manipulations.formatter import insert_formatter, remove_formatter
class ImageFormatCommand(NNToolShellBase):
def inputs_choices(self):
if self.G is None:
return []
return [node.name for node in self.G.inputs()]
def format_choices(self):
return [fmt.lower() for fmt in ImageFormatParameters.FORMAT_CHANGES] + ['none']
def norm_choices(self):
return [fmt.lower() for fmt in ImageFormatParameters.NORMALIZATIONS] + ['none']
# IMAGEFORMAT COMMAND
parser_imageformat = Cmd2ArgumentParser(
"inserts image format node into graphs")
parser_imageformat.add_argument('input_node',
choices_method=inputs_choices,
help='input node name to format')
parser_imageformat.add_argument('image_formatter',
choices_method=format_choices,
help='input node name to format')
parser_imageformat.add_argument('image_normalizer',
choices_method=norm_choices,
help='input node name to format')
@with_argparser(parser_imageformat)
def do_imageformat(self, args: argparse.Namespace):
""" Add or modify image format options."""
self._check_graph()
if args.input_node not in self.G:
self.perror("input node not found")
return
input_node = self.G[args.input_node]
out_edges = self.G.out_edges(input_node.name)
if len(out_edges) == 1 and isinstance(out_edges[0].to_node, ImageFormatParameters):
remove_formatter(self.G, out_edges[0].to_node)
self.G.add_dimensions()
self.pfeedback(f'removed image formatter {out_edges[0].to_node.name}')
return
if args.image_formatter == "none" and args.image_normalizer == "none":
self.pfeedback("no formatting set")
self.G.add_dimensions()
return
insert_formatter(self.G, input_node,
args.image_formatter, args.image_normalizer)
self.G.add_dimensions()
self.pfeedback(f'inserted image formatter after node {input_node.name} with'
f'format {args.image_formatter} and normalization {args.image_normalizer}')
| [
"cmd2.with_argparser",
"graph.manipulations.formatter.insert_formatter",
"cmd2.Cmd2ArgumentParser",
"graph.manipulations.formatter.remove_formatter"
] | [((1549, 1608), 'cmd2.Cmd2ArgumentParser', 'Cmd2ArgumentParser', (['"""inserts image format node into graphs"""'], {}), "('inserts image format node into graphs')\n", (1567, 1608), False, 'from cmd2 import Cmd2ArgumentParser, with_argparser\n'), ((2194, 2228), 'cmd2.with_argparser', 'with_argparser', (['parser_imageformat'], {}), '(parser_imageformat)\n', (2208, 2228), False, 'from cmd2 import Cmd2ArgumentParser, with_argparser\n'), ((3051, 3137), 'graph.manipulations.formatter.insert_formatter', 'insert_formatter', (['self.G', 'input_node', 'args.image_formatter', 'args.image_normalizer'], {}), '(self.G, input_node, args.image_formatter, args.\n image_normalizer)\n', (3067, 3137), False, 'from graph.manipulations.formatter import insert_formatter, remove_formatter\n'), ((2676, 2722), 'graph.manipulations.formatter.remove_formatter', 'remove_formatter', (['self.G', 'out_edges[0].to_node'], {}), '(self.G, out_edges[0].to_node)\n', (2692, 2722), False, 'from graph.manipulations.formatter import insert_formatter, remove_formatter\n')] |
# USAGE
# python extract_embeddings.py --dataset dataset --embeddings output/embeddings.pickle \
# --detector face_detection_model --embedding-model openface_nn4.small2.v1.t7
# import the necessary packages
from imutils.face_utils import FaceAligner
from imutils import paths
import numpy as np
import argparse
import imutils
import pickle
import cv2
import os
import dlib
from PIL import Image
from yolo import YOLO, detect_video
from yolo3.utils import letterbox_image
from keras import backend as K
def detect_image(self, image):
if self.model_image_size != (None, None):
assert self.model_image_size[0]%32 == 0, 'Multiples of 32 required'
assert self.model_image_size[1]%32 == 0, 'Multiples of 32 required'
boxed_image = letterbox_image(image, tuple(reversed(self.model_image_size)))
else:
new_image_size = (image.width - (image.width % 32),
image.height - (image.height % 32))
boxed_image = letterbox_image(image, new_image_size)
image_data = np.array(boxed_image, dtype='float32')
#print(image_data.shape)
image_data /= 255.
image_data = np.expand_dims(image_data, 0) # Add batch dimension.
out_boxes, out_scores, out_classes = self.sess.run(
[self.boxes, self.scores, self.classes],
feed_dict={
self.yolo_model.input: image_data,
self.input_image_shape: [image.size[1], image.size[0]],
K.learning_phase(): 0
})
print('Found {} boxes for {}'.format(len(out_boxes), 'img'))
return out_boxes, out_scores, out_classes
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--dataset", required=True,
help="path to input directory of faces + images")
ap.add_argument("-e", "--embeddings", required=True,
help="path to output serialized db of facial embeddings")
ap.add_argument("-m", "--embedding-model", required=True,
help="path to OpenCV's deep learning face embedding model")
ap.add_argument("-p", "--shape-predictor", required=True,
help="path to facial landmark predictor")
args = vars(ap.parse_args())
# load our serialized face detector from disk
print("[INFO] loading face detector...")
predictor = dlib.shape_predictor(args["shape_predictor"])
#detector = dlib.get_frontal_face_detector()
detector = YOLO()
# load our serialized face embedding model from disk
print("[INFO] loading face recognizer...")
embedder = cv2.dnn.readNetFromTorch(args["embedding_model"])
# grab the paths to the input images in our dataset
print("[INFO] quantifying faces...")
imagePaths = list(paths.list_images(args["dataset"]))
# initialize our lists of extracted facial embeddings and
# corresponding people names
knownEmbeddings = []
knownNames = []
# initialize the total number of faces processed
total = 0
# loop over the image paths
for (i, imagePath) in enumerate(imagePaths):
# extract the person name from the image path
print("[INFO] processing image {}/{}".format(i + 1,
len(imagePaths)))
name = imagePath.split(os.path.sep)[-2]
# load the image, resize it to have a width of 800 pixels (while
# maintaining the aspect ratio), and then grab the image
# dimensions
image = cv2.imread(imagePath)
image = imutils.resize(image, width=800)
#try to rise resolution
#gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#blurred = cv2.GaussianBlur(gray, (5, 5), 0)
#image = blurred
#clahe = cv2.createCLAHE(clipLimit=4.0, tileGridSize=(8,8))
#image = clahe.apply(image)
#image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
(h, w) = image.shape[:2]
# we're making the assumption that each image has only ONE
# face, so find the bounding box with the largest probability
#align_faces
fa = FaceAligner(predictor, desiredFaceWidth=256)
#gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#rects = detector(gray, 2)
rects = []
out_boxes, out_scores, out_classes = detect_image(detector, Image.fromarray(image))
for i, c in reversed(list(enumerate(out_classes))):
(x, y, x1, y1) = out_boxes[i]
w = abs(x - x1)
h = abs(y - y1)
startX = int(min(x1, x))
endX = startX + w
startY = int(min(y1, y))
endY = startY + h
left, right, bottom, top = startX, endX, endY, startY
rect = dlib.rectangle(int(top), int(left), int(bottom) , int(right))
rects.append(rect)
for rect in rects:
faceAligned = fa.align(image, gray, rect)
print(faceAligned)
cv2.imshow("Aligned", np.asarray(faceAligned))
cv2.waitKey(0)
face = faceAligned
(fH, fW) = face.shape[:2]
# ensure the face width and height are sufficiently large
if fW < 20 or fH < 20:
continue
# construct a blob for the face ROI, then pass the blob
# through our face embedding model to obtain the 128-d
# quantification of the face
faceBlob = cv2.dnn.blobFromImage(face, 1.0 / 255,
(96, 96), (0, 0, 0), swapRB=True, crop=False)
embedder.setInput(faceBlob)
vec = embedder.forward()
# add the name of the person + corresponding face
# embedding to their respective lists
knownNames.append(name)
knownEmbeddings.append(vec.flatten())
total += 1
# dump the facial embeddings + names to disk
print("[INFO] serializing {} encodings...".format(total))
data = {"embeddings": knownEmbeddings, "names": knownNames}
f = open(args["embeddings"], "wb")
f.write(pickle.dumps(data))
f.close() | [
"cv2.dnn.blobFromImage",
"PIL.Image.fromarray",
"cv2.dnn.readNetFromTorch",
"argparse.ArgumentParser",
"yolo.YOLO",
"pickle.dumps",
"keras.backend.learning_phase",
"numpy.asarray",
"dlib.shape_predictor",
"numpy.array",
"imutils.resize",
"cv2.waitKey",
"imutils.paths.list_images",
"numpy.e... | [((1650, 1675), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1673, 1675), False, 'import argparse\n'), ((2238, 2283), 'dlib.shape_predictor', 'dlib.shape_predictor', (["args['shape_predictor']"], {}), "(args['shape_predictor'])\n", (2258, 2283), False, 'import dlib\n'), ((2340, 2346), 'yolo.YOLO', 'YOLO', ([], {}), '()\n', (2344, 2346), False, 'from yolo import YOLO, detect_video\n'), ((2455, 2504), 'cv2.dnn.readNetFromTorch', 'cv2.dnn.readNetFromTorch', (["args['embedding_model']"], {}), "(args['embedding_model'])\n", (2479, 2504), False, 'import cv2\n'), ((1028, 1066), 'numpy.array', 'np.array', (['boxed_image'], {'dtype': '"""float32"""'}), "(boxed_image, dtype='float32')\n", (1036, 1066), True, 'import numpy as np\n'), ((1137, 1166), 'numpy.expand_dims', 'np.expand_dims', (['image_data', '(0)'], {}), '(image_data, 0)\n', (1151, 1166), True, 'import numpy as np\n'), ((2613, 2647), 'imutils.paths.list_images', 'paths.list_images', (["args['dataset']"], {}), "(args['dataset'])\n", (2630, 2647), False, 'from imutils import paths\n'), ((3217, 3238), 'cv2.imread', 'cv2.imread', (['imagePath'], {}), '(imagePath)\n', (3227, 3238), False, 'import cv2\n'), ((3248, 3280), 'imutils.resize', 'imutils.resize', (['image'], {'width': '(800)'}), '(image, width=800)\n', (3262, 3280), False, 'import imutils\n'), ((3572, 3611), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (3584, 3611), False, 'import cv2\n'), ((3790, 3834), 'imutils.face_utils.FaceAligner', 'FaceAligner', (['predictor'], {'desiredFaceWidth': '(256)'}), '(predictor, desiredFaceWidth=256)\n', (3801, 3834), False, 'from imutils.face_utils import FaceAligner\n'), ((4846, 4934), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['face', '(1.0 / 255)', '(96, 96)', '(0, 0, 0)'], {'swapRB': '(True)', 'crop': '(False)'}), '(face, 1.0 / 255, (96, 96), (0, 0, 0), swapRB=True,\n crop=False)\n', (4867, 4934), False, 'import 
cv2\n'), ((5362, 5380), 'pickle.dumps', 'pickle.dumps', (['data'], {}), '(data)\n', (5374, 5380), False, 'import pickle\n'), ((972, 1010), 'yolo3.utils.letterbox_image', 'letterbox_image', (['image', 'new_image_size'], {}), '(image, new_image_size)\n', (987, 1010), False, 'from yolo3.utils import letterbox_image\n'), ((3990, 4012), 'PIL.Image.fromarray', 'Image.fromarray', (['image'], {}), '(image)\n', (4005, 4012), False, 'from PIL import Image\n'), ((4526, 4540), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4537, 4540), False, 'import cv2\n'), ((4499, 4522), 'numpy.asarray', 'np.asarray', (['faceAligned'], {}), '(faceAligned)\n', (4509, 4522), True, 'import numpy as np\n'), ((1442, 1460), 'keras.backend.learning_phase', 'K.learning_phase', ([], {}), '()\n', (1458, 1460), True, 'from keras import backend as K\n')] |
"""route.py
Linux parsers for the following commands:
* route
"""
# python
import re
# metaparser
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Optional
from netaddr import IPAddress, IPNetwork
# =======================================================
# Schema for 'route'
# =======================================================
class RouteSchema(MetaParser):
"""Schema for route"""
# Destination Gateway Genmask Flags Metric Ref Use Iface
# 0.0.0.0 192.168.1.1 0.0.0.0 UG 0 0 0 wlo1
schema = {
'routes': {
Any(): { # 'destination'
'mask': {
Any(): {
'nexthop': {
Any(): { # index: 1, 2, 3, etc
'interface': str,
Optional('flags'): str,
Optional('gateway'): str,
Optional('metric'): int,
Optional('ref'): int,
Optional('use'): int,
Optional('scope'): str,
Optional('proto'): str,
Optional('src'): str,
Optional('broadcast'): bool,
Optional('table'): str,
Optional('local'): bool
}
}
}
}
}
}
}
# =======================================================
# Parser for 'route'
# =======================================================
class Route(RouteSchema):
"""Parser for
* route
* route -4 -n
* route -4n
* route -n4
* route -n -4
"""
cli_command = ['route', 'route {flag}']
def cli(self, flag=None, output=None):
if output is None:
cmd = self.cli_command[0]
if flag in ['-4 -n', '-4n', '-n4']:
command = self.cli_command[1].replace('{flag}', flag)
out = self.device.execute(cmd)
else:
out = output
# Destination Gateway Genmask Flags Metric Ref Use Iface
# 192.168.1.0 0.0.0.0 255.255.255.0 U 600 0 0 wlo1
p1 = re.compile(r'(?P<destination>[a-z0-9\.\:]+)'
' +(?P<gateway>[a-z0-9\.\:_]+)'
' +(?P<mask>[a-z0-9\.\:]+)'
' +(?P<flags>[a-zA-Z]+)'
' +(?P<metric>(\d+))'
' +(?P<ref>(\d+))'
' +(?P<use>(\d+))'
' +(?P<interface>\S+)'
)
# Initializes the Python dictionary variable
parsed_dict = {}
# Defines the "for" loop, to pattern match each line of output
for line in out.splitlines():
line = line.strip()
# 192.168.1.0 0.0.0.0 255.255.255.0 U 600 0 0 wlo1
m = p1.match(line)
if m:
if 'routes' not in parsed_dict:
parsed_dict.setdefault('routes', {})
group = m.groupdict()
destination = group['destination']
mask = group['mask']
index_dict = {}
for str_k in ['interface', 'flags', 'gateway']:
index_dict[str_k] = group[str_k]
for int_k in ['metric', 'ref', 'use']:
index_dict[int_k] = int(group[int_k])
if destination in parsed_dict['routes']:
if mask in parsed_dict['routes'][destination]['mask']:
parsed_dict['routes'][destination]['mask'][mask].\
setdefault('nexthop', {index+1: index_dict})
else:
index = 1
parsed_dict['routes'][destination]['mask'].\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
else:
index = 1
parsed_dict['routes'].setdefault(destination, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
continue
return parsed_dict
# =======================================================
# Parser for 'netstat -rn'
# =======================================================
class ShowNetworkStatusRoute(Route, RouteSchema):
"""Parser for
* netstat -rn
"""
cli_command = ['netstat -rn']
def cli(self, output=None):
if output is None:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
return super().cli(output=out)
# =====================================================
# Parser for ip route show table all
# =====================================================
class IpRouteShowTableAll(RouteSchema):
"""
Parser for
* ip route show table all
"""
cli_command = ['ip route show table all']
def cli(self, output=None):
if output is None:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
# default via 192.168.1.1 dev enp7s0 proto dhcp metric 100
p1 = re.compile(r'default via (?P<gateway>[a-z0-9\.\:]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' proto (?P<proto>[a-z]+)'
' metric (?P<metric>[\d]+)'
)
# 169.254.0.0/16 dev enp7s0 scope link metric 1000
p2 = re.compile(r'(?P<destination>[a-z0-9\.\:\/]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' scope (?P<scope>\w+)'
' metric (?P<metric>[\d]+)'
)
# 172.17.0.0/16 dev docker0 proto kernel scope link src 172.17.0.1
p3 = re.compile(r'(?P<destination>[a-z0-9\.\:\/]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' proto (?P<proto>\w+)'
' scope (?P<scope>\w+)'
' src (?P<src>[a-z0-9\.\:\/]+)'
)
# 172.18.0.0/16 dev br-d19b23fac393 proto kernel scope link src 172.18.0.1 linkdown
p4 = re.compile(r'(?P<destination>[a-z0-9\.\:\/]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' proto (?P<proto>\w+)'
' scope (?P<scope>\w+)'
' src (?P<src>[a-z0-9\.\:\/]+)'
' linkdown '
)
# 192.168.1.0/24 dev enp7s0 proto kernel scope link src 192.168.1.212 metric 100
p5 = re.compile(r'(?P<destination>[a-z0-9\.\:\/]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' proto (?P<proto>\w+)'
' scope (?P<scope>\w+)'
' src (?P<src>[a-z0-9\.\:\/]+)'
' metric (?P<metric>[\d]+)'
)
# broadcast 127.0.0.0 dev lo table local proto kernel scope link src 127.0.0.1
p6 = re.compile(r'broadcast (?P<destination>[a-z0-9\.\:\/]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' table (?P<table>\w+)'
' proto (?P<proto>\w+)'
' scope (?P<scope>\w+)'
' src (?P<src>[a-z0-9\.\:\/]+)'
)
# local 10.233.44.70 dev kube-ipvs0 table local proto kernel scope host src 10.233.44.70
p7 = re.compile(r'local (?P<destination>[a-z0-9\.\:\/]+)'
' dev (?P<device>[a-z0-9\.\-]+)'
' table (?P<table>\w+)'
' proto (?P<proto>\w+)'
' scope (?P<scope>\w+)'
' src (?P<src>[a-z0-9\.\:\/]+)'
)
# Initializes the Python dictionary variable
parsed_dict = {}
# Defines the "for" loop, to pattern match each line of output
for line in out.splitlines():
line = line.strip()
# default via 192.168.1.1 dev enp7s0 proto dhcp metric 100
m = p1.match(line)
if m:
if 'routes' not in parsed_dict:
parsed_dict.setdefault('routes', {})
group = m.groupdict()
gateway = group['gateway']
interface = group['device']
metric = int(group['metric'])
if gateway:
parsed_dict['routes'] = { '0.0.0.0': {
'mask': {
'0.0.0.0': {
'nexthop': {
1:{
'gateway': gateway,
'interface': interface,
'metric': metric
}
}
}
}
}
}
# 169.254.0.0/16 dev enp7s0 scope link metric 1000
m = p2.match(line)
if m:
group = m.groupdict()
destination = IPNetwork(group['destination'])
mask = str(destination.netmask)
destination_addr = str(destination.ip)
interface = group['device']
metric = int(group['metric'])
scope = group['scope']
index_dict = {'interface' : interface,
'scope' : scope,
'metric': metric
}
index = 1
parsed_dict['routes'].setdefault(destination_addr, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
# 172.17.0.0/16 dev docker0 proto kernel scope link src 172.17.0.1
m = p3.match(line)
if m:
group = m.groupdict()
destination = IPNetwork(group['destination'])
mask = str(destination.netmask)
destination_addr = str(destination.ip)
interface = group['device']
scope = group['scope']
proto = group['proto']
src = group['src']
index_dict = {'interface' : interface,
'scope' : scope,
'proto' : proto ,
'src' : src
}
index = 1
parsed_dict['routes'].setdefault(destination_addr, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
# 172.18.0.0/16 dev br-d19b23fac393 proto kernel scope link src 172.18.0.1 linkdown
m = p4.match(line)
if m:
group = m.groupdict()
destination = IPNetwork(group['destination'])
mask = str(destination.netmask)
destination_addr = str(destination.ip)
interface = group['device']
scope = group['scope']
proto = group['proto']
src = group['src']
index_dict = {'interface' : interface,
'scope' : scope,
'proto' : proto ,
'src' : src
}
index = 1
parsed_dict['routes'].setdefault(destination_addr, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
# 192.168.1.0/24 dev enp7s0 proto kernel scope link src 192.168.1.212 metric 100
m = p5.match(line)
if m:
group = m.groupdict()
destination = IPNetwork(group['destination'])
mask = str(destination.netmask)
destination_addr = str(destination.ip)
interface = group['device']
scope = group['scope']
proto = group['proto']
metric = group['metric']
src = group['src']
index_dict = {'interface' : interface,
'scope' : scope,
'proto' : proto ,
'src' : src,
'metric': metric
}
index = 1
parsed_dict['routes'].setdefault(destination_addr, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
# broadcast 127.0.0.0 dev lo table local proto kernel scope link src 127.0.0.1
m = p6.match(line)
if m:
group = m.groupdict()
destination = IPNetwork(group['destination'])
mask = str(destination.netmask)
destination_addr = str(destination.ip)
interface = group['device']
scope = group['scope']
proto = group['proto']
src = group['src']
table = group['table']
index_dict = {'interface' : interface,
'scope' : scope,
'proto' : proto ,
'src' : src,
'broadcast': True,
'table': table
}
index = 1
parsed_dict['routes'].setdefault(destination_addr, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
# local 10.233.44.70 dev kube-ipvs0 table local proto kernel scope host src 10.233.44.70
m = p7.match(line)
if m:
group = m.groupdict()
destination = IPNetwork(group['destination'])
mask = str(destination.netmask)
destination_addr = str(destination.ip)
interface = group['device']
scope = group['scope']
proto = group['proto']
src = group['src']
table = group['table']
index_dict = {'interface' : interface,
'scope' : scope,
'proto' : proto ,
'src' : src,
'local': True,
'table': table
}
index = 1
parsed_dict['routes'].setdefault(destination_addr, {}).\
setdefault('mask', {}).\
setdefault(mask, {}).\
setdefault('nexthop', {index: index_dict})
return parsed_dict
| [
"netaddr.IPNetwork",
"genie.metaparser.util.schemaengine.Any",
"genie.metaparser.util.schemaengine.Optional",
"re.compile"
] | [((2484, 2695), 're.compile', 're.compile', (['"""(?P<destination>[a-z0-9\\\\.\\\\:]+) +(?P<gateway>[a-z0-9\\\\.\\\\:_]+) +(?P<mask>[a-z0-9\\\\.\\\\:]+) +(?P<flags>[a-zA-Z]+) +(?P<metric>(\\\\d+)) +(?P<ref>(\\\\d+)) +(?P<use>(\\\\d+)) +(?P<interface>\\\\S+)"""'], {}), "(\n '(?P<destination>[a-z0-9\\\\.\\\\:]+) +(?P<gateway>[a-z0-9\\\\.\\\\:_]+) +(?P<mask>[a-z0-9\\\\.\\\\:]+) +(?P<flags>[a-zA-Z]+) +(?P<metric>(\\\\d+)) +(?P<ref>(\\\\d+)) +(?P<use>(\\\\d+)) +(?P<interface>\\\\S+)'\n )\n", (2494, 2695), False, 'import re\n'), ((5753, 5899), 're.compile', 're.compile', (['"""default via (?P<gateway>[a-z0-9\\\\.\\\\:]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>[a-z]+) metric (?P<metric>[\\\\d]+)"""'], {}), "(\n 'default via (?P<gateway>[a-z0-9\\\\.\\\\:]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>[a-z]+) metric (?P<metric>[\\\\d]+)'\n )\n", (5763, 5899), False, 'import re\n'), ((6071, 6210), 're.compile', 're.compile', (['"""(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) scope (?P<scope>\\\\w+) metric (?P<metric>[\\\\d]+)"""'], {}), "(\n '(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) scope (?P<scope>\\\\w+) metric (?P<metric>[\\\\d]+)'\n )\n", (6081, 6210), False, 'import re\n'), ((6395, 6562), 're.compile', 're.compile', (['"""(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+)"""'], {}), "(\n '(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+)'\n )\n", (6405, 6562), False, 'import re\n'), ((6790, 6967), 're.compile', 're.compile', (['"""(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+) linkdown """'], {}), "(\n 
'(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+) linkdown '\n )\n", (6800, 6967), False, 'import re\n'), ((7226, 7419), 're.compile', 're.compile', (['"""(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+) metric (?P<metric>[\\\\d]+)"""'], {}), "(\n '(?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+) metric (?P<metric>[\\\\d]+)'\n )\n", (7236, 7419), False, 'import re\n'), ((7669, 7868), 're.compile', 're.compile', (['"""broadcast (?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) table (?P<table>\\\\w+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+)"""'], {}), "(\n 'broadcast (?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) table (?P<table>\\\\w+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+)'\n )\n", (7679, 7868), False, 'import re\n'), ((8161, 8356), 're.compile', 're.compile', (['"""local (?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) table (?P<table>\\\\w+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+)"""'], {}), "(\n 'local (?P<destination>[a-z0-9\\\\.\\\\:\\\\/]+) dev (?P<device>[a-z0-9\\\\.\\\\-]+) table (?P<table>\\\\w+) proto (?P<proto>\\\\w+) scope (?P<scope>\\\\w+) src (?P<src>[a-z0-9\\\\.\\\\:\\\\/]+)'\n )\n", (8171, 8356), False, 'import re\n'), ((670, 675), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (673, 675), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((10398, 10429), 'netaddr.IPNetwork', 'IPNetwork', (["group['destination']"], {}), 
"(group['destination'])\n", (10407, 10429), False, 'from netaddr import IPAddress, IPNetwork\n'), ((11361, 11392), 'netaddr.IPNetwork', 'IPNetwork', (["group['destination']"], {}), "(group['destination'])\n", (11370, 11392), False, 'from netaddr import IPAddress, IPNetwork\n'), ((12402, 12433), 'netaddr.IPNetwork', 'IPNetwork', (["group['destination']"], {}), "(group['destination'])\n", (12411, 12433), False, 'from netaddr import IPAddress, IPNetwork\n'), ((13451, 13482), 'netaddr.IPNetwork', 'IPNetwork', (["group['destination']"], {}), "(group['destination'])\n", (13460, 13482), False, 'from netaddr import IPAddress, IPNetwork\n'), ((14587, 14618), 'netaddr.IPNetwork', 'IPNetwork', (["group['destination']"], {}), "(group['destination'])\n", (14596, 14618), False, 'from netaddr import IPAddress, IPNetwork\n'), ((15782, 15813), 'netaddr.IPNetwork', 'IPNetwork', (["group['destination']"], {}), "(group['destination'])\n", (15791, 15813), False, 'from netaddr import IPAddress, IPNetwork\n'), ((741, 746), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (744, 746), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((815, 820), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (818, 820), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((928, 945), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""flags"""'], {}), "('flags')\n", (936, 945), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((984, 1003), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""gateway"""'], {}), "('gateway')\n", (992, 1003), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1042, 1060), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""metric"""'], {}), "('metric')\n", (1050, 1060), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1099, 1114), 
'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""ref"""'], {}), "('ref')\n", (1107, 1114), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1153, 1168), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""use"""'], {}), "('use')\n", (1161, 1168), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1207, 1224), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""scope"""'], {}), "('scope')\n", (1215, 1224), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1263, 1280), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""proto"""'], {}), "('proto')\n", (1271, 1280), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1319, 1334), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""src"""'], {}), "('src')\n", (1327, 1334), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1373, 1394), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""broadcast"""'], {}), "('broadcast')\n", (1381, 1394), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1434, 1451), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""table"""'], {}), "('table')\n", (1442, 1451), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n'), ((1490, 1507), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""local"""'], {}), "('local')\n", (1498, 1507), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional\n')] |
import json
import datetime
import requests
from nameko.web.handlers import http
from nameko.timer import timer
from statsd import StatsClient
from circuitbreaker import circuit
class DemoChassisService:
name = "demo_chassis_service"
statsd = StatsClient('localhost', 8125, prefix='simplebank-demo')
@http('GET', '/health')
@statsd.timer('health')
def health(self, _request):
return json.dumps({'ok': datetime.datetime.utcnow().__str__()})
@http('GET', '/external')
@circuit(failure_threshold=5, expected_exception=ConnectionError)
@statsd.timer('external')
def external_request(self, _request):
response = requests.get('https://jsonplaceholder.typicode.com/posts/1')
return json.dumps({'code': response.status_code, 'body': response.text})
@http('GET', '/error')
@circuit(failure_threshold=5, expected_exception=ZeroDivisionError)
@statsd.timer('http_error')
def error_http_request(self):
return json.dumps({1 / 0})
class HealthCheckService:
name = "health_check_service"
statsd = StatsClient('localhost', 8125, prefix='simplebank-demo')
@timer(interval=10)
@statsd.timer('check_demo_service')
def check_demo_service(self):
response = requests.get('http://0.0.0.0:8000/health')
print("DemoChassisService HEALTH CHECK: status_code {}, response: {}".format(
response.status_code, response.text))
| [
"datetime.datetime.utcnow",
"circuitbreaker.circuit",
"json.dumps",
"requests.get",
"statsd.StatsClient",
"nameko.timer.timer",
"nameko.web.handlers.http"
] | [((254, 310), 'statsd.StatsClient', 'StatsClient', (['"""localhost"""', '(8125)'], {'prefix': '"""simplebank-demo"""'}), "('localhost', 8125, prefix='simplebank-demo')\n", (265, 310), False, 'from statsd import StatsClient\n'), ((317, 339), 'nameko.web.handlers.http', 'http', (['"""GET"""', '"""/health"""'], {}), "('GET', '/health')\n", (321, 339), False, 'from nameko.web.handlers import http\n'), ((478, 502), 'nameko.web.handlers.http', 'http', (['"""GET"""', '"""/external"""'], {}), "('GET', '/external')\n", (482, 502), False, 'from nameko.web.handlers import http\n'), ((508, 572), 'circuitbreaker.circuit', 'circuit', ([], {'failure_threshold': '(5)', 'expected_exception': 'ConnectionError'}), '(failure_threshold=5, expected_exception=ConnectionError)\n', (515, 572), False, 'from circuitbreaker import circuit\n'), ((812, 833), 'nameko.web.handlers.http', 'http', (['"""GET"""', '"""/error"""'], {}), "('GET', '/error')\n", (816, 833), False, 'from nameko.web.handlers import http\n'), ((839, 905), 'circuitbreaker.circuit', 'circuit', ([], {'failure_threshold': '(5)', 'expected_exception': 'ZeroDivisionError'}), '(failure_threshold=5, expected_exception=ZeroDivisionError)\n', (846, 905), False, 'from circuitbreaker import circuit\n'), ((1083, 1139), 'statsd.StatsClient', 'StatsClient', (['"""localhost"""', '(8125)'], {'prefix': '"""simplebank-demo"""'}), "('localhost', 8125, prefix='simplebank-demo')\n", (1094, 1139), False, 'from statsd import StatsClient\n'), ((1146, 1164), 'nameko.timer.timer', 'timer', ([], {'interval': '(10)'}), '(interval=10)\n', (1151, 1164), False, 'from nameko.timer import timer\n'), ((664, 724), 'requests.get', 'requests.get', (['"""https://jsonplaceholder.typicode.com/posts/1"""'], {}), "('https://jsonplaceholder.typicode.com/posts/1')\n", (676, 724), False, 'import requests\n'), ((740, 805), 'json.dumps', 'json.dumps', (["{'code': response.status_code, 'body': response.text}"], {}), "({'code': response.status_code, 'body': 
response.text})\n", (750, 805), False, 'import json\n'), ((987, 1006), 'json.dumps', 'json.dumps', (['{1 / 0}'], {}), '({1 / 0})\n', (997, 1006), False, 'import json\n'), ((1258, 1300), 'requests.get', 'requests.get', (['"""http://0.0.0.0:8000/health"""'], {}), "('http://0.0.0.0:8000/health')\n", (1270, 1300), False, 'import requests\n'), ((433, 459), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (457, 459), False, 'import datetime\n')] |
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
map = Basemap(projection='cyl')
map.drawmapboundary(fill_color='aqua')
map.fillcontinents(color='coral',lake_color='aqua')
map.drawcoastlines()
plt.show() | [
"mpl_toolkits.basemap.Basemap",
"matplotlib.pyplot.show"
] | [((80, 105), 'mpl_toolkits.basemap.Basemap', 'Basemap', ([], {'projection': '"""cyl"""'}), "(projection='cyl')\n", (87, 105), False, 'from mpl_toolkits.basemap import Basemap\n'), ((220, 230), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (228, 230), True, 'import matplotlib.pyplot as plt\n')] |
"""Unit tests for pynlpir's cli.py file."""
import os
import shutil
import stat
import unittest
try:
from urllib.error import URLError
from urllib.request import urlopen
except ImportError:
from urllib2 import URLError, urlopen
from click.testing import CliRunner
from pynlpir import cli
TEST_DIR = os.path.abspath(os.path.dirname(__file__))
LICENSE_FILE = os.path.join(TEST_DIR, 'data', 'NLPIR.user')
def can_reach_github():
"""Check if we can reach GitHub's website."""
try:
urlopen('http://github.com')
return True
except URLError:
return False
@unittest.skipIf(can_reach_github() is False, 'Unable to reach GitHub')
class TestCLI(unittest.TestCase):
"""Unit tests for the PyNLPIR CLI."""
def setUp(self):
self.runner = CliRunner()
def tearDown(self):
self.runner = None
def test_initial_license_download(self):
"""Tests that an initial license download works correctly."""
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli.cli, ('update', '-d.'))
self.assertEqual(0, result.exit_code)
self.assertEqual('License updated.\n', result.output)
def test_license_update(self):
"Test that a regular license update works correctly."""
with self.runner.isolated_filesystem():
shutil.copyfile(LICENSE_FILE, os.path.basename(LICENSE_FILE))
result = self.runner.invoke(cli.cli, ('update', '-d.'))
self.assertEqual(0, result.exit_code)
self.assertEqual('License updated.\n', result.output)
result = self.runner.invoke(cli.cli, ('update', '-d.'))
self.assertEqual(0, result.exit_code)
self.assertEqual('Your license is already up-to-date.\n',
result.output)
def test_license_write_fail(self):
"""Test tha writing a license file fails appropriately."""
with self.runner.isolated_filesystem():
cwd = os.getcwd()
os.chmod(cwd, stat.S_IREAD)
with self.assertRaises((IOError, OSError)):
cli.update_license_file(cwd)
| [
"urllib2.urlopen",
"pynlpir.cli.update_license_file",
"os.path.join",
"click.testing.CliRunner",
"os.getcwd",
"os.chmod",
"os.path.dirname",
"os.path.basename"
] | [((372, 416), 'os.path.join', 'os.path.join', (['TEST_DIR', '"""data"""', '"""NLPIR.user"""'], {}), "(TEST_DIR, 'data', 'NLPIR.user')\n", (384, 416), False, 'import os\n'), ((330, 355), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (345, 355), False, 'import os\n'), ((510, 538), 'urllib2.urlopen', 'urlopen', (['"""http://github.com"""'], {}), "('http://github.com')\n", (517, 538), False, 'from urllib2 import URLError, urlopen\n'), ((795, 806), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (804, 806), False, 'from click.testing import CliRunner\n'), ((2020, 2031), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2029, 2031), False, 'import os\n'), ((2044, 2071), 'os.chmod', 'os.chmod', (['cwd', 'stat.S_IREAD'], {}), '(cwd, stat.S_IREAD)\n', (2052, 2071), False, 'import os\n'), ((1397, 1427), 'os.path.basename', 'os.path.basename', (['LICENSE_FILE'], {}), '(LICENSE_FILE)\n', (1413, 1427), False, 'import os\n'), ((2144, 2172), 'pynlpir.cli.update_license_file', 'cli.update_license_file', (['cwd'], {}), '(cwd)\n', (2167, 2172), False, 'from pynlpir import cli\n')] |
import logging
import ibmsecurity.utilities.tools
import time
logger = logging.getLogger(__name__)
requires_model = "Appliance"
def get(isamAppliance, check_mode=False, force=False):
"""
Retrieving the current FIPS Mode configuration
"""
return isamAppliance.invoke_get("Retrieving the current FIPS Mode configuration",
"/fips_cfg", requires_model=requires_model)
def set(isamAppliance, fipsEnabled=True, tlsv10Enabled=True, tlsv11Enabled=False, check_mode=False, force=False):
"""
Updating the FIPS Mode configuration
"""
obj = _check(isamAppliance, fipsEnabled, tlsv10Enabled, tlsv11Enabled)
if force is True or obj['value'] is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=obj['warnings'])
else:
return isamAppliance.invoke_put(
"Updating the FIPS Mode configuration",
"/fips_cfg",
{
"fipsEnabled": fipsEnabled,
"tlsv10Enabled": tlsv10Enabled,
"tlsv11Enabled": tlsv11Enabled
},
requires_model=requires_model
)
return isamAppliance.create_return_object(warnings=obj['warnings'])
def restart(isamAppliance, check_mode=False, force=False):
"""
Rebooting and enabling the FIPS Mode configuration
:param isamAppliance:
:param check_mode:
:param force:
:return:
"""
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_put(
"Rebooting and enabling the FIPS Mode configuration",
"/fips_cfg/restart",
{}, requires_model=requires_model
)
def restart_and_wait(isamAppliance, wait_time=300, check_freq=5, check_mode=False, force=False):
"""
Restart after FIPS configuration changes
:param isamAppliance:
:param wait_time:
:param check_freq:
:param check_mode:
:param force:
:return:
"""
if isamAppliance.facts['model'] != "Appliance":
return isamAppliance.create_return_object(
warnings="API invoked requires model: {0}, appliance is of deployment model: {1}.".format(
requires_model, isamAppliance.facts['model']))
warnings = []
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
firmware = ibmsecurity.isam.base.firmware.get(isamAppliance, check_mode=check_mode, force=force)
ret_obj = restart(isamAppliance)
if ret_obj['rc'] == 0:
sec = 0
# Now check if it is up and running
while 1:
ret_obj = ibmsecurity.isam.base.firmware.get(isamAppliance, check_mode=check_mode, force=force,
ignore_error=True)
# check partition last_boot time
if ret_obj['rc'] == 0 and isinstance(ret_obj['data'], list) and len(ret_obj['data']) > 0 and \
(('last_boot' in ret_obj['data'][0] and ret_obj['data'][0]['last_boot'] != firmware['data'][0][
'last_boot'] and ret_obj['data'][0]['active'] == True) or (
'last_boot' in ret_obj['data'][1] and ret_obj['data'][1]['last_boot'] !=
firmware['data'][1]['last_boot'] and ret_obj['data'][1]['active'] == True)):
logger.info("Server is responding and has a different boot time!")
return isamAppliance.create_return_object(warnings=warnings)
else:
time.sleep(check_freq)
sec += check_freq
logger.debug(
"Server is not responding yet. Waited for {0} secs, next check in {1} secs.".format(sec,
check_freq))
if sec >= wait_time:
warnings.append(
"The FIPS restart not detected or completed, exiting... after {0} seconds".format(sec))
break
return isamAppliance.create_return_object(warnings=warnings)
def _check(isamAppliance, fipsEnabled, tlsv10Enabled, tlsv11Enabled):
obj = {'value': True, 'warnings': ""}
ret_obj = get(isamAppliance)
obj['warnings'] = ret_obj['warnings']
if ret_obj['data']['fipsEnabled'] != fipsEnabled:
logger.info("fipsEnabled change to {0}".format(fipsEnabled))
obj['value'] = False
return obj
if ret_obj['data']['tlsv10Enabled'] != tlsv10Enabled:
logger.info("TLS v1.0 change to {0}".format(tlsv10Enabled))
obj['value'] = False
return obj
if ret_obj['data']['tlsv11Enabled'] != tlsv11Enabled:
logger.info("TLS v1.1 change to {0}".format(tlsv11Enabled))
obj['value'] = False
return obj
return obj
def compare(isamAppliance1, isamAppliance2):
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
| [
"logging.getLogger",
"time.sleep"
] | [((72, 99), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (89, 99), False, 'import logging\n'), ((3746, 3768), 'time.sleep', 'time.sleep', (['check_freq'], {}), '(check_freq)\n', (3756, 3768), False, 'import time\n')] |
#!/usr/bin/env python
""" HIAS AI Model Data Augmentation Class.
Provides data augmentation methods.
MIT License
Copyright (c) 2021 Asociación de Investigacion en Inteligencia Artificial
Para la Leucemia <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files(the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Contributors:
- <NAME> - First version - 2021-5-2
"""
import cv2
import random
import numpy as np
from numpy.random import seed
from scipy import ndimage
from skimage import transform as tm
class augmentation():
""" HIAS AI Model Data Augmentation Class
Provides data augmentation methods.
"""
def __init__(self, helpers):
""" Initializes the class. """
self.helpers = helpers
self.seed = self.helpers.confs["data"]["seed"]
seed(self.seed)
self.helpers.logger.info(
"Augmentation class initialization complete.")
def grayscale(self, data):
""" Creates a grayscale copy. """
gray = cv2.cvtColor(data, cv2.COLOR_BGR2GRAY)
return np.dstack([gray, gray, gray]).astype(np.float32)/255.
def equalize_hist(self, data):
""" Creates a histogram equalized copy. """
img_to_yuv = cv2.cvtColor(data, cv2.COLOR_BGR2YUV)
img_to_yuv[:, :, 0] = cv2.equalizeHist(img_to_yuv[:, :, 0])
hist_equalization_result = cv2.cvtColor(img_to_yuv, cv2.COLOR_YUV2BGR)
return hist_equalization_result.astype(np.float32)/255.
def reflection(self, data):
""" Creates a reflected copy. """
return cv2.flip(data, 0).astype(np.float32)/255., cv2.flip(data, 1).astype(np.float32)/255.
def gaussian(self, data):
""" Creates a gaussian blurred copy. """
return ndimage.gaussian_filter(
data, sigma=5.11).astype(np.float32)/255.
def translate(self, data):
""" Creates transformed copy. """
cols, rows, chs = data.shape
return cv2.warpAffine(
data, np.float32([[1, 0, 84], [0, 1, 56]]), (rows, cols),
borderMode=cv2.BORDER_CONSTANT,
borderValue=(144, 159, 162)).astype(np.float32)/255.
def rotation(self, data, label, tdata, tlabels):
""" Creates rotated copies. """
cols, rows, chs = data.shape
for i in range(0, self.helpers.confs["data"]["rotations"]):
# Seed needs to be set each time randint is called
random.seed(self.seed)
rand_deg = random.randint(-180, 180)
matrix = cv2.getRotationMatrix2D(
(cols/2, rows/2), rand_deg, 0.70)
rotated = cv2.warpAffine(
data, matrix, (rows, cols),
borderMode=cv2.BORDER_CONSTANT,
borderValue=(144, 159, 162))
rotated = rotated.astype(np.float32)/255.
tdata.append(rotated)
tlabels.append(label)
return tdata, tlabels
def shear(self, data):
""" Creates a histogram equalized copy. """
at = tm.AffineTransform(shear=0.5)
return tm.warp(data, inverse_map=at)
| [
"numpy.dstack",
"cv2.warpAffine",
"cv2.flip",
"numpy.float32",
"skimage.transform.AffineTransform",
"skimage.transform.warp",
"random.seed",
"cv2.equalizeHist",
"numpy.random.seed",
"cv2.cvtColor",
"scipy.ndimage.gaussian_filter",
"cv2.getRotationMatrix2D",
"random.randint"
] | [((1721, 1736), 'numpy.random.seed', 'seed', (['self.seed'], {}), '(self.seed)\n', (1725, 1736), False, 'from numpy.random import seed\n'), ((1921, 1959), 'cv2.cvtColor', 'cv2.cvtColor', (['data', 'cv2.COLOR_BGR2GRAY'], {}), '(data, cv2.COLOR_BGR2GRAY)\n', (1933, 1959), False, 'import cv2\n'), ((2139, 2176), 'cv2.cvtColor', 'cv2.cvtColor', (['data', 'cv2.COLOR_BGR2YUV'], {}), '(data, cv2.COLOR_BGR2YUV)\n', (2151, 2176), False, 'import cv2\n'), ((2207, 2244), 'cv2.equalizeHist', 'cv2.equalizeHist', (['img_to_yuv[:, :, 0]'], {}), '(img_to_yuv[:, :, 0])\n', (2223, 2244), False, 'import cv2\n'), ((2280, 2323), 'cv2.cvtColor', 'cv2.cvtColor', (['img_to_yuv', 'cv2.COLOR_YUV2BGR'], {}), '(img_to_yuv, cv2.COLOR_YUV2BGR)\n', (2292, 2323), False, 'import cv2\n'), ((3930, 3959), 'skimage.transform.AffineTransform', 'tm.AffineTransform', ([], {'shear': '(0.5)'}), '(shear=0.5)\n', (3948, 3959), True, 'from skimage import transform as tm\n'), ((3975, 4004), 'skimage.transform.warp', 'tm.warp', (['data'], {'inverse_map': 'at'}), '(data, inverse_map=at)\n', (3982, 4004), True, 'from skimage import transform as tm\n'), ((3338, 3360), 'random.seed', 'random.seed', (['self.seed'], {}), '(self.seed)\n', (3349, 3360), False, 'import random\n'), ((3384, 3409), 'random.randint', 'random.randint', (['(-180)', '(180)'], {}), '(-180, 180)\n', (3398, 3409), False, 'import random\n'), ((3431, 3491), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['(cols / 2, rows / 2)', 'rand_deg', '(0.7)'], {}), '((cols / 2, rows / 2), rand_deg, 0.7)\n', (3454, 3491), False, 'import cv2\n'), ((3528, 3635), 'cv2.warpAffine', 'cv2.warpAffine', (['data', 'matrix', '(rows, cols)'], {'borderMode': 'cv2.BORDER_CONSTANT', 'borderValue': '(144, 159, 162)'}), '(data, matrix, (rows, cols), borderMode=cv2.BORDER_CONSTANT,\n borderValue=(144, 159, 162))\n', (3542, 3635), False, 'import cv2\n'), ((1975, 2004), 'numpy.dstack', 'np.dstack', (['[gray, gray, gray]'], {}), '([gray, gray, gray])\n', (1984, 2004), 
True, 'import numpy as np\n'), ((2660, 2701), 'scipy.ndimage.gaussian_filter', 'ndimage.gaussian_filter', (['data'], {'sigma': '(5.11)'}), '(data, sigma=5.11)\n', (2683, 2701), False, 'from scipy import ndimage\n'), ((2479, 2496), 'cv2.flip', 'cv2.flip', (['data', '(0)'], {}), '(data, 0)\n', (2487, 2496), False, 'import cv2\n'), ((2522, 2539), 'cv2.flip', 'cv2.flip', (['data', '(1)'], {}), '(data, 1)\n', (2530, 2539), False, 'import cv2\n'), ((2901, 2937), 'numpy.float32', 'np.float32', (['[[1, 0, 84], [0, 1, 56]]'], {}), '([[1, 0, 84], [0, 1, 56]])\n', (2911, 2937), True, 'import numpy as np\n')] |
from pytest import raises
from vedro._core._scenario_finder._file_filters import FileFilter
def test_file_filter():
with raises(Exception) as exc_info:
FileFilter()
assert exc_info.type is TypeError
assert "Can't instantiate abstract class FileFilter" in str(exc_info.value)
| [
"pytest.raises",
"vedro._core._scenario_finder._file_filters.FileFilter"
] | [((128, 145), 'pytest.raises', 'raises', (['Exception'], {}), '(Exception)\n', (134, 145), False, 'from pytest import raises\n'), ((167, 179), 'vedro._core._scenario_finder._file_filters.FileFilter', 'FileFilter', ([], {}), '()\n', (177, 179), False, 'from vedro._core._scenario_finder._file_filters import FileFilter\n')] |
# Program 8_plot_data_perstation.py written by <NAME> (<EMAIL>)
file_name= '8_plot_data_perstation.py'
# Uses receiver functions computed to produce a nice graph for every directory in DATARF
import obspy
from obspy import read
from obspy.core import Stream
from obspy.core import trace
import matplotlib.pyplot as plt
import os.path
import time
import glob
import shutil
import numpy as np
from obspy import UTCDateTime
import receiver_function as rf
direc = 'DataRF'
flag = 'SV'
filt = 'jgf1'
stadirs = glob.glob(direc+'/*')
for stadir in stadirs:
print(stadir)
with open(stadir+'/selected_RFs_jgf1.dat','r') as f:
goodrfs= f.read().replace('\n', '')
# loop through events
stalist=glob.glob(stadir+'/*.PICKLE')
print(stalist)
c=0
# Loop through data
if(len(stalist)>0):
for i in range(len(stalist)): #range(cat.count()):
print(stalist[i])
seis=read(stalist[i],format='PICKLE')
distdg=seis[0].stats['dist']
if stalist[i] in goodrfs:
good=True
print('YAY',seis[0].stats['event'].magnitudes[0].mag)
else:
good=False
print('NO',seis[0].stats['event'].magnitudes[0].mag)
tshift=UTCDateTime(seis[0].stats['starttime'])-seis[0].stats['event'].origins[0].time
#Ptime=Ptime
plt.subplot(1,3,1)
vertical = seis.select(channel='BHZ')[0]
vertical.filter('bandpass', freqmin=0.01,freqmax=.1, corners=2, zerophase=True)
windowed=vertical[np.where(vertical.times()>seis[0].stats.traveltimes['P']-100) and np.where(vertical.times()<seis[0].stats.traveltimes['P']+100)]
norm=np.max(np.abs(windowed))
if good:
plt.plot(vertical.times()-seis[0].stats.traveltimes['P'], vertical.data/norm+np.round(distdg),'k')
else:
plt.plot(vertical.times()-seis[0].stats.traveltimes['P'], vertical.data/norm+np.round(distdg),'r')
#plt.plot(seis[0].stats.traveltimes['P'],np.round(distdg),'.b')
#plt.plot(seis[0].stats.traveltimes['S'],np.round(distdg),'.g')
plt.xlim([-25,150])
plt.ylim([30,92])
plt.subplot(1,3,2)
radial = seis.select(channel='BHR')[0]
radial.filter('bandpass', freqmin=0.01,freqmax=.1, corners=2, zerophase=True)
windowed=vertical[np.where(radial.times()>seis[0].stats.traveltimes['P']-100) and np.where(radial.times()<seis[0].stats.traveltimes['P']+100)]
norm=np.max(np.abs(windowed))
if good:
plt.plot(radial.times()-seis[0].stats.traveltimes['P'], radial.data/norm+np.round(distdg),'k')
else:
plt.plot(radial.times()-seis[0].stats.traveltimes['P'], radial.data/norm+np.round(distdg),'r')
plt.xlim([-25,150])
plt.plot(seis[0].stats.traveltimes['P'],np.round(distdg),'.b')
plt.plot(seis[0].stats.traveltimes['S'],np.round(distdg),'.g')
plt.ylim([30,92])
plt.subplot(1,3,3)
RF=getattr(seis[0],filt)['iterativedeconvolution']
time=getattr(seis[0],filt)['time']
if good:
plt.plot(time, RF/np.max(np.abs(RF))+np.round(distdg),'k')
else:
plt.plot(time, RF/np.max(np.abs(RF))+np.round(distdg),'r')
plt.subplot(1,3,1)
plt.title('vertical')
plt.ylabel('distance')
plt.xlabel('time')
plt.subplot(1,3,2)
plt.title('radial')
plt.ylabel('distance')
plt.xlabel('time')
plt.subplot(1,3,3)
plt.title('receiver functions')
plt.ylabel('distance')
plt.xlabel('time')
#plt.xlim([-150,1000])
plt.show()
| [
"obspy.read",
"numpy.abs",
"matplotlib.pyplot.ylabel",
"numpy.round",
"matplotlib.pyplot.xlabel",
"obspy.UTCDateTime",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.subplot",
"glob.glob",
"matplotlib.pyplot.show"
] | [((509, 532), 'glob.glob', 'glob.glob', (["(direc + '/*')"], {}), "(direc + '/*')\n", (518, 532), False, 'import glob\n'), ((713, 744), 'glob.glob', 'glob.glob', (["(stadir + '/*.PICKLE')"], {}), "(stadir + '/*.PICKLE')\n", (722, 744), False, 'import glob\n'), ((3703, 3723), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(1)'], {}), '(1, 3, 1)\n', (3714, 3723), True, 'import matplotlib.pyplot as plt\n'), ((3730, 3751), 'matplotlib.pyplot.title', 'plt.title', (['"""vertical"""'], {}), "('vertical')\n", (3739, 3751), True, 'import matplotlib.pyplot as plt\n'), ((3760, 3782), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""distance"""'], {}), "('distance')\n", (3770, 3782), True, 'import matplotlib.pyplot as plt\n'), ((3791, 3809), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time"""'], {}), "('time')\n", (3801, 3809), True, 'import matplotlib.pyplot as plt\n'), ((3818, 3838), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(2)'], {}), '(1, 3, 2)\n', (3829, 3838), True, 'import matplotlib.pyplot as plt\n'), ((3845, 3864), 'matplotlib.pyplot.title', 'plt.title', (['"""radial"""'], {}), "('radial')\n", (3854, 3864), True, 'import matplotlib.pyplot as plt\n'), ((3873, 3895), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""distance"""'], {}), "('distance')\n", (3883, 3895), True, 'import matplotlib.pyplot as plt\n'), ((3904, 3922), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time"""'], {}), "('time')\n", (3914, 3922), True, 'import matplotlib.pyplot as plt\n'), ((3931, 3951), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(3)'], {}), '(1, 3, 3)\n', (3942, 3951), True, 'import matplotlib.pyplot as plt\n'), ((3958, 3989), 'matplotlib.pyplot.title', 'plt.title', (['"""receiver functions"""'], {}), "('receiver functions')\n", (3967, 3989), True, 'import matplotlib.pyplot as plt\n'), ((3998, 4020), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""distance"""'], {}), "('distance')\n", (4008, 4020), True, 'import 
matplotlib.pyplot as plt\n'), ((4029, 4047), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time"""'], {}), "('time')\n", (4039, 4047), True, 'import matplotlib.pyplot as plt\n'), ((4095, 4105), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4103, 4105), True, 'import matplotlib.pyplot as plt\n'), ((932, 965), 'obspy.read', 'read', (['stalist[i]'], {'format': '"""PICKLE"""'}), "(stalist[i], format='PICKLE')\n", (936, 965), False, 'from obspy import read\n'), ((1436, 1456), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(1)'], {}), '(1, 3, 1)\n', (1447, 1456), True, 'import matplotlib.pyplot as plt\n'), ((2298, 2318), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[-25, 150]'], {}), '([-25, 150])\n', (2306, 2318), True, 'import matplotlib.pyplot as plt\n'), ((2334, 2352), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[30, 92]'], {}), '([30, 92])\n', (2342, 2352), True, 'import matplotlib.pyplot as plt\n'), ((2368, 2388), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(2)'], {}), '(1, 3, 2)\n', (2379, 2388), True, 'import matplotlib.pyplot as plt\n'), ((3035, 3055), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[-25, 150]'], {}), '([-25, 150])\n', (3043, 3055), True, 'import matplotlib.pyplot as plt\n'), ((3247, 3265), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[30, 92]'], {}), '([30, 92])\n', (3255, 3265), True, 'import matplotlib.pyplot as plt\n'), ((3294, 3314), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(3)'], {}), '(1, 3, 3)\n', (3305, 3314), True, 'import matplotlib.pyplot as plt\n'), ((1309, 1348), 'obspy.UTCDateTime', 'UTCDateTime', (["seis[0].stats['starttime']"], {}), "(seis[0].stats['starttime'])\n", (1320, 1348), False, 'from obspy import UTCDateTime\n'), ((1799, 1815), 'numpy.abs', 'np.abs', (['windowed'], {}), '(windowed)\n', (1805, 1815), True, 'import numpy as np\n'), ((2724, 2740), 'numpy.abs', 'np.abs', (['windowed'], {}), '(windowed)\n', (2730, 2740), True, 'import numpy as np\n'), ((3129, 3145), 
'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (3137, 3145), True, 'import numpy as np\n'), ((3208, 3224), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (3216, 3224), True, 'import numpy as np\n'), ((1939, 1955), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (1947, 1955), True, 'import numpy as np\n'), ((2080, 2096), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (2088, 2096), True, 'import numpy as np\n'), ((2860, 2876), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (2868, 2876), True, 'import numpy as np\n'), ((2997, 3013), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (3005, 3013), True, 'import numpy as np\n'), ((3513, 3529), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (3521, 3529), True, 'import numpy as np\n'), ((3614, 3630), 'numpy.round', 'np.round', (['distdg'], {}), '(distdg)\n', (3622, 3630), True, 'import numpy as np\n'), ((3501, 3511), 'numpy.abs', 'np.abs', (['RF'], {}), '(RF)\n', (3507, 3511), True, 'import numpy as np\n'), ((3602, 3612), 'numpy.abs', 'np.abs', (['RF'], {}), '(RF)\n', (3608, 3612), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
################################################################################
# _____ _ _____ _ #
# / ____(_) / ____| | | #
# | | _ ___ ___ ___ | (___ _ _ ___| |_ ___ _ __ ___ ___ #
# | | | / __|/ __/ _ \ \___ \| | | / __| __/ _ \ '_ ` _ \/ __| #
# | |____| \__ \ (_| (_) | ____) | |_| \__ \ || __/ | | | | \__ \ #
# \_____|_|___/\___\___/ |_____/ \__, |___/\__\___|_| |_| |_|___/ #
# __/ | #
# |___/ #
# _ __ _____ _ _____ ______ #
# | |/ / / ____| | |/ ____| ____| #
# | ' / ___ _ __ ___ __ _ | (___ ___ | | (___ | |__ #
# | < / _ \| '__/ _ \/ _` | \___ \ / _ \| |\___ \| __| #
# | . \ (_) | | | __/ (_| | ____) | (_) | |____) | |____ #
# |_|\_\___/|_| \___|\__,_| |_____/ \___/|_|_____/|______| #
# #
################################################################################
# #
# Copyright (c) 2016 Cisco Systems #
# All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT #
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #
# License for the specific language governing permissions and limitations #
# under the License. #
# #
################################################################################
import re
import json
from pygics import Burst
from django.contrib import admin
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from archon.settings import SESSION_COOKIE_AGE
from archon.view import *
ARCHON_DEBUG = False
class ManagerAbstraction:
__MANAGER__ = None
@classmethod
def instance(cls, *argv, **kargs):
if cls.__MANAGER__ == None: cls.__MANAGER__ = cls(*argv, **kargs)
return cls.__MANAGER__
def getSummary(self, r, m, v):
return {
'name' : '?',
'icon' : 'Default.png',
'desc' : 'This is Unknown Manager',
'link' : '/dashboard',
'view' : DIV()
}
class ArchonReq:
def __init__(self, request, method, path, query, data):
self.Request = request
self.Method = method
self.Path = path
self.Query = query
self.Data = data
def __str__(self):
return '%s:%s\nQuery:%s\nData:%s' % (self.Method, self.Path, self.Query, self.Data)
class ArchonView:
class PageContent(TAG):
def __init__(self):
TAG.__init__(self, 'div', CLASS='pagecontent')
def __init__(self, app, lang):
self.Menu = DIV()
self.Page = ArchonView.PageContent()
self._app = app
self._lang = lang
def __call__(self, key):
glb_locale = archon_locales['GLOBAL']
if self._app in archon_locales:
app_locale = archon_locales[self._app]
if key in app_locale:
key_locale = app_locale[key]
for lang in self._lang:
if lang in key_locale: return key_locale[lang]
if key in glb_locale:
key_locale = glb_locale[key]
for lang in self._lang:
if lang in key_locale: return key_locale[lang]
return key
def __render__(self):
return {'menu' : self.Menu, 'page' : self.Page}
@classmethod
def __error__(cls, title, msg):
return {'menu' : DIV(), 'page' : ALERT(title, msg, CLASS='alert-danger')}
def pageview(manager_class, **async_path):
def wrapper(view):
@login_required
def decofunc(request):
request.session.set_expiry(SESSION_COOKIE_AGE)
method = request.method
path = filter(None, request.path.split('/'))
lang = filter(None, re.split(';|,|q=0.\d', request.META['HTTP_ACCEPT_LANGUAGE']))
app = view.__module__.split('.')[1]
v = ArchonView(app, lang)
try: m = manager_class.instance()
except Exception as e: return JsonResponse(ArchonView.__error__(v('manager allocation error'), str(e)))
try:
if method == 'GET':
query = dict(request.GET)
data = {}
elif method == 'POST':
query = dict(request.POST)
if not hasattr(request, '_body') and request._read_started: data = request.FILES
else: data = json.loads(request.body)
elif method == 'PUT':
query = dict(request.PUT)
if not hasattr(request, '_body') and request._read_started: data = request.FILES
else: data = json.loads(request.body)
elif method == 'DELETE':
query = {}
data = {}
else:
query = {}
data = {}
except Exception as e: return JsonResponse(ArchonView.__error__(v('request error'), str(e)))
r = ArchonReq(request, method, path, query, data)
async_path_names = async_path.keys()
for async_path_name in async_path_names:
if async_path_name in path:
try: return JsonResponse(async_path[async_path_name](r, m, v))
except Exception as e: return JsonResponse(ArchonView.__error__(v('application error'), str(e)))
try: view(r, m, v)
except Exception as e: return JsonResponse(ArchonView.__error__(v('application error'), str(e)))
return JsonResponse(v.__render__())
def decofunc_debug(request):
method = request.method
path = filter(None, request.path.split('/'))
lang = filter(None, re.split(';|,|q=0.\d', request.META['HTTP_ACCEPT_LANGUAGE']))
app = view.__module__.split('.')[1]
v = ArchonView(app, lang)
m = manager_class.instance()
if method == 'GET':
query = dict(request.GET)
data = {}
elif method == 'POST':
query = dict(request.POST)
if not hasattr(request, '_body') and request._read_started: data = request.FILES
else: data = json.loads(request.body)
elif method == 'PUT':
query = dict(request.PUT)
if not hasattr(request, '_body') and request._read_started: data = request.FILES
else: data = json.loads(request.body)
elif method == 'DELETE':
query = {}
data = {}
else:
query = {}
data = {}
r = ArchonReq(request, method, path, query, data)
async_path_names = async_path.keys()
for async_path_name in async_path_names:
if async_path_name in path:
return JsonResponse(async_path[async_path_name](r, m, v))
view(r, m, v)
return JsonResponse(v.__render__())
if ARCHON_DEBUG: return decofunc_debug
else: return decofunc
return wrapper
def modelview(model):
admin.site.register(model, admin.ModelAdmin)
| [
"re.split",
"django.contrib.admin.site.register",
"json.loads"
] | [((9003, 9047), 'django.contrib.admin.site.register', 'admin.site.register', (['model', 'admin.ModelAdmin'], {}), '(model, admin.ModelAdmin)\n', (9022, 9047), False, 'from django.contrib import admin\n'), ((5406, 5467), 're.split', 're.split', (['""";|,|q=0.\\\\d"""', "request.META['HTTP_ACCEPT_LANGUAGE']"], {}), "(';|,|q=0.\\\\d', request.META['HTTP_ACCEPT_LANGUAGE'])\n", (5414, 5467), False, 'import re\n'), ((7483, 7544), 're.split', 're.split', (['""";|,|q=0.\\\\d"""', "request.META['HTTP_ACCEPT_LANGUAGE']"], {}), "(';|,|q=0.\\\\d', request.META['HTTP_ACCEPT_LANGUAGE'])\n", (7491, 7544), False, 'import re\n'), ((8014, 8038), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (8024, 8038), False, 'import json\n'), ((6105, 6129), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (6115, 6129), False, 'import json\n'), ((8245, 8269), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (8255, 8269), False, 'import json\n'), ((6352, 6376), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (6362, 6376), False, 'import json\n')] |
import sys
import comtypes
from comtypes.client import CreateObject
try:
# Connecting | coneccion
xl = CreateObject("Excel.Application")
except (OSError, comtypes.COMError):
print("No tiene instalada el programa(Excel).")
sys.exit(-1)
xl.Visible = True
print (xl) | [
"comtypes.client.CreateObject",
"sys.exit"
] | [((112, 145), 'comtypes.client.CreateObject', 'CreateObject', (['"""Excel.Application"""'], {}), "('Excel.Application')\n", (124, 145), False, 'from comtypes.client import CreateObject\n'), ((236, 248), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (244, 248), False, 'import sys\n')] |
from selenium import webdriver
import time
chromedriver = "C:/Users/deniz/chromedriver/chromedriver"
driver = webdriver.Chrome(chromedriver)
driver.get('http://127.0.0.1:8000/')
dashboard = '//*[@id="accordionSidebar"]/li[1]/a'
sectors_1 = '//*[@id="sectors"]'
sectors_1_element = '//*[@id="sectors"]/option[4]'
add_sector = '//*[@id="select_filter_form"]/div[1]/input[1]'
remove_sector = '//*[@id="select_filter_form"]/div[1]/input[2]'
sectors_2 = '//*[@id="sectors2"]'
sectors_2_element = '//*[@id="sectors2"]/option[4]'
time.sleep(2)
driver.find_element_by_xpath(dashboard).click()
time.sleep(5)
driver.find_element_by_xpath(sectors_1).click()
time.sleep(2)
driver.find_element_by_xpath(sectors_1_element).click()
time.sleep(5)
driver.find_element_by_xpath(add_sector).click()
time.sleep(5)
driver.find_element_by_xpath(sectors_2).click()
time.sleep(2)
driver.find_element_by_xpath(sectors_2_element).click()
time.sleep(5)
driver.find_element_by_xpath(remove_sector).click()
| [
"selenium.webdriver.Chrome",
"time.sleep"
] | [((111, 141), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['chromedriver'], {}), '(chromedriver)\n', (127, 141), False, 'from selenium import webdriver\n'), ((528, 541), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (538, 541), False, 'import time\n'), ((590, 603), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (600, 603), False, 'import time\n'), ((652, 665), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (662, 665), False, 'import time\n'), ((722, 735), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (732, 735), False, 'import time\n'), ((785, 798), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (795, 798), False, 'import time\n'), ((847, 860), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (857, 860), False, 'import time\n'), ((917, 930), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (927, 930), False, 'import time\n')] |
import tensorflow as tf
from netensorflow.ann.ANN import ANN
from netensorflow.ann.macro_layer.MacroLayer import MacroLayer
from netensorflow.ann.macro_layer.layer_structure.InputLayerStructure import InputLayerStructure
from netensorflow.ann.macro_layer.layer_structure.LayerStructure import LayerStructure, LayerType
from netensorflow.ann.macro_layer.layer_structure.layers.FullConnected import FullConnected
from netensorflow.ann.macro_layer.layer_structure.layers.FullConnectedWithSoftmaxLayer import FullConnectedWithSoftmaxLayer
'''
ann Creation and simple usage, the goal of this code is simply run the most simpler artificial neural network
'''
def main():
    """Build a minimal ANN (3-input layer -> 20-unit hidden -> 10-unit softmax)
    and run it for 100 iterations on random 3-dimensional inputs, printing
    the network output each time."""
    import numpy as np

    session = tf.Session()

    # Individual layers.
    hidden = FullConnected(inputs_amount=20)
    softmax_out = FullConnectedWithSoftmaxLayer(inputs_amount=10)

    # Layer structures: input accepts batches of 3-element vectors.
    structures = [
        InputLayerStructure([None, 3]),
        LayerStructure('Hidden', layer_type=LayerType.ONE_DIMENSION, layers=[hidden]),
        LayerStructure('Output', layer_type=LayerType.ONE_DIMENSION, layers=[softmax_out]),
    ]

    network = ANN(macro_layers=MacroLayer(layers_structure=structures),
                tf_session=session, base_folder='./tensorboard_logs/')
    network.connect_and_initialize()

    # Feed 100 random input vectors drawn uniformly from [0, 10).
    for iteration in range(100):
        sample = [np.random.uniform(0.0, 10.0, 3)]
        print(network.run(global_iteration=iteration, input_tensor_value=sample))


if __name__ == '__main__':
    main()
| [
"netensorflow.ann.macro_layer.layer_structure.InputLayerStructure.InputLayerStructure",
"netensorflow.ann.macro_layer.layer_structure.layers.FullConnectedWithSoftmaxLayer.FullConnectedWithSoftmaxLayer",
"tensorflow.Session",
"netensorflow.ann.macro_layer.layer_structure.layers.FullConnected.FullConnected",
... | [((706, 718), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (716, 718), True, 'import tensorflow as tf\n'), ((779, 810), 'netensorflow.ann.macro_layer.layer_structure.layers.FullConnected.FullConnected', 'FullConnected', ([], {'inputs_amount': '(20)'}), '(inputs_amount=20)\n', (792, 810), False, 'from netensorflow.ann.macro_layer.layer_structure.layers.FullConnected import FullConnected\n'), ((827, 874), 'netensorflow.ann.macro_layer.layer_structure.layers.FullConnectedWithSoftmaxLayer.FullConnectedWithSoftmaxLayer', 'FullConnectedWithSoftmaxLayer', ([], {'inputs_amount': '(10)'}), '(inputs_amount=10)\n', (856, 874), False, 'from netensorflow.ann.macro_layer.layer_structure.layers.FullConnectedWithSoftmaxLayer import FullConnectedWithSoftmaxLayer\n'), ((927, 957), 'netensorflow.ann.macro_layer.layer_structure.InputLayerStructure.InputLayerStructure', 'InputLayerStructure', (['input_dim'], {}), '(input_dim)\n', (946, 957), False, 'from netensorflow.ann.macro_layer.layer_structure.InputLayerStructure import InputLayerStructure\n'), ((987, 1075), 'netensorflow.ann.macro_layer.layer_structure.LayerStructure.LayerStructure', 'LayerStructure', (['"""Hidden"""'], {'layer_type': 'LayerType.ONE_DIMENSION', 'layers': '[hidden_layer]'}), "('Hidden', layer_type=LayerType.ONE_DIMENSION, layers=[\n hidden_layer])\n", (1001, 1075), False, 'from netensorflow.ann.macro_layer.layer_structure.LayerStructure import LayerStructure, LayerType\n'), ((1100, 1185), 'netensorflow.ann.macro_layer.layer_structure.LayerStructure.LayerStructure', 'LayerStructure', (['"""Output"""'], {'layer_type': 'LayerType.ONE_DIMENSION', 'layers': '[out_layer]'}), "('Output', layer_type=LayerType.ONE_DIMENSION, layers=[out_layer]\n )\n", (1114, 1185), False, 'from netensorflow.ann.macro_layer.layer_structure.LayerStructure import LayerStructure, LayerType\n'), ((1218, 1322), 'netensorflow.ann.macro_layer.MacroLayer.MacroLayer', 'MacroLayer', ([], {'layers_structure': '[input_layer_structure, 
hidden_layer_structure, output_layer_structure]'}), '(layers_structure=[input_layer_structure, hidden_layer_structure,\n output_layer_structure])\n', (1228, 1322), False, 'from netensorflow.ann.macro_layer.MacroLayer import MacroLayer\n'), ((1340, 1430), 'netensorflow.ann.ANN.ANN', 'ANN', ([], {'macro_layers': 'macro_layers', 'tf_session': 'tf_sess', 'base_folder': '"""./tensorboard_logs/"""'}), "(macro_layers=macro_layers, tf_session=tf_sess, base_folder=\n './tensorboard_logs/')\n", (1343, 1430), False, 'from netensorflow.ann.ANN import ANN\n'), ((1557, 1588), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(10.0)', '(3)'], {}), '(0.0, 10.0, 3)\n', (1574, 1588), True, 'import numpy as np\n')] |
import yaml
import os
### Sample Contents of config.yaml:
# 0002_info_leakage:
# category: Sifu C/C++
# points: 100
# description: Leave no trace
# vulnerability: CWE-14 * Information Leakage
# directory: Challenges/C_CPP/0002_info_leakage
# send_dir: true
# file: func_0009.c
# fname: func.c
# chal_id: c94062933919
# root: template
# root_file: chal_files.html
# run: ./run.py
# flag: f296-5420-65a9-7fc8
# type: c_makefile
# disable: false
# feedback: collect
# addHeader: |
# #define __OVERWRITE
# #include "utils.h"
# #include "deprecated.h"
# #include "redirect.h"
# #include "log.h"
# Directory containing this module; default base path for file lookups below.
localPath = os.path.join(os.path.dirname(__file__))


def FilesToJson(files, path=localPath):
    """Read one or more files and return a ``{filename: contents}`` dict.

    Args:
        files: a single file name (str) or a list of file names, each
            resolved relative to ``path``.
        path: base directory the names are joined against (defaults to
            this module's directory).

    Returns:
        dict mapping each file name to its full text contents.

    Raises:
        TypeError: if ``files`` is neither a str nor a list.
    """
    contents = {}
    # isinstance() rather than type(...) == ... so subclasses also work.
    if isinstance(files, list):
        # For multiple files, iterate over each.
        for file in files:
            with open(os.path.join(path, file)) as f:
                contents[file] = f.read()
    elif isinstance(files, str):
        with open(os.path.join(path, files)) as f:
            contents[files] = f.read()
    else:
        # Unsupported argument type.
        raise TypeError('[utils_testing] excuse me')
    return contents
def fileContentsToStr(file):
    """Return the entire text contents of *file* as a single string."""
    with open(file, 'r') as handle:
        text = handle.read()
    return text
def makeIOforTest(path, inFileNames, outFileNames):
    """Generate the test parametrization lists for one challenge.

    Walks *path* looking for test-case directories (any directory whose
    path contains ``'tc-'``) and, for each, reads the expected input and
    output files.

    Inputs: root path, expected input file names, expected output file names.
    Output: ``{'in_params': [...], 'out_params': [...]}`` where each list
    holds one ``{filename: contents}`` dict per test-case directory (to be
    used with zip when parametrizing).
    """
    in_sets = []
    out_sets = []
    for dirpath, _, _ in os.walk(path):
        # Only directories that look like test cases contribute a param set.
        if 'tc-' not in dirpath:
            continue
        in_sets.append(
            {name: fileContentsToStr(os.path.join(dirpath, name)) for name in inFileNames}
        )
        out_sets.append(
            {name: fileContentsToStr(os.path.join(dirpath, name)) for name in outFileNames}
        )
    return {'in_params': in_sets,
            'out_params': out_sets}
if __name__=='__main__':
    # local 'testing'
    # NOTE(review): chalNameToChalID and getFilesForChalID are not defined or
    # imported anywhere in this module, so running this file directly raises
    # NameError — presumably they live in a sibling module; import or remove.
    print("chalID for '0002_info_leakage' is:", chalNameToChalID('0002_info_leakage') )
    print("files and filenames:\n", getFilesForChalID(chalNameToChalID('0002_info_leakage')))
    print(FilesToJson(getFilesForChalID(chalNameToChalID('0002_info_leakage'))['fileNames'], path='../Challenges/C_CPP/0001_buffer_overflow'))
    print("\n\n")
    # Example fixture layout for makeIOforTest (file names only; the
    # contents are expected on disk under IO/<challenge>/tc-*/).
    EgPathAsSeenByTests = '0002_info_leakage'
    inFiles = ['database.json', 'func_0009.c']
    outFiles = ['ai.json', 'log.txt']
    outFiles_noLog = ['ai.json']
    print(makeIOforTest('IO/0002_info_leakage', inFiles, outFiles))
| [
"os.path.dirname",
"os.path.join",
"os.walk"
] | [((780, 805), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (795, 805), False, 'import os\n'), ((2197, 2210), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (2204, 2210), False, 'import os\n'), ((1088, 1112), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (1100, 1112), False, 'import os\n'), ((1242, 1267), 'os.path.join', 'os.path.join', (['path', 'files'], {}), '(path, files)\n', (1254, 1267), False, 'import os\n'), ((2383, 2410), 'os.path.join', 'os.path.join', (['dirpath', 'file'], {}), '(dirpath, file)\n', (2395, 2410), False, 'import os\n'), ((2501, 2528), 'os.path.join', 'os.path.join', (['dirpath', 'file'], {}), '(dirpath, file)\n', (2513, 2528), False, 'import os\n')] |
import logging
class Logger(object):
    """Holder for the shared arachne.runtime.rpc logger.

    The handler and formatter are built once, at class-definition time, so
    every caller of ``Logger.logger()`` receives the same configured
    logger instance.
    """
    # Emit to stderr with "[LEVEL path:line] message" formatting, INFO and up.
    stream_handler = logging.StreamHandler()
    formatter = logging.Formatter("[%(levelname)s %(pathname)s:%(lineno)d] %(message)s")
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(logging.INFO)
    # NOTE(review): logging.Logger(...) creates a logger that is NOT
    # registered with the logging manager (unlike logging.getLogger);
    # confirm that isolation is intentional before changing it.
    my_logger = logging.Logger("arachne.runtime.rpc")
    my_logger.addHandler(stream_handler)

    @staticmethod
    def logger():
        """Return the shared, pre-configured logger instance."""
        return Logger.my_logger
| [
"logging.Logger",
"logging.Formatter",
"logging.StreamHandler"
] | [((60, 83), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (81, 83), False, 'import logging\n'), ((100, 172), 'logging.Formatter', 'logging.Formatter', (['"""[%(levelname)s %(pathname)s:%(lineno)d] %(message)s"""'], {}), "('[%(levelname)s %(pathname)s:%(lineno)d] %(message)s')\n", (117, 172), False, 'import logging\n'), ((275, 312), 'logging.Logger', 'logging.Logger', (['"""arachne.runtime.rpc"""'], {}), "('arachne.runtime.rpc')\n", (289, 312), False, 'import logging\n')] |
#pylint: disable = line-too-long
import os
import time
import board
import neopixel
import keypad
import usb_hid
import pwmio
import rainbowio
from adafruit_hid.keyboard import Keyboard
from pykey.keycode import KB_Keycode as KC
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
# Hardware definition: GPIO where RGB LED is connected.
pixel_pin = board.NEOPIXEL
num_pixels = 61  # number of LEDs on the strip
pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=1, auto_write=False)
cyclecount = 0  # running offset for the rainbow animation (bumped per key event)
def rainbow_cycle(wait):
    """Paint one frame of a moving rainbow across the strip, offset by *wait*."""
    for position in range(num_pixels):
        hue = position * 256 // num_pixels + wait
        pixels[position] = rainbowio.colorwheel(hue & 255)
    pixels.show()
# Piezo buzzer on the speaker pin; frequency is changed at runtime for beeps.
buzzer = pwmio.PWMOut(board.SPEAKER, variable_frequency=True)
OFF = 0
ON = 2**15  # 50% duty cycle on a 16-bit PWM — audible square wave
# Hardware definition: Switch Matrix Setup.
keys = keypad.KeyMatrix(
    row_pins=(board.ROW1, board.ROW2, board.ROW3, board.ROW4, board.ROW5),
    column_pins=(board.COL1, board.COL2, board.COL3, board.COL4, board.COL5, board.COL6, board.COL7,
                 board.COL8, board.COL9, board.COL10, board.COL11, board.COL12, board.COL13, board.COL14),
    columns_to_anodes=True,
)
# CONFIGURABLES ------------------------
MACRO_FOLDER = '/layers'
class Layer:
    """One keymap layer: a display name plus the per-key macro definitions."""

    def __init__(self, layerdata):
        """Build a layer from a dict carrying 'name' and 'macros' keys."""
        self.name, self.macros = layerdata['name'], layerdata['macros']
# Neopixel update function
def update_pixels(color):
    """Fill every pixel on the strip with *color*, then push the frame out."""
    for index in range(num_pixels):
        pixels[index] = color
    pixels.show()
# INITIALIZATION -----------------------
# Load all the macro key setups from .py files in MACRO_FOLDER
layers = []
files = os.listdir(MACRO_FOLDER)
files.sort()
for filename in files:
    print(filename)
    if filename.endswith('.py'):
        try:
            # CircuitPython allows importing by path; each layer module is
            # expected to expose a module-level dict named `layer`.
            module = __import__(MACRO_FOLDER + '/' + filename[:-3])
            layers.append(Layer(module.layer))
        except (SyntaxError, ImportError, AttributeError, KeyError, NameError,
                IndexError, TypeError) as err:
            # Skip malformed layer files but report them on the console.
            print(err)
            pass
if not layers:
    print('NO MACRO FILES FOUND')
    while True:
        pass  # nothing to do without layers: halt here forever
layer_count = len(layers)
# print(layer_count)
def get_active_layer(layer_keys_pressed, layer_count):
    """Pick the active layer index.

    Returns the highest layer id among the currently held layer keys
    (0 when none are held or only non-positive ids are held), clamped so
    it never exceeds the last available layer.
    """
    selected = max(layer_keys_pressed, default=0)
    if selected < 0:
        selected = 0
    if selected >= layer_count:
        selected = layer_count - 1
    return selected
# setup variables
keyboard = Keyboard(usb_hid.devices)
keyboard_layout = KeyboardLayoutUS(keyboard)
active_keys = []        # key numbers currently held down
not_sleeping = True
layer_index = 0
# Startup chime: 440 Hz / 880 Hz / 440 Hz, 50 ms each.
buzzer.duty_cycle = ON
buzzer.frequency = 440 #
time.sleep(0.05)
buzzer.frequency = 880 #
time.sleep(0.05)
buzzer.frequency = 440 #
time.sleep(0.05)
buzzer.duty_cycle = OFF
while not_sleeping:
    key_event = keys.events.get()
    if key_event:
        key_number = key_event.key_number
        cyclecount = cyclecount +1
        # Advance the rainbow animation one step per key event.
        rainbow_cycle(cyclecount)
        # keep track of keys being pressed for layer determination
        if key_event.pressed:
            active_keys.append(key_number)
        else:
            active_keys.remove(key_number)
        # reset the layers and identify which layer key is pressed.
        # Layer keys are always looked up in layer 0's macro table.
        layer_keys_pressed = []
        for active_key in active_keys:
            group = layers[0].macros[active_key][2]
            for item in group:
                if isinstance(item, int):
                    if (item >= KC.LAYER_0) and (item <= KC.LAYER_F) :
                        layer_keys_pressed.append(item - KC.LAYER_0)
        layer_index = get_active_layer(layer_keys_pressed, layer_count)
        # print(layer_index)
        # print(layers[layer_index].macros[key_number][1])
        # Macro entry layout: [color, label, sequence] — ints are keycodes,
        # strings are typed out literally.
        group = layers[layer_index].macros[key_number][2]
        color = layers[layer_index].macros[key_number][0]
        if key_event.pressed:
            update_pixels(color)
            for item in group:
                if isinstance(item, int):
                    keyboard.press(item)
                else:
                    keyboard_layout.write(item)
        else:
            # On release, release only the non-negative keycodes.
            for item in group:
                if isinstance(item, int):
                    if item >= 0:
                        keyboard.release(item)
            #update_pixels(0x000000)
    time.sleep(0.002)
| [
"adafruit_hid.keyboard.Keyboard",
"os.listdir",
"rainbowio.colorwheel",
"time.sleep",
"neopixel.NeoPixel",
"adafruit_hid.keyboard_layout_us.KeyboardLayoutUS",
"pwmio.PWMOut",
"keypad.KeyMatrix"
] | [((401, 473), 'neopixel.NeoPixel', 'neopixel.NeoPixel', (['pixel_pin', 'num_pixels'], {'brightness': '(1)', 'auto_write': '(False)'}), '(pixel_pin, num_pixels, brightness=1, auto_write=False)\n', (418, 473), False, 'import neopixel\n'), ((709, 761), 'pwmio.PWMOut', 'pwmio.PWMOut', (['board.SPEAKER'], {'variable_frequency': '(True)'}), '(board.SPEAKER, variable_frequency=True)\n', (721, 761), False, 'import pwmio\n'), ((833, 1149), 'keypad.KeyMatrix', 'keypad.KeyMatrix', ([], {'row_pins': '(board.ROW1, board.ROW2, board.ROW3, board.ROW4, board.ROW5)', 'column_pins': '(board.COL1, board.COL2, board.COL3, board.COL4, board.COL5, board.COL6,\n board.COL7, board.COL8, board.COL9, board.COL10, board.COL11, board.\n COL12, board.COL13, board.COL14)', 'columns_to_anodes': '(True)'}), '(row_pins=(board.ROW1, board.ROW2, board.ROW3, board.ROW4,\n board.ROW5), column_pins=(board.COL1, board.COL2, board.COL3, board.\n COL4, board.COL5, board.COL6, board.COL7, board.COL8, board.COL9, board\n .COL10, board.COL11, board.COL12, board.COL13, board.COL14),\n columns_to_anodes=True)\n', (849, 1149), False, 'import keypad\n'), ((1766, 1790), 'os.listdir', 'os.listdir', (['MACRO_FOLDER'], {}), '(MACRO_FOLDER)\n', (1776, 1790), False, 'import os\n'), ((2637, 2662), 'adafruit_hid.keyboard.Keyboard', 'Keyboard', (['usb_hid.devices'], {}), '(usb_hid.devices)\n', (2645, 2662), False, 'from adafruit_hid.keyboard import Keyboard\n'), ((2681, 2707), 'adafruit_hid.keyboard_layout_us.KeyboardLayoutUS', 'KeyboardLayoutUS', (['keyboard'], {}), '(keyboard)\n', (2697, 2707), False, 'from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS\n'), ((2811, 2827), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (2821, 2827), False, 'import time\n'), ((2854, 2870), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (2864, 2870), False, 'import time\n'), ((2897, 2913), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (2907, 2913), False, 'import time\n'), ((4480, 4497), 
'time.sleep', 'time.sleep', (['(0.002)'], {}), '(0.002)\n', (4490, 4497), False, 'import time\n'), ((631, 667), 'rainbowio.colorwheel', 'rainbowio.colorwheel', (['(rc_index & 255)'], {}), '(rc_index & 255)\n', (651, 667), False, 'import rainbowio\n')] |
from pytest_factoryboy import register
from tests.factories.specification import (
CallbackFactory,
ComponentsFactory,
ContactFactory,
DiscriminatorFactory,
EncodingFactory,
ExampleFactory,
ExternalDocumentationFactory,
HeaderFactory,
InfoFactory,
LicenseFactory,
LinkFactory,
MediaTypeFactory,
OAuthFlowFactory,
OAuthFlowsFactory,
OpenAPIFactory,
OperationFactory,
ParameterFactory,
PathItemFactory,
PathsFactory,
ReferenceFactory,
RequestBodyFactory,
ResponseFactory,
ResponsesFactory,
SchemaFactory,
SecurityRequirementFactory,
SecuritySchemeFactory,
ServerFactory,
ServerVariableFactory,
TagFactory,
)
# Register every factory as a pytest fixture, in the original order.
# Entries are (factory, fixture_name); a None name uses the default naming.
_FACTORY_REGISTRATIONS = [
    (OpenAPIFactory, None),
    (InfoFactory, None),
    (ContactFactory, None),
    (LicenseFactory, None),
    (ServerFactory, None),
    (ServerVariableFactory, None),
    (ComponentsFactory, None),
    (PathsFactory, None),
    (PathItemFactory, None),
    (OperationFactory, None),
    (ExternalDocumentationFactory, None),
    (ParameterFactory, None),
    (RequestBodyFactory, None),
    (MediaTypeFactory, None),
    (EncodingFactory, None),
    (ResponsesFactory, None),
    (ResponseFactory, None),
    (CallbackFactory, None),
    (ExampleFactory, None),
    (LinkFactory, None),
    (HeaderFactory, None),
    (TagFactory, None),
    (ReferenceFactory, None),
    (SchemaFactory, None),
    (SchemaFactory, "second_schema"),
    (DiscriminatorFactory, None),
    (SecuritySchemeFactory, None),
    (OAuthFlowsFactory, "oauth_flows"),
    (OAuthFlowFactory, "oauth_flow"),
    (OAuthFlowFactory, "second_oauth_flow"),
    (SecurityRequirementFactory, None),
]
for _factory, _fixture_name in _FACTORY_REGISTRATIONS:
    if _fixture_name is None:
        register(_factory)
    else:
        register(_factory, _fixture_name)
| [
"pytest_factoryboy.register"
] | [((724, 748), 'pytest_factoryboy.register', 'register', (['OpenAPIFactory'], {}), '(OpenAPIFactory)\n', (732, 748), False, 'from pytest_factoryboy import register\n'), ((749, 770), 'pytest_factoryboy.register', 'register', (['InfoFactory'], {}), '(InfoFactory)\n', (757, 770), False, 'from pytest_factoryboy import register\n'), ((771, 795), 'pytest_factoryboy.register', 'register', (['ContactFactory'], {}), '(ContactFactory)\n', (779, 795), False, 'from pytest_factoryboy import register\n'), ((796, 820), 'pytest_factoryboy.register', 'register', (['LicenseFactory'], {}), '(LicenseFactory)\n', (804, 820), False, 'from pytest_factoryboy import register\n'), ((821, 844), 'pytest_factoryboy.register', 'register', (['ServerFactory'], {}), '(ServerFactory)\n', (829, 844), False, 'from pytest_factoryboy import register\n'), ((845, 876), 'pytest_factoryboy.register', 'register', (['ServerVariableFactory'], {}), '(ServerVariableFactory)\n', (853, 876), False, 'from pytest_factoryboy import register\n'), ((877, 904), 'pytest_factoryboy.register', 'register', (['ComponentsFactory'], {}), '(ComponentsFactory)\n', (885, 904), False, 'from pytest_factoryboy import register\n'), ((905, 927), 'pytest_factoryboy.register', 'register', (['PathsFactory'], {}), '(PathsFactory)\n', (913, 927), False, 'from pytest_factoryboy import register\n'), ((928, 953), 'pytest_factoryboy.register', 'register', (['PathItemFactory'], {}), '(PathItemFactory)\n', (936, 953), False, 'from pytest_factoryboy import register\n'), ((954, 980), 'pytest_factoryboy.register', 'register', (['OperationFactory'], {}), '(OperationFactory)\n', (962, 980), False, 'from pytest_factoryboy import register\n'), ((981, 1019), 'pytest_factoryboy.register', 'register', (['ExternalDocumentationFactory'], {}), '(ExternalDocumentationFactory)\n', (989, 1019), False, 'from pytest_factoryboy import register\n'), ((1020, 1046), 'pytest_factoryboy.register', 'register', (['ParameterFactory'], {}), '(ParameterFactory)\n', 
(1028, 1046), False, 'from pytest_factoryboy import register\n'), ((1047, 1075), 'pytest_factoryboy.register', 'register', (['RequestBodyFactory'], {}), '(RequestBodyFactory)\n', (1055, 1075), False, 'from pytest_factoryboy import register\n'), ((1076, 1102), 'pytest_factoryboy.register', 'register', (['MediaTypeFactory'], {}), '(MediaTypeFactory)\n', (1084, 1102), False, 'from pytest_factoryboy import register\n'), ((1103, 1128), 'pytest_factoryboy.register', 'register', (['EncodingFactory'], {}), '(EncodingFactory)\n', (1111, 1128), False, 'from pytest_factoryboy import register\n'), ((1129, 1155), 'pytest_factoryboy.register', 'register', (['ResponsesFactory'], {}), '(ResponsesFactory)\n', (1137, 1155), False, 'from pytest_factoryboy import register\n'), ((1156, 1181), 'pytest_factoryboy.register', 'register', (['ResponseFactory'], {}), '(ResponseFactory)\n', (1164, 1181), False, 'from pytest_factoryboy import register\n'), ((1182, 1207), 'pytest_factoryboy.register', 'register', (['CallbackFactory'], {}), '(CallbackFactory)\n', (1190, 1207), False, 'from pytest_factoryboy import register\n'), ((1208, 1232), 'pytest_factoryboy.register', 'register', (['ExampleFactory'], {}), '(ExampleFactory)\n', (1216, 1232), False, 'from pytest_factoryboy import register\n'), ((1233, 1254), 'pytest_factoryboy.register', 'register', (['LinkFactory'], {}), '(LinkFactory)\n', (1241, 1254), False, 'from pytest_factoryboy import register\n'), ((1255, 1278), 'pytest_factoryboy.register', 'register', (['HeaderFactory'], {}), '(HeaderFactory)\n', (1263, 1278), False, 'from pytest_factoryboy import register\n'), ((1279, 1299), 'pytest_factoryboy.register', 'register', (['TagFactory'], {}), '(TagFactory)\n', (1287, 1299), False, 'from pytest_factoryboy import register\n'), ((1300, 1326), 'pytest_factoryboy.register', 'register', (['ReferenceFactory'], {}), '(ReferenceFactory)\n', (1308, 1326), False, 'from pytest_factoryboy import register\n'), ((1327, 1350), 
'pytest_factoryboy.register', 'register', (['SchemaFactory'], {}), '(SchemaFactory)\n', (1335, 1350), False, 'from pytest_factoryboy import register\n'), ((1351, 1391), 'pytest_factoryboy.register', 'register', (['SchemaFactory', '"""second_schema"""'], {}), "(SchemaFactory, 'second_schema')\n", (1359, 1391), False, 'from pytest_factoryboy import register\n'), ((1392, 1422), 'pytest_factoryboy.register', 'register', (['DiscriminatorFactory'], {}), '(DiscriminatorFactory)\n', (1400, 1422), False, 'from pytest_factoryboy import register\n'), ((1423, 1454), 'pytest_factoryboy.register', 'register', (['SecuritySchemeFactory'], {}), '(SecuritySchemeFactory)\n', (1431, 1454), False, 'from pytest_factoryboy import register\n'), ((1455, 1497), 'pytest_factoryboy.register', 'register', (['OAuthFlowsFactory', '"""oauth_flows"""'], {}), "(OAuthFlowsFactory, 'oauth_flows')\n", (1463, 1497), False, 'from pytest_factoryboy import register\n'), ((1498, 1538), 'pytest_factoryboy.register', 'register', (['OAuthFlowFactory', '"""oauth_flow"""'], {}), "(OAuthFlowFactory, 'oauth_flow')\n", (1506, 1538), False, 'from pytest_factoryboy import register\n'), ((1539, 1586), 'pytest_factoryboy.register', 'register', (['OAuthFlowFactory', '"""second_oauth_flow"""'], {}), "(OAuthFlowFactory, 'second_oauth_flow')\n", (1547, 1586), False, 'from pytest_factoryboy import register\n'), ((1587, 1623), 'pytest_factoryboy.register', 'register', (['SecurityRequirementFactory'], {}), '(SecurityRequirementFactory)\n', (1595, 1623), False, 'from pytest_factoryboy import register\n')] |
import logging
from os.path import expanduser, join
from unittest import mock
import pytest
from click.testing import CliRunner
from configparser import ConfigParser
from apparate.configure import configure
from apparate.cli_commands import upload, upload_and_update
logging.basicConfig(level=logging.INFO)
# Same logger name as apparate.cli_commands uses, so the mock assertions
# below can pass the identical logger object to update_databricks.
logger = logging.getLogger('apparate.cli_commands')
def test_configure_no_existing_config():
    """`apparate configure` prompts for host/token/folder and writes ~/.apparatecfg."""
    expected_stdout = (
        'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
        'https://test_host\n'
        'Databricks API token: \n'
        'Repeat for confirmation: \n'
        'Databricks folder for production libraries: test_folder\n'
    )
    filename = join(expanduser('~'), '.apparatecfg')
    # Expected interactions with open(): read the config, then rewrite it.
    expected_call_list = [
        mock.call(filename, encoding=None),
        mock.call(filename, 'w+'),
        mock.call().write('[DEFAULT]\n'),
        mock.call().write('host = https://test_host\n'),
        mock.call().write('token = test_token\n'),
        mock.call().write('prod_folder = test_folder\n'),
        mock.call().write('\n'),
    ]
    # mock_open with empty read_data simulates a missing/empty config file.
    with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
        runner = CliRunner()
        result = runner.invoke(
            configure,
            input=(
                'https://test_host\n'
                'test_token\n'
                'test_token\n'
                'test_folder\n'
            ),
        )
    m_open.assert_has_calls(expected_call_list, any_order=True)
    assert not result.exception
    assert result.output == expected_stdout
def test_configure_extra_slash_in_host():
    """A trailing slash on the host input is stripped before writing the config."""
    expected_stdout = (
        'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
        'https://test_host/\n'
        'Databricks API token: \n'
        'Repeat for confirmation: \n'
        'Databricks folder for production libraries: test_folder\n'
    )
    filename = join(expanduser('~'), '.apparatecfg')
    # The written host has no trailing slash even though the input did.
    expected_call_list = [
        mock.call(filename, encoding=None),
        mock.call(filename, 'w+'),
        mock.call().write('[DEFAULT]\n'),
        mock.call().write('host = https://test_host\n'),
        mock.call().write('token = test_token\n'),
        mock.call().write('prod_folder = test_folder\n'),
        mock.call().write('\n'),
    ]
    # mock_open with empty read_data simulates a missing/empty config file.
    with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
        runner = CliRunner()
        result = runner.invoke(
            configure,
            input=(
                'https://test_host/\n'
                'test_token\n'
                'test_token\n'
                'test_folder\n'
            ),
        )
    m_open.assert_has_calls(expected_call_list, any_order=True)
    assert not result.exception
    assert result.output == expected_stdout
def test_configure_extra_slash_in_folder():
    """A trailing slash on the folder input is stripped before writing the config."""
    expected_stdout = (
        'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
        'https://test_host\n'
        'Databricks API token: \n'
        'Repeat for confirmation: \n'
        'Databricks folder for production libraries: test_folder/\n'
    )
    filename = join(expanduser('~'), '.apparatecfg')
    # The written folder has no trailing slash even though the input did.
    expected_call_list = [
        mock.call(filename, encoding=None),
        mock.call(filename, 'w+'),
        mock.call().write('[DEFAULT]\n'),
        mock.call().write('host = https://test_host\n'),
        mock.call().write('token = test_token\n'),
        mock.call().write('prod_folder = test_folder\n'),
        mock.call().write('\n'),
    ]
    # mock_open with empty read_data simulates a missing/empty config file.
    with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
        runner = CliRunner()
        result = runner.invoke(
            configure,
            input=(
                'https://test_host\n'
                'test_token\n'
                'test_token\n'
                'test_folder/\n'
            ),
        )
    m_open.assert_has_calls(expected_call_list, any_order=True)
    assert not result.exception
    assert result.output == expected_stdout
def test_configure_no_http_in_host():
    """A host without an http(s) scheme is rejected and the user is re-prompted."""
    expected_stdout = (
        'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
        'test_host\n'
        "looks like there's an issue - make sure the host name starts "
        'with http: https://test_host\n'
        'Databricks API token: \n'
        'Repeat for confirmation: \n'
        'Databricks folder for production libraries: test_folder\n'
    )
    filename = join(expanduser('~'), '.apparatecfg')
    expected_call_list = [
        mock.call(filename, encoding=None),
        mock.call(filename, 'w+'),
        mock.call().write('[DEFAULT]\n'),
        mock.call().write('host = https://test_host\n'),
        # FIX: this previously expected 'token = <PASSWORD>', which can never
        # match what configure writes; aligned with the sibling tests.
        mock.call().write('token = test_token\n'),
        mock.call().write('prod_folder = test_folder\n'),
        mock.call().write('\n'),
    ]
    # mock_open with empty read_data simulates a missing/empty config file.
    with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
        runner = CliRunner()
        result = runner.invoke(
            configure,
            input=(
                'test_host\n'
                'https://test_host\n'
                'test_token\n'
                'test_token\n'
                'test_folder\n'
            ),
        )
    m_open.assert_has_calls(expected_call_list, any_order=True)
    assert not result.exception
    assert result.output == expected_stdout
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload(update_databricks_mock, config_mock, existing_config):
    """`upload --path ...` falls back to the token and folder from config."""
    config_mock.return_value = existing_config
    outcome = CliRunner().invoke(upload, ['--path', '/path/to/egg'])
    config_mock.assert_called_once()
    update_databricks_mock.assert_called_with(
        logger,
        '/path/to/egg',
        'test_token',
        'test_folder',
        cleanup=False,
        update_jobs=False,
    )
    assert not outcome.exception
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload_all_options(
    update_databricks_mock,
    config_mock,
    existing_config
):
    """Explicit --token/--folder command-line options override the config."""
    config_mock.return_value = existing_config
    cli_args = [
        '--path', '/path/to/egg',
        '--token', 'new_token',
        '--folder', 'new_folder',
    ]
    outcome = CliRunner().invoke(upload, cli_args)
    config_mock.assert_called_once()
    update_databricks_mock.assert_called_with(
        logger,
        '/path/to/egg',
        'new_token',
        'new_folder',
        cleanup=False,
        update_jobs=False,
    )
    assert not outcome.exception
@pytest.mark.fixture('empty_config')
@mock.patch('apparate.cli_commands._load_config')
def test_upload_missing_token(config_mock, empty_config):
    """Without a stored or explicit token, upload raises a helpful error."""
    config_mock.return_value = empty_config
    outcome = CliRunner().invoke(
        upload,
        ['--path', '/path/to/egg', '--folder', 'test_folder'],
    )
    expected_message = (
        'no token found - either provide a command line argument or set up'
        ' a default by running `apparate configure`'
    )
    assert str(outcome.exception) == expected_message
@pytest.mark.fixture('empty_config')
@mock.patch('apparate.cli_commands._load_config')
def test_upload_missing_folder(config_mock, empty_config):
    """Without a stored or explicit folder, upload raises a helpful error."""
    config_mock.return_value = empty_config
    outcome = CliRunner().invoke(
        upload,
        ['--path', '/path/to/egg', '--token', 'test_token'],
    )
    expected_message = (
        'no folder found - either provide a command line argument or set up'
        ' a default by running `apparate configure`'
    )
    assert str(outcome.exception) == expected_message
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload_and_update_cleanup(
    update_databricks_mock,
    config_mock,
    existing_config
):
    """upload_and_update defaults to cleanup=True and update_jobs=True."""
    config_mock.return_value = existing_config
    outcome = CliRunner().invoke(upload_and_update, ['--path', '/path/to/egg'])
    config_mock.assert_called_once()
    update_databricks_mock.assert_called_with(
        logger,
        '/path/to/egg',
        'test_token',
        'test_folder',
        cleanup=True,
        update_jobs=True,
    )
    assert not outcome.exception
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload_and_update_no_cleanup(
    update_databricks_mock,
    config_mock,
    existing_config
):
    """The --no-cleanup flag disables cleanup while still updating jobs."""
    config_mock.return_value = existing_config
    outcome = CliRunner().invoke(
        upload_and_update,
        ['--path', '/path/to/egg', '--no-cleanup'],
    )
    config_mock.assert_called_once()
    update_databricks_mock.assert_called_with(
        logger,
        '/path/to/egg',
        'test_token',
        'test_folder',
        cleanup=False,
        update_jobs=True,
    )
    assert not outcome.exception
@mock.patch('apparate.cli_commands._load_config')
def test_upload_and_update_missing_token(config_mock):
    """A config carrying only prod_folder yields the missing-token error."""
    folder_only_config = ConfigParser()
    folder_only_config['DEFAULT'] = {'prod_folder': 'test_folder'}
    config_mock.return_value = folder_only_config
    outcome = CliRunner().invoke(upload_and_update, ['--path', '/path/to/egg'])
    config_mock.assert_called_once()
    expected_message = (
        'no token found - either provide a command line argument or set up'
        ' a default by running `apparate configure`'
    )
    assert str(outcome.exception) == expected_message
@pytest.mark.fixture('empty_config')
@mock.patch('apparate.cli_commands._load_config')
def test_upload_and_update_missing_folder(config_mock, empty_config):
    """Without a stored or explicit folder, upload_and_update raises an error."""
    config_mock.return_value = empty_config
    runner = CliRunner()
    # FIX: the token value had been mangled to '<PASSWORD>_token'; use the
    # same 'test_token' literal as the other tests in this module.
    result = runner.invoke(
        upload_and_update,
        ['-p', '/path/to/egg', '--token', 'test_token']
    )
    config_mock.assert_called_once()
    assert str(result.exception) == (
        'no folder found - either provide a command line argument or set up'
        ' a default by running `apparate configure`'
    )
| [
"logging.basicConfig",
"logging.getLogger",
"configparser.ConfigParser",
"unittest.mock.mock_open",
"unittest.mock.call",
"pytest.mark.fixture",
"click.testing.CliRunner",
"unittest.mock.patch",
"os.path.expanduser"
] | [((271, 310), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (290, 310), False, 'import logging\n'), ((320, 362), 'logging.getLogger', 'logging.getLogger', (['"""apparate.cli_commands"""'], {}), "('apparate.cli_commands')\n", (337, 362), False, 'import logging\n'), ((5368, 5406), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""existing_config"""'], {}), "('existing_config')\n", (5387, 5406), False, 'import pytest\n'), ((5408, 5456), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (5418, 5456), False, 'from unittest import mock\n'), ((5458, 5511), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands.update_databricks"""'], {}), "('apparate.cli_commands.update_databricks')\n", (5468, 5511), False, 'from unittest import mock\n'), ((6003, 6041), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""existing_config"""'], {}), "('existing_config')\n", (6022, 6041), False, 'import pytest\n'), ((6043, 6091), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (6053, 6091), False, 'from unittest import mock\n'), ((6093, 6146), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands.update_databricks"""'], {}), "('apparate.cli_commands.update_databricks')\n", (6103, 6146), False, 'from unittest import mock\n'), ((6794, 6829), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""empty_config"""'], {}), "('empty_config')\n", (6813, 6829), False, 'import pytest\n'), ((6831, 6879), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (6841, 6879), False, 'from unittest import mock\n'), ((7298, 7333), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""empty_config"""'], {}), "('empty_config')\n", (7317, 7333), False, 'import pytest\n'), ((7335, 7383), 
'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (7345, 7383), False, 'from unittest import mock\n'), ((7802, 7840), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""existing_config"""'], {}), "('existing_config')\n", (7821, 7840), False, 'import pytest\n'), ((7842, 7890), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (7852, 7890), False, 'from unittest import mock\n'), ((7892, 7945), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands.update_databricks"""'], {}), "('apparate.cli_commands.update_databricks')\n", (7902, 7945), False, 'from unittest import mock\n'), ((8479, 8517), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""existing_config"""'], {}), "('existing_config')\n", (8498, 8517), False, 'import pytest\n'), ((8519, 8567), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (8529, 8567), False, 'from unittest import mock\n'), ((8569, 8622), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands.update_databricks"""'], {}), "('apparate.cli_commands.update_databricks')\n", (8579, 8622), False, 'from unittest import mock\n'), ((9176, 9224), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (9186, 9224), False, 'from unittest import mock\n'), ((9765, 9800), 'pytest.mark.fixture', 'pytest.mark.fixture', (['"""empty_config"""'], {}), "('empty_config')\n", (9784, 9800), False, 'import pytest\n'), ((9802, 9850), 'unittest.mock.patch', 'mock.patch', (['"""apparate.cli_commands._load_config"""'], {}), "('apparate.cli_commands._load_config')\n", (9812, 9850), False, 'from unittest import mock\n'), ((5645, 5656), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (5654, 5656), False, 'from click.testing 
import CliRunner\n'), ((6306, 6317), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (6315, 6317), False, 'from click.testing import CliRunner\n'), ((6997, 7008), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (7006, 7008), False, 'from click.testing import CliRunner\n'), ((7502, 7513), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (7511, 7513), False, 'from click.testing import CliRunner\n'), ((8112, 8123), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (8121, 8123), False, 'from click.testing import CliRunner\n'), ((8792, 8803), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (8801, 8803), False, 'from click.testing import CliRunner\n'), ((9303, 9317), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (9315, 9317), False, 'from configparser import ConfigParser\n'), ((9443, 9454), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (9452, 9454), False, 'from click.testing import CliRunner\n'), ((9980, 9991), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (9989, 9991), False, 'from click.testing import CliRunner\n'), ((708, 723), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (718, 723), False, 'from os.path import expanduser, join\n'), ((776, 810), 'unittest.mock.call', 'mock.call', (['filename'], {'encoding': 'None'}), '(filename, encoding=None)\n', (785, 810), False, 'from unittest import mock\n'), ((820, 845), 'unittest.mock.call', 'mock.call', (['filename', '"""w+"""'], {}), "(filename, 'w+')\n", (829, 845), False, 'from unittest import mock\n'), ((1190, 1201), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1199, 1201), False, 'from click.testing import CliRunner\n'), ((1926, 1941), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (1936, 1941), False, 'from os.path import expanduser, join\n'), ((1994, 2028), 'unittest.mock.call', 'mock.call', (['filename'], {'encoding': 'None'}), '(filename, encoding=None)\n', (2003, 2028), False, 
'from unittest import mock\n'), ((2038, 2063), 'unittest.mock.call', 'mock.call', (['filename', '"""w+"""'], {}), "(filename, 'w+')\n", (2047, 2063), False, 'from unittest import mock\n'), ((2408, 2419), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (2417, 2419), False, 'from click.testing import CliRunner\n'), ((3147, 3162), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (3157, 3162), False, 'from os.path import expanduser, join\n'), ((3215, 3249), 'unittest.mock.call', 'mock.call', (['filename'], {'encoding': 'None'}), '(filename, encoding=None)\n', (3224, 3249), False, 'from unittest import mock\n'), ((3259, 3284), 'unittest.mock.call', 'mock.call', (['filename', '"""w+"""'], {}), "(filename, 'w+')\n", (3268, 3284), False, 'from unittest import mock\n'), ((3629, 3640), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3638, 3640), False, 'from click.testing import CliRunner\n'), ((4466, 4481), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (4476, 4481), False, 'from os.path import expanduser, join\n'), ((4534, 4568), 'unittest.mock.call', 'mock.call', (['filename'], {'encoding': 'None'}), '(filename, encoding=None)\n', (4543, 4568), False, 'from unittest import mock\n'), ((4578, 4603), 'unittest.mock.call', 'mock.call', (['filename', '"""w+"""'], {}), "(filename, 'w+')\n", (4587, 4603), False, 'from unittest import mock\n'), ((4946, 4957), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (4955, 4957), False, 'from click.testing import CliRunner\n'), ((1132, 1160), 'unittest.mock.mock_open', 'mock.mock_open', ([], {'read_data': '""""""'}), "(read_data='')\n", (1146, 1160), False, 'from unittest import mock\n'), ((2350, 2378), 'unittest.mock.mock_open', 'mock.mock_open', ([], {'read_data': '""""""'}), "(read_data='')\n", (2364, 2378), False, 'from unittest import mock\n'), ((3571, 3599), 'unittest.mock.mock_open', 'mock.mock_open', ([], {'read_data': '""""""'}), "(read_data='')\n", (3585, 3599), 
False, 'from unittest import mock\n'), ((4888, 4916), 'unittest.mock.mock_open', 'mock.mock_open', ([], {'read_data': '""""""'}), "(read_data='')\n", (4902, 4916), False, 'from unittest import mock\n'), ((855, 866), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (864, 866), False, 'from unittest import mock\n'), ((897, 908), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (906, 908), False, 'from unittest import mock\n'), ((954, 965), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (963, 965), False, 'from unittest import mock\n'), ((1005, 1016), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (1014, 1016), False, 'from unittest import mock\n'), ((1063, 1074), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (1072, 1074), False, 'from unittest import mock\n'), ((2073, 2084), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (2082, 2084), False, 'from unittest import mock\n'), ((2115, 2126), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (2124, 2126), False, 'from unittest import mock\n'), ((2172, 2183), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (2181, 2183), False, 'from unittest import mock\n'), ((2223, 2234), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (2232, 2234), False, 'from unittest import mock\n'), ((2281, 2292), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (2290, 2292), False, 'from unittest import mock\n'), ((3294, 3305), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (3303, 3305), False, 'from unittest import mock\n'), ((3336, 3347), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (3345, 3347), False, 'from unittest import mock\n'), ((3393, 3404), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (3402, 3404), False, 'from unittest import mock\n'), ((3444, 3455), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (3453, 3455), False, 'from unittest import mock\n'), ((3502, 3513), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (3511, 3513), False, 'from unittest import 
mock\n'), ((4613, 4624), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (4622, 4624), False, 'from unittest import mock\n'), ((4655, 4666), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (4664, 4666), False, 'from unittest import mock\n'), ((4712, 4723), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (4721, 4723), False, 'from unittest import mock\n'), ((4761, 4772), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (4770, 4772), False, 'from unittest import mock\n'), ((4819, 4830), 'unittest.mock.call', 'mock.call', ([], {}), '()\n', (4828, 4830), False, 'from unittest import mock\n')] |
# Flight duration model: Just distance
# In this exercise you'll build a regression model to predict flight duration (the duration column).
# For the moment you'll keep the model simple, including only the distance of the flight (the km column) as a predictor.
# The data are in flights. The first few records are displayed in the terminal. These data have also been split into training and testing sets and are available as flights_train and flights_test.
# Instructions
# 100 XP
# Create a linear regression object. Specify the name of the label column. Fit it to the training data.
# Make predictions on the testing data.
# Create a regression evaluator object and use it to evaluate RMSE on the testing data.
from pyspark.ml.regression import LinearRegression
from pyspark.ml.evaluation import RegressionEvaluator

# Train a linear regression model on the training split, with flight
# duration as the label column.
duration_lr = LinearRegression(labelCol='duration')
fitted_model = duration_lr.fit(flights_train)

# Score the held-out flights and inspect a handful of predicted durations.
test_predictions = fitted_model.transform(flights_test)
test_predictions.select('duration', 'prediction').show(5, False)

# Evaluate RMSE (the evaluator's default metric) on the test predictions.
evaluator = RegressionEvaluator(labelCol='duration')
evaluator.evaluate(test_predictions)
"pyspark.ml.evaluation.RegressionEvaluator",
"pyspark.ml.regression.LinearRegression"
] | [((892, 929), 'pyspark.ml.regression.LinearRegression', 'LinearRegression', ([], {'labelCol': '"""duration"""'}), "(labelCol='duration')\n", (908, 929), False, 'from pyspark.ml.regression import LinearRegression\n'), ((1158, 1198), 'pyspark.ml.evaluation.RegressionEvaluator', 'RegressionEvaluator', ([], {'labelCol': '"""duration"""'}), "(labelCol='duration')\n", (1177, 1198), False, 'from pyspark.ml.evaluation import RegressionEvaluator\n')] |
"""
Main Larch interpreter
Safe(ish) evaluator of python expressions, using ast module.
The emphasis here is on mathematical expressions, and so
numpy functions are imported if available and used.
"""
from __future__ import division, print_function
import os
import sys
import ast
import math
import numpy
from . import builtins
from . import site_config
from .symboltable import SymbolTable, Group, isgroup
from .larchlib import LarchExceptionHolder, Procedure, DefinedVariable
from .utils import Closure
# Map from ast operator node classes to the callables implementing them.
# Used by Interpreter.on_unaryop / on_binop / on_boolop / on_compare below.
# Most entries are binary and take (a, b); the last four (Invert, Not,
# UAdd, USub) are unary and take a single operand.
OPERATORS = {ast.Is: lambda a, b: a is b,
             ast.IsNot: lambda a, b: a is not b,
             ast.In: lambda a, b: a in b,
             ast.NotIn: lambda a, b: a not in b,
             ast.Add: lambda a, b: a + b,
             ast.BitAnd: lambda a, b: a & b,
             ast.BitOr: lambda a, b: a | b,
             ast.BitXor: lambda a, b: a ^ b,
             ast.Div: lambda a, b: a / b,
             ast.FloorDiv: lambda a, b: a // b,
             ast.LShift: lambda a, b: a << b,
             ast.RShift: lambda a, b: a >> b,
             ast.Mult: lambda a, b: a * b,
             ast.Pow: lambda a, b: a ** b,
             ast.Sub: lambda a, b: a - b,
             ast.Mod: lambda a, b: a % b,
             ast.And: lambda a, b: a and b,
             ast.Or: lambda a, b: a or b,
             ast.Eq: lambda a, b: a == b,
             ast.Gt: lambda a, b: a > b,
             ast.GtE: lambda a, b: a >= b,
             ast.Lt: lambda a, b: a < b,
             ast.LtE: lambda a, b: a <= b,
             ast.NotEq: lambda a, b: a != b,
             ast.Invert: lambda a: ~a,
             ast.Not: lambda a: not a,
             ast.UAdd: lambda a: +a,
             ast.USub: lambda a: -a}
class Interpreter:
    """Larch program compiler and interpreter.

    Compiles expressions and statements to an AST representation using
    python's ast module, then executes that AST using a custom
    SymbolTable for named objects (variables, functions).  This gives a
    restricted version of Python with slightly modified namespace rules.
    The program syntax here is expected to be valid Python, but may have
    been translated as with the inputText module.

    The following Python syntax is not supported:
        Exec, Lambda, Class, Global, Generators, Yield, Decorators

    In addition, Function is greatly altered so as to allow a Larch
    procedure.
    """
    # Node types this interpreter can evaluate.  Each entry 'xxx' must have
    # a matching on_xxx method; __init__ collects them into node_handlers.
    supported_nodes = ('arg', 'assert', 'assign', 'attribute', 'augassign',
                       'binop', 'boolop', 'break', 'call', 'compare',
                       'continue', 'delete', 'dict', 'ellipsis',
                       'excepthandler', 'expr', 'expression', 'extslice',
                       'for', 'functiondef', 'if', 'ifexp', 'import',
                       'importfrom', 'index', 'interrupt', 'list',
                       'listcomp', 'module', 'name', 'num', 'pass',
                       'print', 'raise', 'repr', 'return', 'slice', 'str',
                       'subscript', 'tryexcept', 'tuple', 'unaryop',
                       'while')
    def __init__(self, symtable=None, writer=None):
        """Create an interpreter.

        symtable: SymbolTable to use (a new one is created if None).
        writer:   file-like destination for print output (default sys.stdout).
        """
        self.writer = writer or sys.stdout
        if symtable is None:
            symtable = SymbolTable(larch=self)
        self.symtable = symtable
        self._interrupt = None
        self.error = []
        self.expr = None
        self.retval = None
        self.func = None
        self.fname = '<stdin>'
        self.lineno = 0
        # populate the _builtin and _math groups with names from the
        # builtins module tables, python's math module, and numpy
        builtingroup = getattr(symtable,'_builtin')
        mathgroup = getattr(symtable,'_math')
        # expose 'j' as the imaginary unit
        setattr(mathgroup, 'j', 1j)
        for sym in builtins.from_math:
            setattr(mathgroup, sym, getattr(math, sym))
        for sym in builtins.from_builtin:
            # NOTE(review): __builtins__ is a dict only in the __main__
            # module; when imported it is a module -- TODO confirm this
            # subscripting is safe in all import contexts
            setattr(builtingroup, sym, __builtins__[sym])
        for sym in builtins.from_numpy:
            try:
                setattr(mathgroup, sym, getattr(numpy, sym))
            except AttributeError:
                # skip names absent from this numpy version
                pass
        for fname, sym in list(builtins.numpy_renames.items()):
            setattr(mathgroup, fname, getattr(numpy, sym))
        # local funcs are wrapped in Closures so they receive _larch=self
        for fname, fcn in list(builtins.local_funcs.items()):
            setattr(builtingroup, fname,
                    Closure(func=fcn, _larch=self, _name=fname))
        setattr(builtingroup, 'definevar',
                Closure(func=self.set_definedvariable))
        # add all plugins in standard plugins folder
        plugins_dir = os.path.join(site_config.sys_larchdir, 'plugins')
        for pname in os.listdir(plugins_dir):
            pdir = os.path.join(plugins_dir, pname)
            if os.path.isdir(pdir):
                self.add_plugin(pdir)
        # dispatch table: node class name (lowercased) -> bound handler
        self.node_handlers = dict(((node, getattr(self, "on_%s" % node))
                                   for node in self.supported_nodes))
    def add_plugin(self, mod, **kws):
        """add plugin components from plugin directory"""
        builtins._addplugin(mod, _larch=self, **kws)
    def set_definedvariable(self, name, expr):
        """define a defined variable (re-evaluated on each access)"""
        self.symtable.set_symbol(name,
                                 DefinedVariable(expr=expr, _larch=self))
    def unimplemented(self, node):
        """record a NotImplementedError for unsupported node types"""
        self.raise_exception(node, exc=NotImplementedError,
                             msg="'%s' not supported" % (node.__class__.__name__))
    def raise_exception(self, node, exc=None, msg='', expr=None,
                        fname=None, lineno=None, func=None):
        """record an exception on self.error and interrupt execution.

        Missing context values (expr, fname, lineno, func) default to the
        interpreter's current state.
        """
        if self.error is None:
            self.error = []
        if expr is None:
            expr = self.expr
        if fname is None:
            fname = self.fname
        if lineno is None:
            lineno = self.lineno
        if func is None:
            func = self.func
        if len(self.error) > 0 and not isinstance(node, ast.Module):
            msg = '%s' % msg
        err = LarchExceptionHolder(node, exc=exc, msg=msg, expr=expr,
                                   fname=fname, lineno=lineno, func=func)
        # an ast.Break() sentinel makes any running loop bail out (see
        # on_while / on_for)
        self._interrupt = ast.Break()
        self.error.append(err)
        self.symtable._sys.last_error = err
        #raise RuntimeError
    # main entry point for Ast node evaluation
    #  parse:  text of statements -> ast
    #  run:    ast -> result
    #  eval:   string statement -> result = run(parse(statement))
    def parse(self, text, fname=None, lineno=-1):
        """parse statement/expression text to an ast representation"""
        self.expr = text
        try:
            return ast.parse(text)
        except:
            self.raise_exception(None, exc=SyntaxError, msg='Syntax Error',
                                 expr=text, fname=fname, lineno=lineno)
    def run(self, node, expr=None, func=None,
            fname=None, lineno=None, with_raise=False):
        """execute a parsed ast node, dispatching to the on_* handlers"""
        # Note: keep the 'node is None' test: internal code here may run
        # run(None) and expect a None in return.
        # print(" Run", node, expr)
        if node is None:
            return None
        if isinstance(node, str):
            node = self.parse(node)
        # update the interpreter's current source context, if supplied
        if lineno is not None:
            self.lineno = lineno
        if fname is not None:
            self.fname = fname
        if expr is not None:
            self.expr = expr
        if func is not None:
            self.func = func
        # get handler for this node:
        # on_xxx with handle nodes of type 'xxx', etc
        if node.__class__.__name__.lower() not in self.node_handlers:
            return self.unimplemented(node)
        handler = self.node_handlers[node.__class__.__name__.lower()]
        # run the handler: this will likely generate
        # recursive calls into this run method.
        try:
            ret = handler(node)
            if isinstance(ret, enumerate):
                ret = list(ret)
            return ret
        except:
            self.raise_exception(node, expr=self.expr,
                                 fname=self.fname, lineno=self.lineno)
    def __call__(self, expr, **kw):
        """shorthand for eval()"""
        return self.eval(expr, **kw)
    def eval(self, expr, fname=None, lineno=0):
        """evaluate a single statement: parse then run"""
        self.fname = fname
        self.lineno = lineno
        self.error = []
        try:
            node = self.parse(expr, fname=fname, lineno=lineno)
        except RuntimeError:
            # NOTE(review): errmsg/errtype are assigned but never used here
            errmsg = sys.exc_info()[1]
            if len(self.error) > 0:
                errtype, errmsg = self.error[0].get_error()
            return
        out = None
        try:
            return self.run(node, expr=expr, fname=fname, lineno=lineno)
        except RuntimeError:
            return
    def run_init_scripts(self):
        """run each configured larch init file, if it exists"""
        for fname in site_config.init_files:
            if os.path.exists(fname):
                try:
                    builtins._run(filename=fname, _larch=self,
                                  printall = True)
                except:
                    self.raise_exception(None, exc=RuntimeError,
                                         msg='Initialization Error')
    def dump(self, node, **kw):
        """simple ast dumper (debug aid)"""
        return ast.dump(node, **kw)
    # handlers for ast components
    def on_expr(self, node):
        "expression"
        return self.run(node.value)  # ('value',)
    def on_index(self, node):
        "index"
        return self.run(node.value)  # ('value',)
    def on_return(self, node): # ('value',)
        "return statement: stash the value on self.retval"
        self.retval = self.run(node.value)
        return
    def on_repr(self, node):
        "repr "
        return repr(self.run(node.value))  # ('value',)
    def on_module(self, node):    # ():('body',)
        "module def: run each statement, return the last result"
        out = None
        for tnode in node.body:
            out = self.run(tnode)
        return out
    def on_expression(self, node):
        "basic expression"
        return self.on_module(node) # ():('body',)
    def on_pass(self, node):
        "pass statement"
        return None  # ()
    def on_ellipsis(self, node):
        "ellipses"
        return Ellipsis
    # for break and continue: set the instance variable _interrupt
    def on_interrupt(self, node):    # ()
        "interrupt handler: loops check self._interrupt each iteration"
        self._interrupt = node
        return node
    def on_break(self, node):
        "break"
        return self.on_interrupt(node)
    def on_continue(self, node):
        "continue"
        return self.on_interrupt(node)
    def on_arg(self, node):
        "arg for function definitions"
        return node.arg
    def on_assert(self, node):    # ('test', 'msg')
        "assert statement"
        testval = self.run(node.test)
        if not testval:
            self.raise_exception(node, exc=AssertionError, msg=node.msg)
        return True
    def on_list(self, node):    # ('elt', 'ctx')
        "list"
        return [self.run(e) for e in node.elts]
    def on_tuple(self, node):    # ('elts', 'ctx')
        "tuple"
        return tuple(self.on_list(node))
    def on_dict(self, node):    # ('keys', 'values')
        "dictionary"
        nodevals = list(zip(node.keys, node.values))
        run = self.run
        return dict([(run(k), run(v)) for k, v in nodevals])
    def on_num(self, node):
        'return number'
        return node.n  # ('n',)
    def on_str(self, node):
        'return string'
        return node.s  # ('s',)
    def on_name(self, node):    # ('id', 'ctx')
        """Name node: look up (or delete) a symbol by name.

        DefinedVariables are re-evaluated on access.
        """
        ctx = node.ctx.__class__
        if ctx == ast.Del:
            val = self.symtable.del_symbol(node.id)
        elif ctx == ast.Param:  # for Function Def
            val = str(node.id)
        else:
            # val = self.symtable.get_symbol(node.id)
            try:
                val = self.symtable.get_symbol(node.id)
            except (NameError, LookupError):
                msg = "name '%s' is not defined" % node.id
                self.raise_exception(node, msg=msg)
            if isinstance(val, DefinedVariable):
                val = val.evaluate()
            return val
    def node_assign(self, node, val):
        """here we assign a value (not the node.value object) to a node
        this is used by on_assign, but also by for, list comprehension, etc.
        Supported targets: Name, Attribute, Subscript, Tuple/List unpacking.
        """
        if len(self.error) > 0:
            return
        if node.__class__ == ast.Name:
            sym = self.symtable.set_symbol(node.id, value=val)
        elif node.__class__ == ast.Attribute:
            if node.ctx.__class__ == ast.Load:
                errmsg = "cannot assign to attribute %s" % node.attr
                self.raise_exception(node, exc=AttributeError, msg=errmsg)
            setattr(self.run(node.value), node.attr, val)
        elif node.__class__ == ast.Subscript:
            sym = self.run(node.value)
            xslice = self.run(node.slice)
            if isinstance(node.slice, ast.Index):
                sym[xslice] = val
            elif isinstance(node.slice, ast.Slice):
                # NOTE(review): local 'i' is unused; slice step is dropped
                i = xslice.start
                sym[slice(xslice.start, xslice.stop)] = val
            elif isinstance(node.slice, ast.ExtSlice):
                sym[(xslice)] = val
        elif node.__class__ in (ast.Tuple, ast.List):
            if len(val) == len(node.elts):
                for telem, tval in zip(node.elts, val):
                    self.node_assign(telem, tval)
            else:
                raise ValueError('too many values to unpack')
    def on_attribute(self, node):    # ('value', 'attr', 'ctx')
        "extract attribute"
        ctx = node.ctx.__class__
        # print("on_attribute",node.value,node.attr,ctx)
        if ctx == ast.Load:
            sym = self.run(node.value)
            if hasattr(sym, node.attr):
                val = getattr(sym, node.attr)
                if isinstance(val, DefinedVariable):
                    val = val.evaluate()
                return val
            else:
                obj = self.run(node.value)
                fmt = "%s does not have member '%s'"
                if not isgroup(obj):
                    obj = obj.__class__
                    fmt = "%s does not have attribute '%s'"
                msg = fmt % (obj, node.attr)
                self.raise_exception(node, exc=AttributeError, msg=msg)
        elif ctx == ast.Del:
            # NOTE(review): 'sym' is only bound in the Load branch above;
            # this branch would raise UnboundLocalError -- TODO confirm/fix
            return delattr(sym, node.attr)
        elif ctx == ast.Store:
            msg = "attribute for storage: shouldn't be here!"
            self.raise_exception(node, exc=RuntimeError, msg=msg)
    def on_assign(self, node):    # ('targets', 'value')
        "simple assignment: evaluate once, assign to every target"
        val = self.run(node.value)
        if len(self.error) > 0:
            return
        for tnode in node.targets:
            self.node_assign(tnode, val)
        return # return val
    def on_augassign(self, node):    # ('target', 'op', 'value')
        "augmented assign: rewritten as target = target <op> value"
        # print( "AugASSIGN ", node.target, node.value)
        return self.on_assign(ast.Assign(targets=[node.target],
                                         value=ast.BinOp(left = node.target,
                                                         op = node.op,
                                                         right= node.value)))
    def on_slice(self, node):    # ():('lower', 'upper', 'step')
        "simple slice"
        return slice(self.run(node.lower), self.run(node.upper),
                     self.run(node.step))
    def on_extslice(self, node):    # ():('dims',)
        "extended slice"
        return tuple([self.run(tnode) for tnode in node.dims])
    def on_subscript(self, node):    # ('value', 'slice', 'ctx')
        "subscript handling -- one of the tricky parts"
        # print("on_subscript: ", ast.dump(node))
        val = self.run(node.value)
        nslice = self.run(node.slice)
        ctx = node.ctx.__class__
        if ctx in ( ast.Load, ast.Store):
            if isinstance(node.slice, (ast.Index, ast.Slice, ast.Ellipsis)):
                return val.__getitem__(nslice)
            elif isinstance(node.slice, ast.ExtSlice):
                return val[(nslice)]
        else:
            msg = "subscript with unknown context"
            self.raise_exception(node, msg=msg)
    def on_delete(self, node):    # ('targets',)
        "delete statement: only dotted-name targets are supported"
        for tnode in node.targets:
            if tnode.ctx.__class__ != ast.Del:
                break
            children = []
            # walk a.b.c chains down to the base Name, collecting parts
            while tnode.__class__ == ast.Attribute:
                children.append(tnode.attr)
                tnode = tnode.value
            if tnode.__class__ == ast.Name:
                children.append(tnode.id)
                children.reverse()
                self.symtable.del_symbol('.'.join(children))
            else:
                msg = "could not delete symbol"
                self.raise_exception(node, msg=msg)
    def on_unaryop(self, node):    # ('op', 'operand')
        "unary operator"
        return OPERATORS[node.op.__class__](self.run(node.operand))
    def on_binop(self, node):    # ('left', 'op', 'right')
        "binary operator"
        # print( 'BINARY OP! ', node.left, node.right, node.op)
        return OPERATORS[node.op.__class__](self.run(node.left),
                                            self.run(node.right))
    def on_boolop(self, node):    # ('op', 'values')
        "boolean operator, with short-circuit evaluation"
        val = self.run(node.values[0])
        is_and = ast.And == node.op.__class__
        if (is_and and val) or (not is_and and not val):
            for n in node.values[1:]:
                val = OPERATORS[node.op.__class__](val, self.run(n))
                if (is_and and not val) or (not is_and and val):
                    break
        return val
    def on_compare(self, node):    # ('left', 'ops', 'comparators')
        "comparison operators, supporting chained comparisons (a < b < c)"
        lval = self.run(node.left)
        out = True
        for oper, rnode in zip(node.ops, node.comparators):
            comp = OPERATORS[oper.__class__]
            rval = self.run(rnode)
            out = comp(lval, rval)
            lval = rval
            # numpy array comparisons return arrays; treat any-true as true
            if isinstance(out, numpy.ndarray) and out.any():
                break
            elif not out:
                break
        return out
    def on_print(self, node):    # ('dest', 'values', 'nl')
        """ note: implements Python2 style print statement, not
        print() function. Probably, the 'larch2py' translation
        should look for and translate print -> print_() to become
        a customized function call.
        """
        dest = self.run(node.dest) or self.writer
        end = ''
        if node.nl:
            end = '\n'
        out = [self.run(tnode) for tnode in node.values]
        if out and len(self.error)==0:
            print(*out, file=dest, end=end)
    def on_if(self, node):    # ('test', 'body', 'orelse')
        "regular if-then-else statement"
        block = node.body
        if not self.run(node.test):
            block = node.orelse
        for tnode in block:
            self.run(tnode)
    def on_ifexp(self, node):    # ('test', 'body', 'orelse')
        "if expressions (x if cond else y)"
        expr = node.orelse
        if self.run(node.test):
            expr = node.body
        return self.run(expr)
    def on_while(self, node):    # ('test', 'body', 'orelse')
        "while blocks, honoring break/continue via self._interrupt"
        while self.run(node.test):
            self._interrupt = None
            for tnode in node.body:
                self.run(tnode)
                if self._interrupt is not None:
                    break
            if isinstance(self._interrupt, ast.Break):
                break
        else:
            # while-else: runs only if the loop ended without break
            for tnode in node.orelse:
                self.run(tnode)
        self._interrupt = None
    def on_for(self, node):    # ('target', 'iter', 'body', 'orelse')
        "for blocks, honoring break/continue via self._interrupt"
        for val in self.run(node.iter):
            self.node_assign(node.target, val)
            if len(self.error) > 0:
                return
            self._interrupt = None
            for tnode in node.body:
                self.run(tnode)
                if len(self.error) > 0:
                    return
                if self._interrupt is not None:
                    break
            if isinstance(self._interrupt, ast.Break):
                break
        else:
            # for-else: runs only if the loop ended without break
            for tnode in node.orelse:
                self.run(tnode)
        self._interrupt = None
    def on_listcomp(self, node):    # ('elt', 'generators')
        "list comprehension (single-generator; ifs are ANDed together)"
        out = []
        for tnode in node.generators:
            if tnode.__class__ == ast.comprehension:
                for val in self.run(tnode.iter):
                    self.node_assign(tnode.target, val)
                    if len(self.error) > 0:
                        return
                    add = True
                    for cond in tnode.ifs:
                        add = add and self.run(cond)
                    if add:
                        out.append(self.run(node.elt))
        return out
    #
    def on_excepthandler(self, node): # ('type', 'name', 'body')
        "exception handler..."
        # print("except handler %s / %s " % (node.type, ast.dump(node.name)))
        return (self.run(node.type), node.name, node.body)
    def on_tryexcept(self, node):    # ('body', 'handlers', 'orelse')
        "try/except blocks"
        no_errors = True
        for tnode in node.body:
            # print(" Try Node: " , self.dump(tnode))
            self.run(tnode)
            # print(" Error len: " , len(self.error))
            no_errors = no_errors and len(self.error) == 0
            if self.error:
                e_type, e_value, e_tb = self.error[-1].exc_info
                #print(" ERROR: ", e_type, e_value, e_tb)
                #print(" ... ", self.error)
                this_exc = e_type()
                for hnd in node.handlers:
                    htype = None
                    if hnd.type is not None:
                        # NOTE(review): assumes __builtins__ is a dict with
                        # .get(); when imported it is a module -- TODO confirm
                        htype = __builtins__.get(hnd.type.id, None)
                    # print(" ERR HANDLER ", htype)
                    if htype is None or isinstance(this_exc, htype):
                        self.error = []
                        if hnd.name is not None:
                            # NOTE(review): on py3, hnd.name is a str, not an
                            # ast node -- node_assign may not handle it; verify
                            self.node_assign(hnd.name, e_value)
                        for tline in hnd.body:
                            self.run(tline)
                        break
        if no_errors:
            for tnode in node.orelse:
                self.run(tnode)
    def on_raise(self, node):    # ('type', 'inst', 'tback')
        "raise statement (handles both py2 and py3 ast field names)"
        # print(" ON RAISE ", node.type, node.inst, node.tback)
        if sys.version_info[0] == 3:
            excnode = node.exc
            msgnode = node.cause
        else:
            excnode = node.type
            msgnode = node.inst
        out = self.run(excnode)
        msg = ' '.join(out.args)
        msg2 = self.run(msgnode)
        if msg2 not in (None, 'None'):
            msg = "%s: %s" % (msg, msg2)
        self.raise_exception(None, exc=out.__class__, msg=msg, expr='')
    def on_call(self, node):
        "function/procedure execution"
        # ('func', 'args', 'keywords', 'starargs', 'kwargs')
        func = self.run(node.func)
        if not hasattr(func, '__call__') and not isinstance(func, type):
            msg = "'%s' is not callable!!" % (func)
            self.raise_exception(node, exc=TypeError, msg=msg)
        args = [self.run(targ) for targ in node.args]
        # NOTE(review): ast.Call lost its starargs/kwargs attributes in
        # python 3.5 (folded into args/keywords) -- verify target version
        if node.starargs is not None:
            args = args + self.run(node.starargs)
        keywords = {}
        for key in node.keywords:
            if not isinstance(key, ast.keyword):
                msg = "keyword error in function call '%s'" % (func)
                self.raise_exception(node, exc=TypeError, msg=msg)
            keywords[key.arg] = self.run(key.value)
        if node.kwargs is not None:
            keywords.update(self.run(node.kwargs))
        self.func = func
        out = func(*args, **keywords)
        self.func = None
        return out
        # try:
        # except:
        # self.raise_exception(node, exc=RuntimeError, func=func,
        # msg = "Error running %s" % (func))
    def on_functiondef(self, node):
        "define procedures: build a larch Procedure and bind it by name"
        # ('name', 'args', 'body', 'decorator_list')
        if node.decorator_list != []:
            raise Warning("decorated procedures not supported!")
        kwargs = []
        # defaults align with the *last* len(defaults) positional args
        offset = len(node.args.args) - len(node.args.defaults)
        for idef, defnode in enumerate(node.args.defaults):
            defval = self.run(defnode)
            keyval = self.run(node.args.args[idef+offset])
            kwargs.append((keyval, defval))
        # kwargs.reverse()
        # NOTE(review): tnode.id is the py2 ast arg attribute; py3 uses
        # tnode.arg -- verify target version
        args = [tnode.id for tnode in node.args.args[:offset]]
        doc = None
        # a leading string expression becomes the procedure docstring
        if (isinstance(node.body[0], ast.Expr) and
            isinstance(node.body[0].value, ast.Str)):
            docnode = node.body[0]
            doc = docnode.value.s
        proc = Procedure(node.name, _larch=self, doc= doc,
                         body = node.body,
                         fname = self.fname,
                         lineno = self.lineno,
                         args = args,
                         kwargs = kwargs,
                         vararg = node.args.vararg,
                         varkws = node.args.kwarg)
        self.symtable.set_symbol(node.name, value=proc)
    # imports
    def on_import(self, node):    # ('names',)
        "simple import"
        for tnode in node.names:
            self.import_module(tnode.name, asname=tnode.asname)
    def on_importfrom(self, node):    # ('module', 'names', 'level')
        "import/from"
        fromlist, asname = [], []
        for tnode in node.names:
            fromlist.append(tnode.name)
            asname.append(tnode.asname)
        self.import_module(node.module,
                           asname=asname, fromlist=fromlist)
    def import_module(self, name, asname=None,
                      fromlist=None, do_reload=False):
        """
        import a module (larch or python), installing it into the symbol table.
        required arg:
            name       name of module to import
                          'foo' in 'import foo'
        options:
            fromlist   list of symbols to import with 'from-import'
                          ['x','y'] in 'from foo import x, y'
            asname     alias for imported name(s)
                          'bar' in 'import foo as bar'
                       or
                          ['s','t'] in 'from foo import x as s, y as t'
        this method covers a lot of cases (larch or python, import
        or from-import, use of asname) and so is fairly long.
        """
        st_sys = self.symtable._sys
        # make sure larch's search path is visible to python imports too
        for idir in st_sys.path:
            if idir not in sys.path and os.path.exists(idir):
                sys.path.append(idir)
        # step 1 import the module to a global location
        # either sys.modules for python modules
        # or st_sys.modules for larch modules
        # reload takes effect here in the normal python way:
        if (do_reload or
            ((name not in st_sys.modules) and (name not in sys.modules))):
            # first look for "name.lar"
            # print('import_mod A ', name)
            islarch = False
            larchname = "%s.lar" % name
            for dirname in st_sys.path:
                if not os.path.exists(dirname):
                    continue
                if larchname in os.listdir(dirname):
                    islarch = True
                    modname = os.path.abspath(os.path.join(dirname, larchname))
                    try:
                        thismod = builtins._run(filename=modname, _larch=self,
                                              new_module=name)
                    except:
                        self.raise_exception(None, exc=ImportError, msg='Import Error')
            # save current module group
            # create new group, set as moduleGroup and localGroup
            if len(self.error) > 0:
                st_sys.modules.pop(name)
                # thismod = None
                return
            # or, if not a larch module, load as a regular python module
            if not islarch and name not in sys.modules:
                try:
                    # print('import_mod: py import! ', name)
                    __import__(name)
                    thismod = sys.modules[name]
                except:
                    self.raise_exception(None, exc=ImportError, msg='Import Error')
                    return
        else: # previously loaded module, just do lookup
            # print("prev loaded?")
            if name in st_sys.modules:
                thismod = st_sys.modules[name]
            elif name in sys.modules:
                thismod = sys.modules[name]
        # now we install thismodule into the current moduleGroup
        # import full module
        if fromlist is None:
            if asname is None:
                asname = name
            # dotted asname installs intermediate (empty) Groups: a.b.c
            parts = asname.split('.')
            asname = parts.pop()
            targetgroup = st_sys.moduleGroup
            while len(parts) > 0:
                subname = parts.pop(0)
                subgrp = Group()
                setattr(targetgroup, subname, subgrp)
                targetgroup = subgrp
            setattr(targetgroup, asname, thismod)
        # import-from construct
        else:
            if asname is None:
                asname = [None]*len(fromlist)
            targetgroup = st_sys.moduleGroup
            for sym, alias in zip(fromlist, asname):
                if alias is None:
                    alias = sym
                setattr(targetgroup, alias, getattr(thismod, sym))
    # end of import_module
| [
"os.path.exists",
"os.listdir",
"ast.Break",
"os.path.join",
"sys.exc_info",
"os.path.isdir",
"ast.dump",
"ast.parse",
"ast.BinOp",
"sys.path.append"
] | [((4539, 4588), 'os.path.join', 'os.path.join', (['site_config.sys_larchdir', '"""plugins"""'], {}), "(site_config.sys_larchdir, 'plugins')\n", (4551, 4588), False, 'import os\n'), ((4610, 4633), 'os.listdir', 'os.listdir', (['plugins_dir'], {}), '(plugins_dir)\n', (4620, 4633), False, 'import os\n'), ((6207, 6218), 'ast.Break', 'ast.Break', ([], {}), '()\n', (6216, 6218), False, 'import ast\n'), ((9395, 9415), 'ast.dump', 'ast.dump', (['node'], {}), '(node, **kw)\n', (9403, 9415), False, 'import ast\n'), ((4654, 4686), 'os.path.join', 'os.path.join', (['plugins_dir', 'pname'], {}), '(plugins_dir, pname)\n', (4666, 4686), False, 'import os\n'), ((4702, 4721), 'os.path.isdir', 'os.path.isdir', (['pdir'], {}), '(pdir)\n', (4715, 4721), False, 'import os\n'), ((6681, 6696), 'ast.parse', 'ast.parse', (['text'], {}), '(text)\n', (6690, 6696), False, 'import ast\n'), ((9003, 9024), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (9017, 9024), False, 'import os\n'), ((27187, 27207), 'os.path.exists', 'os.path.exists', (['idir'], {}), '(idir)\n', (27201, 27207), False, 'import os\n'), ((27225, 27246), 'sys.path.append', 'sys.path.append', (['idir'], {}), '(idir)\n', (27240, 27246), False, 'import sys\n'), ((8623, 8637), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8635, 8637), False, 'import sys\n'), ((15274, 15331), 'ast.BinOp', 'ast.BinOp', ([], {'left': 'node.target', 'op': 'node.op', 'right': 'node.value'}), '(left=node.target, op=node.op, right=node.value)\n', (15283, 15331), False, 'import ast\n'), ((27779, 27802), 'os.path.exists', 'os.path.exists', (['dirname'], {}), '(dirname)\n', (27793, 27802), False, 'import os\n'), ((27865, 27884), 'os.listdir', 'os.listdir', (['dirname'], {}), '(dirname)\n', (27875, 27884), False, 'import os\n'), ((27967, 27999), 'os.path.join', 'os.path.join', (['dirname', 'larchname'], {}), '(dirname, larchname)\n', (27979, 27999), False, 'import os\n')] |
# Explicit API functions
from api_functions import api_function1, api_function2
from package3 import api_function3
# API Packages
import package1, package2
import package3
from package4 import api_class1
# Defined functions
def defined_function_1(d_f_arg1, d_f_arg2):
a = api_function1(d_f_arg1)
b = (api_function2(d_f_arg2, d_f_arg1), api_function3())
def defined_function_2(d_f_arg1, d_f_arg2, d_f_arg3):
api_function1()
package1.p1_function1(d_f_arg1, d_f_arg2, d_f_arg3)
a, b = api_class1.cl1_function1(1, 2, '3')
def defined_function_3():
package1.p1_function1()
package3.p3_function1() | [
"package4.api_class1.cl1_function1",
"api_functions.api_function1",
"package3.api_function3",
"package1.p1_function1",
"api_functions.api_function2",
"package3.p3_function1"
] | [((280, 303), 'api_functions.api_function1', 'api_function1', (['d_f_arg1'], {}), '(d_f_arg1)\n', (293, 303), False, 'from api_functions import api_function1, api_function2\n'), ((425, 440), 'api_functions.api_function1', 'api_function1', ([], {}), '()\n', (438, 440), False, 'from api_functions import api_function1, api_function2\n'), ((445, 496), 'package1.p1_function1', 'package1.p1_function1', (['d_f_arg1', 'd_f_arg2', 'd_f_arg3'], {}), '(d_f_arg1, d_f_arg2, d_f_arg3)\n', (466, 496), False, 'import package1, package2\n'), ((508, 543), 'package4.api_class1.cl1_function1', 'api_class1.cl1_function1', (['(1)', '(2)', '"""3"""'], {}), "(1, 2, '3')\n", (532, 543), False, 'from package4 import api_class1\n'), ((576, 599), 'package1.p1_function1', 'package1.p1_function1', ([], {}), '()\n', (597, 599), False, 'import package1, package2\n'), ((604, 627), 'package3.p3_function1', 'package3.p3_function1', ([], {}), '()\n', (625, 627), False, 'import package3\n'), ((313, 346), 'api_functions.api_function2', 'api_function2', (['d_f_arg2', 'd_f_arg1'], {}), '(d_f_arg2, d_f_arg1)\n', (326, 346), False, 'from api_functions import api_function1, api_function2\n'), ((348, 363), 'package3.api_function3', 'api_function3', ([], {}), '()\n', (361, 363), False, 'from package3 import api_function3\n')] |
"""Tests for making datasets for contradictory-claims."""
# -*- coding: utf-8 -*-
import os
import unittest
from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, \
load_med_nli, load_multi_nli
from .constants import drug_lex_path, mancon_sent_pairs, mednli_dev_path, mednli_test_path, mednli_train_path, \
multinli_test_path, multinli_train_path, sample_drug_lex_path, sample_mancon_sent_pairs, \
sample_multinli_test_path, sample_multinli_train_path, sample_virus_lex_path, virus_lex_path
class TestMakeDataset(unittest.TestCase):
"""Tests for making datasets for contradictory-claims."""
@unittest.skip("This test can be used to check that datasets are found at the correct locations locally")
def test_find_files(self):
"""Test that input files are found properly."""
self.assertTrue(os.path.isfile(multinli_train_path),
"MultiNLI training data not found at {}".format(multinli_train_path))
self.assertTrue(os.path.isfile(multinli_test_path),
"MultiNLI test data not found at {}".format(multinli_test_path))
self.assertTrue(os.path.isfile(mednli_train_path),
"MedNLI training data not found at {}".format(mednli_train_path))
self.assertTrue(os.path.isfile(mednli_dev_path),
"MedNLI dev set data not found at {}".format(mednli_dev_path))
self.assertTrue(os.path.isfile(mednli_test_path),
"MedNLI test data not found at {}".format(mednli_test_path))
self.assertTrue(os.path.isfile(mancon_sent_pairs),
"ManConCorpus sentence pairs training data not found at {}".format(mancon_sent_pairs))
self.assertTrue(os.path.isfile(drug_lex_path),
"Drug lexicon not found at {}".format(drug_lex_path))
self.assertTrue(os.path.isfile(virus_lex_path),
"Virus lexicon not found at {}".format(virus_lex_path))
@unittest.skip("This test can be used locally to check that MultiNLI loads properly")
def test_load_multi_nli(self):
"""Test that MultiNLI is loaded as expected."""
x_train, y_train, x_test, y_test = load_multi_nli(multinli_train_path, multinli_test_path)
self.assertEqual(len(x_train), 391165)
self.assertEqual(y_train.shape, (391165, 3))
self.assertEqual(len(x_test), 9897)
self.assertEqual(y_test.shape, (9897, 3))
def test_load_multi_nli_sample(self):
"""Test that MultiNLI SAMPLE DATA are loaded as expected."""
x_train, y_train, x_test, y_test = load_multi_nli(sample_multinli_train_path, sample_multinli_test_path)
self.assertEqual(len(x_train), 49)
self.assertEqual(y_train.shape, (49, 3))
self.assertEqual(len(x_test), 49)
self.assertEqual(y_test.shape, (49, 3))
@unittest.skip("This test can be used locally to check that MedNLI loads properly")
def test_load_med_nli(self):
"""Test that MedNLI is loaded as expected."""
x_train, y_train, x_test, y_test = load_med_nli(mednli_train_path, mednli_dev_path, mednli_test_path)
self.assertEqual(len(x_train), 12627)
self.assertEqual(y_train.shape, (12627, 3))
self.assertEqual(len(x_test), 1422)
self.assertEqual(y_test.shape, (1422, 3))
@unittest.skip("This test can be used locally to check that ManConCorpus loads properly")
def test_load_mancon_corpus_from_sent_pairs(self):
"""Test that ManConCorpus is loaded as expected."""
x_train, y_train, x_test, y_test = load_mancon_corpus_from_sent_pairs(mancon_sent_pairs)
self.assertEqual(len(x_train), 14328)
self.assertEqual(y_train.shape, (14328, 3))
self.assertEqual(len(x_test), 3583)
self.assertEqual(y_test.shape, (3583, 3))
def test_load_mancon_corpus_from_sent_pairs_sample(self):
"""Test that ManConCorpus is loaded as expected."""
x_train, y_train, x_test, y_test = load_mancon_corpus_from_sent_pairs(sample_mancon_sent_pairs)
self.assertEqual(len(x_train), 39)
self.assertEqual(y_train.shape, (39, 3))
self.assertEqual(len(x_test), 10)
self.assertEqual(y_test.shape, (10, 3))
def test_load_drug_virus_lexicons(self):
"""Test that the virus and drug lexicons are loaded properly."""
drug_names, virus_names = load_drug_virus_lexicons(sample_drug_lex_path, sample_virus_lex_path)
drugs = ["hydroxychloroquine", "remdesivir", "ritonavir", "chloroquine", "lopinavir"]
virus_syns = ["COVID-19", "SARS-CoV-2", "Coronavirus Disease 2019"]
self.assertTrue(set(drugs).issubset(set(drug_names)))
self.assertTrue(set(virus_syns).issubset(set(virus_names)))
| [
"contradictory_claims.data.make_dataset.load_med_nli",
"contradictory_claims.data.make_dataset.load_mancon_corpus_from_sent_pairs",
"os.path.isfile",
"contradictory_claims.data.make_dataset.load_multi_nli",
"unittest.skip",
"contradictory_claims.data.make_dataset.load_drug_virus_lexicons"
] | [((677, 791), 'unittest.skip', 'unittest.skip', (['"""This test can be used to check that datasets are found at the correct locations locally"""'], {}), "(\n 'This test can be used to check that datasets are found at the correct locations locally'\n )\n", (690, 791), False, 'import unittest\n'), ((2057, 2146), 'unittest.skip', 'unittest.skip', (['"""This test can be used locally to check that MultiNLI loads properly"""'], {}), "(\n 'This test can be used locally to check that MultiNLI loads properly')\n", (2070, 2146), False, 'import unittest\n'), ((2941, 3028), 'unittest.skip', 'unittest.skip', (['"""This test can be used locally to check that MedNLI loads properly"""'], {}), "(\n 'This test can be used locally to check that MedNLI loads properly')\n", (2954, 3028), False, 'import unittest\n'), ((3420, 3513), 'unittest.skip', 'unittest.skip', (['"""This test can be used locally to check that ManConCorpus loads properly"""'], {}), "(\n 'This test can be used locally to check that ManConCorpus loads properly')\n", (3433, 3513), False, 'import unittest\n'), ((2276, 2331), 'contradictory_claims.data.make_dataset.load_multi_nli', 'load_multi_nli', (['multinli_train_path', 'multinli_test_path'], {}), '(multinli_train_path, multinli_test_path)\n', (2290, 2331), False, 'from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, load_med_nli, load_multi_nli\n'), ((2682, 2751), 'contradictory_claims.data.make_dataset.load_multi_nli', 'load_multi_nli', (['sample_multinli_train_path', 'sample_multinli_test_path'], {}), '(sample_multinli_train_path, sample_multinli_test_path)\n', (2696, 2751), False, 'from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, load_med_nli, load_multi_nli\n'), ((3154, 3220), 'contradictory_claims.data.make_dataset.load_med_nli', 'load_med_nli', (['mednli_train_path', 'mednli_dev_path', 'mednli_test_path'], {}), '(mednli_train_path, 
mednli_dev_path, mednli_test_path)\n', (3166, 3220), False, 'from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, load_med_nli, load_multi_nli\n'), ((3667, 3720), 'contradictory_claims.data.make_dataset.load_mancon_corpus_from_sent_pairs', 'load_mancon_corpus_from_sent_pairs', (['mancon_sent_pairs'], {}), '(mancon_sent_pairs)\n', (3701, 3720), False, 'from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, load_med_nli, load_multi_nli\n'), ((4080, 4140), 'contradictory_claims.data.make_dataset.load_mancon_corpus_from_sent_pairs', 'load_mancon_corpus_from_sent_pairs', (['sample_mancon_sent_pairs'], {}), '(sample_mancon_sent_pairs)\n', (4114, 4140), False, 'from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, load_med_nli, load_multi_nli\n'), ((4477, 4546), 'contradictory_claims.data.make_dataset.load_drug_virus_lexicons', 'load_drug_virus_lexicons', (['sample_drug_lex_path', 'sample_virus_lex_path'], {}), '(sample_drug_lex_path, sample_virus_lex_path)\n', (4501, 4546), False, 'from contradictory_claims.data.make_dataset import load_drug_virus_lexicons, load_mancon_corpus_from_sent_pairs, load_med_nli, load_multi_nli\n'), ((893, 928), 'os.path.isfile', 'os.path.isfile', (['multinli_train_path'], {}), '(multinli_train_path)\n', (907, 928), False, 'import os\n'), ((1048, 1082), 'os.path.isfile', 'os.path.isfile', (['multinli_test_path'], {}), '(multinli_test_path)\n', (1062, 1082), False, 'import os\n'), ((1198, 1231), 'os.path.isfile', 'os.path.isfile', (['mednli_train_path'], {}), '(mednli_train_path)\n', (1212, 1231), False, 'import os\n'), ((1347, 1378), 'os.path.isfile', 'os.path.isfile', (['mednli_dev_path'], {}), '(mednli_dev_path)\n', (1361, 1378), False, 'import os\n'), ((1491, 1523), 'os.path.isfile', 'os.path.isfile', (['mednli_test_path'], {}), '(mednli_test_path)\n', (1505, 1523), 
False, 'import os\n'), ((1635, 1668), 'os.path.isfile', 'os.path.isfile', (['mancon_sent_pairs'], {}), '(mancon_sent_pairs)\n', (1649, 1668), False, 'import os\n'), ((1806, 1835), 'os.path.isfile', 'os.path.isfile', (['drug_lex_path'], {}), '(drug_lex_path)\n', (1820, 1835), False, 'import os\n'), ((1939, 1969), 'os.path.isfile', 'os.path.isfile', (['virus_lex_path'], {}), '(virus_lex_path)\n', (1953, 1969), False, 'import os\n')] |
# import modules
import numpy as np
import argparse
import cv2
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-b", "--buffer", type=int, default=64,
help="max buffer size")
args = vars(ap.parse_args())
# define the lower and upper boundaries of the colors in the HSV color space
lower = {'red': (166, 84, 141), 'green': (66, 122, 129), 'blue': (97, 100, 117), 'yellow': (23, 59, 119), 'orange': (0, 50, 80)} # assign new item lower['blue'] = (93, 10, 0)
upper = {'red': (186, 255, 255), 'green': (86, 255, 255), 'blue': (117, 255, 255), 'yellow': (54, 255, 255), 'orange': (20, 255, 255)}
# define standard colors for circle around the object
colors = {'red': (0, 0, 255), 'green': (0, 255, 0), 'blue': (255, 0, 0), 'yellow': (0, 255, 217), 'orange': (0, 140, 255)}
camera = cv2.VideoCapture(0 + cv2.CAP_DSHOW)
# keep looping
while True:
# grab the current frame
(grabbed, frame) = camera.read()
# resize the frame, blur it, and convert it to the HSV
# color space
frame = cv2.resize(frame, (640, 480))
blurred = cv2.GaussianBlur(frame, (11, 11), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
# for each color in dictionary check object in frame
for key, value in upper.items():
# construct a mask for the color from dictionary`1, then perform
# a series of dilations and erosions to remove any small
# blobs left in the mask
kernel = np.ones((9, 9), np.uint8)
mask = cv2.inRange(hsv, lower[key], upper[key])
mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
# find contours in the mask and initialize the current
# (x, y) center of the ball
cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)[-2]
center = None
# only proceed if at least one contour was found
if len(cnts) > 0:
# find the largest contour in the mask, then use
# it to compute the minimum enclosing circle and
# centroid
c = max(cnts, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
M = cv2.moments(c)
center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
# only proceed if the radius meets a minimum size. Correct this value for your obect's size
if radius > 0.5:
# draw the circle and centroid on the frame,
# then update the list of tracked points
cv2.circle(frame, (int(x), int(y)), int(radius), colors[key], 2)
cv2.putText(frame, key, (int(x - radius), int(y - radius)), cv2.FONT_HERSHEY_SIMPLEX, 0.6, colors[key], 2)
# show the frame to our screen
cv2.imshow("Frame", frame)
key = cv2.waitKey(1) & 0xFF
# if the 'q' key is pressed, stop the loop
if key == ord("q"):
break
# cleanup the camera and close any open windows
camera.release()
cv2.destroyAllWindows()
| [
"numpy.ones",
"argparse.ArgumentParser",
"cv2.inRange",
"cv2.minEnclosingCircle",
"cv2.imshow",
"cv2.morphologyEx",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.moments",
"cv2.resize",
"cv2.GaussianBlur",
"cv2.waitKey"
] | [((125, 150), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (148, 150), False, 'import argparse\n'), ((853, 888), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0 + cv2.CAP_DSHOW)'], {}), '(0 + cv2.CAP_DSHOW)\n', (869, 888), False, 'import cv2\n'), ((3087, 3110), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3108, 3110), False, 'import cv2\n'), ((1073, 1102), 'cv2.resize', 'cv2.resize', (['frame', '(640, 480)'], {}), '(frame, (640, 480))\n', (1083, 1102), False, 'import cv2\n'), ((1118, 1154), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['frame', '(11, 11)', '(0)'], {}), '(frame, (11, 11), 0)\n', (1134, 1154), False, 'import cv2\n'), ((1165, 1205), 'cv2.cvtColor', 'cv2.cvtColor', (['blurred', 'cv2.COLOR_BGR2HSV'], {}), '(blurred, cv2.COLOR_BGR2HSV)\n', (1177, 1205), False, 'import cv2\n'), ((2876, 2902), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), "('Frame', frame)\n", (2886, 2902), False, 'import cv2\n'), ((1488, 1513), 'numpy.ones', 'np.ones', (['(9, 9)', 'np.uint8'], {}), '((9, 9), np.uint8)\n', (1495, 1513), True, 'import numpy as np\n'), ((1529, 1569), 'cv2.inRange', 'cv2.inRange', (['hsv', 'lower[key]', 'upper[key]'], {}), '(hsv, lower[key], upper[key])\n', (1540, 1569), False, 'import cv2\n'), ((1585, 1631), 'cv2.morphologyEx', 'cv2.morphologyEx', (['mask', 'cv2.MORPH_OPEN', 'kernel'], {}), '(mask, cv2.MORPH_OPEN, kernel)\n', (1601, 1631), False, 'import cv2\n'), ((1647, 1694), 'cv2.morphologyEx', 'cv2.morphologyEx', (['mask', 'cv2.MORPH_CLOSE', 'kernel'], {}), '(mask, cv2.MORPH_CLOSE, kernel)\n', (1663, 1694), False, 'import cv2\n'), ((2914, 2928), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2925, 2928), False, 'import cv2\n'), ((2249, 2274), 'cv2.minEnclosingCircle', 'cv2.minEnclosingCircle', (['c'], {}), '(c)\n', (2271, 2274), False, 'import cv2\n'), ((2291, 2305), 'cv2.moments', 'cv2.moments', (['c'], {}), '(c)\n', (2302, 2305), False, 'import cv2\n')] |
# Generated by Django 3.2 on 2021-10-21 19:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('topics', '0002_alter_modelingprocess_modeling_type'),
]
operations = [
migrations.AddField(
model_name='topic',
name='word',
field=models.JSONField(default='{}'),
preserve_default=False,
),
]
| [
"django.db.models.JSONField"
] | [((345, 375), 'django.db.models.JSONField', 'models.JSONField', ([], {'default': '"""{}"""'}), "(default='{}')\n", (361, 375), False, 'from django.db import migrations, models\n')] |
from CHECLabPy.plotting.setup import Plotter
from CHECLabPy.plotting.camera import CameraImage
from CHECLabPy.utils.files import create_directory
from CHECLabPy.utils.mapping import get_ctapipe_camera_geometry
from sstcam_sandbox import get_plot, get_data
from os.path import join
from matplotlib import pyplot as plt
from tqdm import tqdm
import numpy as np
import pandas as pd
import warnings
from CHECOnsky.calib import obtain_cleaning_mask
from CHECLabPy.calib import TimeCalibrator
from mpl_toolkits.axes_grid1 import make_axes_locatable
from IPython import embed
def colorbar(mappable, label):
ax = mappable.axes
fig = ax.figure
divider = make_axes_locatable(ax)
_ = divider.append_axes("right", size="10%", pad=0.15)
cax = divider.append_axes("right", size="10%", pad=0.15)
return fig.colorbar(mappable, label=label, cax=cax, aspect=20)
class CameraMovie(Plotter):
def __init__(self, mapping, output_path):
super().__init__()
self.fig = plt.figure(figsize=(8, 3))
self.ax_goldfish = self.fig.add_axes([0, 0, 0.4, 1])
self.ax_image = self.fig.add_axes([0.4, 0, 0.4, 1])
self.ax_cb = self.fig.add_axes([0.68, 0, 0.15, 1])
self.ax_image.patch.set_alpha(0)
self.ax_cb.patch.set_alpha(0)
self.ax_cb.axis('off')
self.ci_image = CameraImage.from_mapping(mapping, ax=self.ax_image)
self.ci_image.add_colorbar(
"Pixel Amplitude (p.e.)", ax=self.ax_cb, pad=-0.5
)
self.ci_goldfish = CameraImage.from_mapping(mapping, ax=self.ax_goldfish)
self.output_path = output_path
self.source_point_image = None
self.source_point_goldfish = None
self.source_label_image = None
self.source_label_goldfish = None
self.alpha_line = None
self.timestamp = None
self.iframe = 0
def set_source_position(self, x_src, y_src):
offset = 0.004
if self.source_point_image is None:
self.source_point_image, = self.ax_image.plot(
x_src, y_src, 'x', c='red'
)
self.source_label_image = self.ax_image.text(
x_src+offset, y_src+offset, "Mrk421", color='red', size=10
)
else:
self.source_point_image.set_xdata(x_src)
self.source_point_image.set_ydata(y_src)
self.source_label_image.set_position((x_src+offset, y_src+offset))
if self.source_point_goldfish is None:
self.source_point_goldfish, = self.ax_goldfish.plot(
x_src, y_src, 'x', c='red'
)
self.source_label_goldfish = self.ax_goldfish.text(
x_src+offset, y_src+offset, "Mrk421", color='red', size=10
)
else:
self.source_point_goldfish.set_xdata(x_src)
self.source_point_goldfish.set_ydata(y_src)
self.source_label_goldfish.set_position((x_src+offset, y_src+offset))
def set_timestamp(self, timestamp):
timestamp_str = str(timestamp)
timestamp_len = len(timestamp_str)
missing = 29 - timestamp_len
timestamp_str += "0" * missing
if self.timestamp is None:
self.timestamp = self.fig.text(
0.4, -0.1, timestamp_str, horizontalalignment='center', size=12
)
else:
self.timestamp.set_text(timestamp_str)
def set_image(self, image, min_=None, max_=None):
self.ci_image.image = image
self.ci_image.set_limits_minmax(min_, max_)
def set_goldfish(self, slice, min_=None, max_=None):
self.ci_goldfish.image = slice
self.ci_goldfish.set_limits_minmax(min_, max_)
def set_alpha_line(self, cog_x, cog_y, psi):
y_min, y_max = self.ax_image.get_ylim()
x_min = cog_x - (cog_y - y_min) / np.tan(psi)
x_max = cog_x - (cog_y - y_max) / np.tan(psi)
if self.alpha_line is None:
self.alpha_line, = self.ax_image.plot(
[x_min, x_max], [y_min, y_max], ls="--", c='red'
)
else:
self.alpha_line.set_xdata([x_min, x_max])
self.alpha_line.set_ydata([y_min, y_max])
def save_frame(self):
path = self.output_path.format(self.iframe)
self.fig.savefig(path, bbox_inches='tight')
self.iframe += 1
def main():
path = get_data("d190717_alpha/wobble.h5")
with pd.HDFStore(path, mode='r') as store:
df = store['data'].loc[::4]
mapping = store['mapping']
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
mapping.metadata = store.get_storer('mapping').attrs.metadata
tc = TimeCalibrator()
geom = get_ctapipe_camera_geometry(mapping)
n_row = df.index.size
p_camera = CameraMovie(mapping, get_plot(
"d190717_alpha/wobble_animation_goldfish/frames/{:04d}.png"
))
for _, row in tqdm(df.iterrows(), total=n_row):
timestamp = row['timestamp']
iobs = row['iobs']
iev = row['iev']
x_src = row['x_src']
y_src = row['y_src']
dl1 = row['dl1'].values
time = row['dl1_pulse_time'].values
r1 = row['r1']
x_cog = row['x_cog']
y_cog = row['y_cog']
psi = row['psi']
p_camera.set_source_position(x_src, y_src)
n_pixels, n_samples = r1.shape
shifted = tc(r1)
mask = obtain_cleaning_mask(geom, dl1, time)
if not mask.any():
msg = f"No pixels survived cleaning for: RUN {iobs} IEV {iev}"
print(msg)
continue
# raise ValueError(msg)
dl1_ma = np.ma.masked_array(dl1, mask=~mask)
min_pixel = dl1_ma.argmin()
max_pixel = dl1_ma.argmax()
min_image = -4
max_image = 0.7 * dl1.max()
min_gf = shifted[max_pixel, :20].min()
max_gf = shifted[max_pixel].max() * 0.8
st = int(np.min(time[mask]) - 3)
et = int(np.max(time[mask]) + 6)
st = st if st > 0 else 0
et = et if et < n_samples else n_samples
# embed()
p_camera.set_image(dl1, min_image, max_image)
for t in range(st, et, 3):
slice_ = shifted[:, t]
p_camera.set_timestamp(timestamp + pd.Timedelta(f"{t}ns"))
p_camera.set_goldfish(slice_, min_gf, max_gf)
p_camera.save_frame()
if __name__ == '__main__':
main()
| [
"CHECLabPy.plotting.camera.CameraImage.from_mapping",
"sstcam_sandbox.get_plot",
"CHECOnsky.calib.obtain_cleaning_mask",
"numpy.tan",
"CHECLabPy.utils.mapping.get_ctapipe_camera_geometry",
"CHECLabPy.calib.TimeCalibrator",
"pandas.Timedelta",
"warnings.catch_warnings",
"numpy.max",
"sstcam_sandbox... | [((659, 682), 'mpl_toolkits.axes_grid1.make_axes_locatable', 'make_axes_locatable', (['ax'], {}), '(ax)\n', (678, 682), False, 'from mpl_toolkits.axes_grid1 import make_axes_locatable\n'), ((4380, 4415), 'sstcam_sandbox.get_data', 'get_data', (['"""d190717_alpha/wobble.h5"""'], {}), "('d190717_alpha/wobble.h5')\n", (4388, 4415), False, 'from sstcam_sandbox import get_plot, get_data\n'), ((4715, 4731), 'CHECLabPy.calib.TimeCalibrator', 'TimeCalibrator', ([], {}), '()\n', (4729, 4731), False, 'from CHECLabPy.calib import TimeCalibrator\n'), ((4743, 4779), 'CHECLabPy.utils.mapping.get_ctapipe_camera_geometry', 'get_ctapipe_camera_geometry', (['mapping'], {}), '(mapping)\n', (4770, 4779), False, 'from CHECLabPy.utils.mapping import get_ctapipe_camera_geometry\n'), ((993, 1019), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 3)'}), '(figsize=(8, 3))\n', (1003, 1019), True, 'from matplotlib import pyplot as plt\n'), ((1334, 1385), 'CHECLabPy.plotting.camera.CameraImage.from_mapping', 'CameraImage.from_mapping', (['mapping'], {'ax': 'self.ax_image'}), '(mapping, ax=self.ax_image)\n', (1358, 1385), False, 'from CHECLabPy.plotting.camera import CameraImage\n'), ((1521, 1575), 'CHECLabPy.plotting.camera.CameraImage.from_mapping', 'CameraImage.from_mapping', (['mapping'], {'ax': 'self.ax_goldfish'}), '(mapping, ax=self.ax_goldfish)\n', (1545, 1575), False, 'from CHECLabPy.plotting.camera import CameraImage\n'), ((4425, 4452), 'pandas.HDFStore', 'pd.HDFStore', (['path'], {'mode': '"""r"""'}), "(path, mode='r')\n", (4436, 4452), True, 'import pandas as pd\n'), ((4843, 4912), 'sstcam_sandbox.get_plot', 'get_plot', (['"""d190717_alpha/wobble_animation_goldfish/frames/{:04d}.png"""'], {}), "('d190717_alpha/wobble_animation_goldfish/frames/{:04d}.png')\n", (4851, 4912), False, 'from sstcam_sandbox import get_plot, get_data\n'), ((5442, 5479), 'CHECOnsky.calib.obtain_cleaning_mask', 'obtain_cleaning_mask', (['geom', 'dl1', 'time'], {}), '(geom, 
dl1, time)\n', (5462, 5479), False, 'from CHECOnsky.calib import obtain_cleaning_mask\n'), ((5680, 5715), 'numpy.ma.masked_array', 'np.ma.masked_array', (['dl1'], {'mask': '(~mask)'}), '(dl1, mask=~mask)\n', (5698, 5715), True, 'import numpy as np\n'), ((4547, 4572), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (4570, 4572), False, 'import warnings\n'), ((4586, 4630), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'UserWarning'], {}), "('ignore', UserWarning)\n", (4607, 4630), False, 'import warnings\n'), ((3845, 3856), 'numpy.tan', 'np.tan', (['psi'], {}), '(psi)\n', (3851, 3856), True, 'import numpy as np\n'), ((3899, 3910), 'numpy.tan', 'np.tan', (['psi'], {}), '(psi)\n', (3905, 3910), True, 'import numpy as np\n'), ((5963, 5981), 'numpy.min', 'np.min', (['time[mask]'], {}), '(time[mask])\n', (5969, 5981), True, 'import numpy as np\n'), ((6004, 6022), 'numpy.max', 'np.max', (['time[mask]'], {}), '(time[mask])\n', (6010, 6022), True, 'import numpy as np\n'), ((6301, 6323), 'pandas.Timedelta', 'pd.Timedelta', (['f"""{t}ns"""'], {}), "(f'{t}ns')\n", (6313, 6323), True, 'import pandas as pd\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Runs tempest tests
This command is used for running the tempest tests
Test Selection
==============
Tempest run has several options:
* **--regex/-r**: This is a selection regex like what testr uses. It will run
any tests that match on re.match() with the regex
* **--smoke/-s**: Run all the tests tagged as smoke
There are also the **--blacklist-file** and **--whitelist-file** options that
let you pass a filepath to tempest run with the file format being a line
separated regex, with '#' used to signify the start of a comment on a line.
For example::
# Regex file
^regex1 # Match these tests
.*regex2 # Match those tests
The blacklist file will be used to construct a negative lookahead regex and
the whitelist file will simply OR all the regexes in the file. The whitelist
and blacklist file options are mutually exclusive so you can't use them
together. However, you can combine either with a normal regex or the *--smoke*
flag. When used with a blacklist file the generated regex will be combined to
something like::
^((?!black_regex1|black_regex2).)*$cli_regex1
When combined with a whitelist file all the regexes from the file and the CLI
regexes will be ORed.
You can also use the **--list-tests** option in conjunction with selection
arguments to list which tests will be run.
You can also use the **--load-list** option that lets you pass a filepath to
tempest run with the file format being in a non-regex format, similar to the
tests generated by the **--list-tests** option. You can specify target tests
by removing unnecessary tests from a list file which is generated from
**--list-tests** option.
Test Execution
==============
There are several options to control how the tests are executed. By default
tempest will run in parallel with a worker for each CPU present on the machine.
If you want to adjust the number of workers use the **--concurrency** option
and if you want to run tests serially use **--serial/-t**
Running with Workspaces
-----------------------
Tempest run enables you to run your tempest tests from any setup tempest
workspace it relies on you having setup a tempest workspace with either the
``tempest init`` or ``tempest workspace`` commands. Then using the
``--workspace`` CLI option you can specify which one of your workspaces you
want to run tempest from. Using this option you don't have to run Tempest
directly with you current working directory being the workspace, Tempest will
take care of managing everything to be executed from there.
Running from Anywhere
---------------------
Tempest run provides you with an option to execute tempest from anywhere on
your system. You are required to provide a config file in this case with the
``--config-file`` option. When run tempest will create a .testrepository
directory and a .testr.conf file in your current working directory. This way
you can use testr commands directly to inspect the state of the previous run.
Test Output
===========
By default tempest run's output to STDOUT will be generated using the
subunit-trace output filter. But, if you would prefer a subunit v2 stream be
output to STDOUT use the **--subunit** flag
Combining Runs
==============
There are certain situations in which you want to split a single run of tempest
across 2 executions of tempest run. (for example to run part of the tests
serially and others in parallel) To accomplish this but still treat the results
as a single run you can leverage the **--combine** option which will append
the current run's results with the previous runs.
"""
import io
import os
import sys
import tempfile
import threading
from cliff import command
from os_testr import regex_builder
from os_testr import subunit_trace
from oslo_serialization import jsonutils as json
import six
from testrepository.commands import run_argv
from tempest import clients
from tempest.cmd import cleanup_service
from tempest.cmd import init
from tempest.cmd import workspace
from tempest.common import credentials_factory as credentials
from tempest import config
CONF = config.CONF
SAVED_STATE_JSON = "saved_state.json"
class TempestRun(command.Command):
def _set_env(self, config_file=None):
if config_file:
CONF.set_config_path(os.path.abspath(config_file))
# NOTE(mtreinish): This is needed so that testr doesn't gobble up any
# stacktraces on failure.
if 'TESTR_PDB' in os.environ:
return
else:
os.environ["TESTR_PDB"] = ""
# NOTE(dims): most of our .testr.conf try to test for PYTHON
# environment variable and fall back to "python", under python3
# if it does not exist. we should set it to the python3 executable
# to deal with this situation better for now.
if six.PY3 and 'PYTHON' not in os.environ:
os.environ['PYTHON'] = sys.executable
def _create_testrepository(self):
if not os.path.isdir('.testrepository'):
returncode = run_argv(['testr', 'init'], sys.stdin, sys.stdout,
sys.stderr)
if returncode:
sys.exit(returncode)
def _create_testr_conf(self):
top_level_path = os.path.dirname(os.path.dirname(__file__))
discover_path = os.path.join(top_level_path, 'test_discover')
file_contents = init.TESTR_CONF % (top_level_path, discover_path)
with open('.testr.conf', 'w+') as testr_conf_file:
testr_conf_file.write(file_contents)
def take_action(self, parsed_args):
returncode = 0
if parsed_args.config_file:
self._set_env(parsed_args.config_file)
else:
self._set_env()
# Workspace execution mode
if parsed_args.workspace:
workspace_mgr = workspace.WorkspaceManager(
parsed_args.workspace_path)
path = workspace_mgr.get_workspace(parsed_args.workspace)
if not path:
sys.exit(
"The %r workspace isn't registered in "
"%r. Use 'tempest init' to "
"register the workspace." %
(parsed_args.workspace, workspace_mgr.path))
os.chdir(path)
# NOTE(mtreinish): tempest init should create a .testrepository dir
# but since workspaces can be imported let's sanity check and
# ensure that one is created
self._create_testrepository()
# Local execution mode
elif os.path.isfile('.testr.conf'):
# If you're running in local execution mode and there is not a
# testrepository dir create one
self._create_testrepository()
# local execution with config file mode
elif parsed_args.config_file:
self._create_testr_conf()
self._create_testrepository()
else:
print("No .testr.conf file was found for local execution")
sys.exit(2)
if parsed_args.state:
self._init_state()
else:
pass
if parsed_args.combine:
temp_stream = tempfile.NamedTemporaryFile()
return_code = run_argv(['tempest', 'last', '--subunit'], sys.stdin,
temp_stream, sys.stderr)
if return_code > 0:
sys.exit(return_code)
regex = self._build_regex(parsed_args)
if parsed_args.list_tests:
argv = ['tempest', 'list-tests', regex]
returncode = run_argv(argv, sys.stdin, sys.stdout, sys.stderr)
else:
options = self._build_options(parsed_args)
returncode = self._run(regex, options)
if returncode > 0:
sys.exit(returncode)
if parsed_args.combine:
return_code = run_argv(['tempest', 'last', '--subunit'], sys.stdin,
temp_stream, sys.stderr)
if return_code > 0:
sys.exit(return_code)
returncode = run_argv(['tempest', 'load', temp_stream.name],
sys.stdin, sys.stdout, sys.stderr)
sys.exit(returncode)
def get_description(self):
return 'Run tempest'
def _init_state(self):
print("Initializing saved state.")
data = {}
self.global_services = cleanup_service.get_global_cleanup_services()
self.admin_mgr = clients.Manager(
credentials.get_configured_admin_credentials())
admin_mgr = self.admin_mgr
kwargs = {'data': data,
'is_dry_run': False,
'saved_state_json': data,
'is_preserve': False,
'is_save_state': True}
for service in self.global_services:
svc = service(admin_mgr, **kwargs)
svc.run()
with open(SAVED_STATE_JSON, 'w+') as f:
f.write(json.dumps(data,
sort_keys=True, indent=2, separators=(',', ': ')))
def get_parser(self, prog_name):
parser = super(TempestRun, self).get_parser(prog_name)
parser = self._add_args(parser)
return parser
def _add_args(self, parser):
# workspace args
parser.add_argument('--workspace', default=None,
help='Name of tempest workspace to use for running'
' tests. You can see a list of workspaces '
'with tempest workspace list')
parser.add_argument('--workspace-path', default=None,
dest='workspace_path',
help="The path to the workspace file, the default "
"is ~/.tempest/workspace.yaml")
# Configuration flags
parser.add_argument('--config-file', default=None, dest='config_file',
help='Configuration file to run tempest with')
# test selection args
regex = parser.add_mutually_exclusive_group()
regex.add_argument('--smoke', '-s', action='store_true',
help="Run the smoke tests only")
regex.add_argument('--regex', '-r', default='',
help='A normal testr selection regex used to '
'specify a subset of tests to run')
list_selector = parser.add_mutually_exclusive_group()
list_selector.add_argument('--whitelist-file', '--whitelist_file',
help="Path to a whitelist file, this file "
"contains a separate regex on each "
"newline.")
list_selector.add_argument('--blacklist-file', '--blacklist_file',
help='Path to a blacklist file, this file '
'contains a separate regex exclude on '
'each newline')
list_selector.add_argument('--load-list', '--load_list',
help='Path to a non-regex whitelist file, '
'this file contains a seperate test '
'on each newline. This command'
'supports files created by the tempest'
'run ``--list-tests`` command')
# list only args
parser.add_argument('--list-tests', '-l', action='store_true',
help='List tests',
default=False)
# execution args
parser.add_argument('--concurrency', '-w',
help="The number of workers to use, defaults to "
"the number of cpus")
parallel = parser.add_mutually_exclusive_group()
parallel.add_argument('--parallel', dest='parallel',
action='store_true',
help='Run tests in parallel (this is the'
' default)')
parallel.add_argument('--serial', '-t', dest='parallel',
action='store_false',
help='Run tests serially')
parser.add_argument('--save-state', dest='state',
action='store_true',
help="To save the state of the cloud before "
"running tempest.")
# output args
parser.add_argument("--subunit", action='store_true',
help='Enable subunit v2 output')
parser.add_argument("--combine", action='store_true',
help='Combine the output of this run with the '
"previous run's as a combined stream in the "
"testr repository after it finish")
parser.set_defaults(parallel=True)
return parser
def _build_regex(self, parsed_args):
regex = ''
if parsed_args.smoke:
regex = 'smoke'
elif parsed_args.regex:
regex = parsed_args.regex
if parsed_args.whitelist_file or parsed_args.blacklist_file:
regex = regex_builder.construct_regex(parsed_args.blacklist_file,
parsed_args.whitelist_file,
regex, False)
return regex
def _build_options(self, parsed_args):
options = []
if parsed_args.subunit:
options.append("--subunit")
if parsed_args.parallel:
options.append("--parallel")
if parsed_args.concurrency:
options.append("--concurrency=%s" % parsed_args.concurrency)
if parsed_args.load_list:
options.append("--load-list=%s" % parsed_args.load_list)
return options
def _run(self, regex, options):
returncode = 0
argv = ['tempest', 'run', regex] + options
if '--subunit' in options:
returncode = run_argv(argv, sys.stdin, sys.stdout, sys.stderr)
else:
argv.append('--subunit')
stdin = io.StringIO()
stdout_r, stdout_w = os.pipe()
subunit_w = os.fdopen(stdout_w, 'wt')
subunit_r = os.fdopen(stdout_r)
returncodes = {}
def run_argv_thread():
returncodes['testr'] = run_argv(argv, stdin, subunit_w,
sys.stderr)
subunit_w.close()
run_thread = threading.Thread(target=run_argv_thread)
run_thread.start()
returncodes['subunit-trace'] = subunit_trace.trace(
subunit_r, sys.stdout, post_fails=True, print_failures=True)
run_thread.join()
subunit_r.close()
# python version of pipefail
if returncodes['testr']:
returncode = returncodes['testr']
elif returncodes['subunit-trace']:
returncode = returncodes['subunit-trace']
return returncode
| [
"os_testr.regex_builder.construct_regex",
"os_testr.subunit_trace.trace",
"sys.exit",
"oslo_serialization.jsonutils.dumps",
"os.path.isdir",
"tempfile.NamedTemporaryFile",
"io.StringIO",
"testrepository.commands.run_argv",
"os.path.isfile",
"os.path.dirname",
"tempest.common.credentials_factory.... | [((5855, 5900), 'os.path.join', 'os.path.join', (['top_level_path', '"""test_discover"""'], {}), "(top_level_path, 'test_discover')\n", (5867, 5900), False, 'import os\n'), ((8749, 8769), 'sys.exit', 'sys.exit', (['returncode'], {}), '(returncode)\n', (8757, 8769), False, 'import sys\n'), ((8951, 8996), 'tempest.cmd.cleanup_service.get_global_cleanup_services', 'cleanup_service.get_global_cleanup_services', ([], {}), '()\n', (8994, 8996), False, 'from tempest.cmd import cleanup_service\n'), ((5508, 5540), 'os.path.isdir', 'os.path.isdir', (['""".testrepository"""'], {}), "('.testrepository')\n", (5521, 5540), False, 'import os\n'), ((5567, 5629), 'testrepository.commands.run_argv', 'run_argv', (["['testr', 'init']", 'sys.stdin', 'sys.stdout', 'sys.stderr'], {}), "(['testr', 'init'], sys.stdin, sys.stdout, sys.stderr)\n", (5575, 5629), False, 'from testrepository.commands import run_argv\n'), ((5804, 5829), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5819, 5829), False, 'import os\n'), ((6377, 6431), 'tempest.cmd.workspace.WorkspaceManager', 'workspace.WorkspaceManager', (['parsed_args.workspace_path'], {}), '(parsed_args.workspace_path)\n', (6403, 6431), False, 'from tempest.cmd import workspace\n'), ((6804, 6818), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (6812, 6818), False, 'import os\n'), ((7100, 7129), 'os.path.isfile', 'os.path.isfile', (['""".testr.conf"""'], {}), "('.testr.conf')\n", (7114, 7129), False, 'import os\n'), ((7718, 7747), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (7745, 7747), False, 'import tempfile\n'), ((7774, 7852), 'testrepository.commands.run_argv', 'run_argv', (["['tempest', 'last', '--subunit']", 'sys.stdin', 'temp_stream', 'sys.stderr'], {}), "(['tempest', 'last', '--subunit'], sys.stdin, temp_stream, sys.stderr)\n", (7782, 7852), False, 'from testrepository.commands import run_argv\n'), ((8118, 8167), 
'testrepository.commands.run_argv', 'run_argv', (['argv', 'sys.stdin', 'sys.stdout', 'sys.stderr'], {}), '(argv, sys.stdin, sys.stdout, sys.stderr)\n', (8126, 8167), False, 'from testrepository.commands import run_argv\n'), ((8415, 8493), 'testrepository.commands.run_argv', 'run_argv', (["['tempest', 'last', '--subunit']", 'sys.stdin', 'temp_stream', 'sys.stderr'], {}), "(['tempest', 'last', '--subunit'], sys.stdin, temp_stream, sys.stderr)\n", (8423, 8493), False, 'from testrepository.commands import run_argv\n'), ((8624, 8711), 'testrepository.commands.run_argv', 'run_argv', (["['tempest', 'load', temp_stream.name]", 'sys.stdin', 'sys.stdout', 'sys.stderr'], {}), "(['tempest', 'load', temp_stream.name], sys.stdin, sys.stdout, sys.\n stderr)\n", (8632, 8711), False, 'from testrepository.commands import run_argv\n'), ((9051, 9097), 'tempest.common.credentials_factory.get_configured_admin_credentials', 'credentials.get_configured_admin_credentials', ([], {}), '()\n', (9095, 9097), True, 'from tempest.common import credentials_factory as credentials\n'), ((13898, 14002), 'os_testr.regex_builder.construct_regex', 'regex_builder.construct_regex', (['parsed_args.blacklist_file', 'parsed_args.whitelist_file', 'regex', '(False)'], {}), '(parsed_args.blacklist_file, parsed_args.\n whitelist_file, regex, False)\n', (13927, 14002), False, 'from os_testr import regex_builder\n'), ((14736, 14785), 'testrepository.commands.run_argv', 'run_argv', (['argv', 'sys.stdin', 'sys.stdout', 'sys.stderr'], {}), '(argv, sys.stdin, sys.stdout, sys.stderr)\n', (14744, 14785), False, 'from testrepository.commands import run_argv\n'), ((14857, 14870), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (14868, 14870), False, 'import io\n'), ((14904, 14913), 'os.pipe', 'os.pipe', ([], {}), '()\n', (14911, 14913), False, 'import os\n'), ((14938, 14963), 'os.fdopen', 'os.fdopen', (['stdout_w', '"""wt"""'], {}), "(stdout_w, 'wt')\n", (14947, 14963), False, 'import os\n'), ((14988, 15007), 
'os.fdopen', 'os.fdopen', (['stdout_r'], {}), '(stdout_r)\n', (14997, 15007), False, 'import os\n'), ((15265, 15305), 'threading.Thread', 'threading.Thread', ([], {'target': 'run_argv_thread'}), '(target=run_argv_thread)\n', (15281, 15305), False, 'import threading\n'), ((15380, 15465), 'os_testr.subunit_trace.trace', 'subunit_trace.trace', (['subunit_r', 'sys.stdout'], {'post_fails': '(True)', 'print_failures': '(True)'}), '(subunit_r, sys.stdout, post_fails=True, print_failures=True\n )\n', (15399, 15465), False, 'from os_testr import subunit_trace\n'), ((4829, 4857), 'os.path.abspath', 'os.path.abspath', (['config_file'], {}), '(config_file)\n', (4844, 4857), False, 'import os\n'), ((5707, 5727), 'sys.exit', 'sys.exit', (['returncode'], {}), '(returncode)\n', (5715, 5727), False, 'import sys\n'), ((6560, 6714), 'sys.exit', 'sys.exit', (['("The %r workspace isn\'t registered in %r. Use \'tempest init\' to register the workspace."\n % (parsed_args.workspace, workspace_mgr.path))'], {}), '(\n "The %r workspace isn\'t registered in %r. 
Use \'tempest init\' to register the workspace."\n % (parsed_args.workspace, workspace_mgr.path))\n', (6568, 6714), False, 'import sys\n'), ((7936, 7957), 'sys.exit', 'sys.exit', (['return_code'], {}), '(return_code)\n', (7944, 7957), False, 'import sys\n'), ((8335, 8355), 'sys.exit', 'sys.exit', (['returncode'], {}), '(returncode)\n', (8343, 8355), False, 'import sys\n'), ((8577, 8598), 'sys.exit', 'sys.exit', (['return_code'], {}), '(return_code)\n', (8585, 8598), False, 'import sys\n'), ((9513, 9579), 'oslo_serialization.jsonutils.dumps', 'json.dumps', (['data'], {'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "(data, sort_keys=True, indent=2, separators=(',', ': '))\n", (9523, 9579), True, 'from oslo_serialization import jsonutils as json\n'), ((15112, 15156), 'testrepository.commands.run_argv', 'run_argv', (['argv', 'stdin', 'subunit_w', 'sys.stderr'], {}), '(argv, stdin, subunit_w, sys.stderr)\n', (15120, 15156), False, 'from testrepository.commands import run_argv\n'), ((7555, 7566), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (7563, 7566), False, 'import sys\n')] |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import sys
import warnings
import ansible.constants
import ansible.errors
import ansible.utils
import pytest
from pprint import pprint
# The positive path test
def test_zos_tso_command_listuser(ansible_adhoc):
hosts = ansible_adhoc(inventory='localhost', connection='local')
print('--- hosts.all ---')
pprint(hosts.all)
pprint(hosts.all.options)
pprint(vars(hosts.all.options['inventory_manager']))
pprint(hosts.all.options['inventory_manager']._inventory.hosts)
hosts.all.options['inventory_manager']._inventory.hosts
results = hosts.localhost.zos_tso_command(commands=["LU"])
print('--- results.contacted ---')
pprint(results.contacted)
for result in results.contacted.values():
assert result.get("output")[0].get("rc") == 0
assert result.get("changed") is True
| [
"pprint.pprint"
] | [((414, 431), 'pprint.pprint', 'pprint', (['hosts.all'], {}), '(hosts.all)\n', (420, 431), False, 'from pprint import pprint\n'), ((436, 461), 'pprint.pprint', 'pprint', (['hosts.all.options'], {}), '(hosts.all.options)\n', (442, 461), False, 'from pprint import pprint\n'), ((523, 586), 'pprint.pprint', 'pprint', (["hosts.all.options['inventory_manager']._inventory.hosts"], {}), "(hosts.all.options['inventory_manager']._inventory.hosts)\n", (529, 586), False, 'from pprint import pprint\n'), ((753, 778), 'pprint.pprint', 'pprint', (['results.contacted'], {}), '(results.contacted)\n', (759, 778), False, 'from pprint import pprint\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author : <NAME>
# E-mail : <EMAIL>
# Description:
# Date : 15/10/2019: 22:13
# File Name : network
import argparse
import numpy as np
import torch
def worker_init_fn(pid):
np.random.seed(torch.initial_seed() % (2 ** 31 - 1))
def my_collate(batch):
batch = list(filter(lambda x: x is not None, batch))
return torch.utils.data.dataloader.default_collate(batch)
def parse():
parser = argparse.ArgumentParser(description="audio2height")
parser.add_argument("--tag", type=str, default="")
parser.add_argument("--epoch", type=int, default=500)
parser.add_argument("--mode", choices=["train", "test"], default="train")
parser.add_argument("--bs", type=int, default=10)
parser.add_argument("--hidden-dim", type=int, default=256)
parser.add_argument("--layer-num", type=int, default=1)
parser.add_argument("--lstm", action="store_true")
parser.add_argument("--cuda", action="store_true")
parser.add_argument("--gpu", type=int, default=0)
parser.add_argument("--bottle-train", type=str, default="0")
parser.add_argument("--bottle-test", type=str, default="")
parser.add_argument("--lr", type=float, default=0.0001)
parser.add_argument("--snr_db", type=float, required=True)
parser.add_argument("--mono-coe", type=float, default=0.001)
parser.add_argument("--load-model", type=str, default="")
parser.add_argument("--load-epoch", type=int, default=-1)
parser.add_argument("--model-path", type=str, default="./assets/learned_models", help="pre-trained model path")
parser.add_argument("--data-path", type=str, default="h5py_dataset", help="data path")
parser.add_argument("--log-interval", type=int, default=10)
parser.add_argument("--save-interval", type=int, default=10)
parser.add_argument("--robot", action="store_true")
parser.add_argument("--multi", action="store_true")
parser.add_argument("--minus_wrench_first", action="store_true")
parser.add_argument("--stft_force", action="store_true")
parser.add_argument("--bidirectional", action="store_true")
parser.add_argument("--draw_acc_fig", action="store_true")
parser.add_argument("--acc_fig_name", type=str, default="")
parser.add_argument("--multi-detail", choices=["2loss2rnn", "2loss1rnn", "1loss1rnn", "audio_only", "a_guide_f",
"a_f_early_fusion", "force_only", "1loss2rnn"], default="audio_only")
args = parser.parse_args()
if args.bottle_test == "":
args.bottle_test = args.bottle_train
if args.tag != "":
args.tag += "_"
base = args.tag + "{}_{}{}_h{}_bs{}_bottle{}to{}_mono_coe{}_snr{}_{}_{}_{}_{}"
tag = base.format("multi" if args.multi else "audio", "lstm" if args.lstm else "gru", args.layer_num,
args.hidden_dim, args.bs, args.bottle_train, args.bottle_test, args.mono_coe, args.snr_db,
args.multi_detail, "minus_wrench_first" if args.minus_wrench_first else "raw",
"stft_force" if args.stft_force else "raw_force",
"bidirectional" if args.bidirectional else "unidirectional")
args.tag = tag
args.acc_fig_name = "snr{}_{}".format(args.snr_db, "lstm" if args.lstm else "gru")
return args
| [
"torch.initial_seed",
"torch.utils.data.dataloader.default_collate",
"argparse.ArgumentParser"
] | [((389, 439), 'torch.utils.data.dataloader.default_collate', 'torch.utils.data.dataloader.default_collate', (['batch'], {}), '(batch)\n', (432, 439), False, 'import torch\n'), ((468, 519), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""audio2height"""'}), "(description='audio2height')\n", (491, 519), False, 'import argparse\n'), ((258, 278), 'torch.initial_seed', 'torch.initial_seed', ([], {}), '()\n', (276, 278), False, 'import torch\n')] |
import parsetools
from benchDesc import benchsDesc
import matplotlib.pyplot as plt
import matplotlib
import getopt, sys
try:
opts, args = getopt.getopt(sys.argv[1:], "h", ["arch="])
except getopt.GetoptError as err:
print(err)
sys.exit(2)
file_postfix = ""
for o,a in opts:
if o == "--arch":
if a == "simple":
file_postfix = file_postfix + "_simple"
elif a == "complex":
file_postfix = file_postfix + "_complex"
else:
print ("ERROR, the architecture must be either simple or complex")
p = parsetools.BoundedEventsCountParser()
res = p.parse_all_files("../log_2020_09/log")
res = benchsDesc.regrouping_parallel_res(res)
bounded_count = res
print("BOUNDED=", bounded_count)
p = parsetools.UnboundedEventsCountParser()
res = p.parse_all_files("../log_2020_09/log")
res = benchsDesc.regrouping_parallel_res(res)
unbounded_count = res
print("UNBOUNDED=", unbounded_count)
p = parsetools.WcetResParser()
res = p.parse_all_files("../log_2020_09/log_xddilp_15"+file_postfix)
res = benchsDesc.regrouping_parallel_res(res)
wcet_xdd = res
#add a single result
print(res)
print(len(res))
p = parsetools.WcetResParser()
res = p.parse_all_files("../log_2020_09/log_hlts_15"+file_postfix)
res = benchsDesc.regrouping_parallel_res(res)
wcet_hlts = res
print(res)
print(len(res))
p = parsetools.WcetResParser()
res = p.parse_all_files("../log_2020_09/log_WCETmax_15"+file_postfix)
res = benchsDesc.regrouping_parallel_res(res)
wcet_max = res
print(res)
print(len(res))
p = parsetools.WcetResParser()
res = p.parse_all_files("../log_2020_09/log_exhaustive_15"+file_postfix)
res = benchsDesc.regrouping_parallel_res(res)
wcet_exhau = res
print(res)
print(len(res))
x = list(range(1,len(res)+1))
print(x)
print("=======================================================")
BIGGER_SIZE = 11
BIGGER_BIGGER_SIZE=15
matplotlib.rc('font', size=BIGGER_SIZE) # controls default text sizes
matplotlib.rc('axes', titlesize=BIGGER_SIZE) # fontsize of the axes title
matplotlib.rc('axes', labelsize=BIGGER_SIZE) # fontsize of the x and y labels
matplotlib.rc('xtick', labelsize=BIGGER_SIZE) # fontsize of the tick labels
matplotlib.rc('ytick', labelsize=BIGGER_SIZE) # fontsize of the tick labels
matplotlib.rc('legend', fontsize=BIGGER_BIGGER_SIZE) # legend fontsize
matplotlib.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title
fig = plt.figure()
#unbound_ratio = [ float(x[1]) / float(x[1]+y[1]) for x,y in zip(unbounded_count,bounded_count)]
unbound_ratio = [( x[0], float(x[1]) / float(x[1]+y[1]) ) for x,y in zip(unbounded_count,bounded_count)]
unbound_ratio.sort(key = lambda i:i[1])
print("***************************")
print(unbound_ratio)
print("***************************")
label_order = [x[0] for x in unbound_ratio]
print(label_order)
unbound_ratio = [x[1] for x in unbound_ratio]
wcet_xdd.sort(key = lambda i: label_order.index(i[0]))
wcet_hlts.sort(key = lambda i: label_order.index(i[0]))
wcet_max.sort(key = lambda i: label_order.index(i[0]))
wcet_exhau.sort(key = lambda i: label_order.index(i[0]))
wcet_xdd = [x[1] for x in wcet_xdd]
wcet_hlts = [x[1] for x in wcet_hlts]
wcet_max = [x[1] for x in wcet_max]
wcet_exhau = [x[1] for x in wcet_exhau]
wcet_xdd = [(y-x)/y for x,y in zip(wcet_xdd,wcet_max)]
wcet_hlts = [(y-x)/y for x,y in zip(wcet_hlts,wcet_max)]
## Rounding, due to imprecision of Etime
wcet_hlts = [ 0.0 if x < 0.0 else x for x in wcet_hlts ]
wcet_exhau = [(y-x)/y for x,y in zip(wcet_exhau,wcet_max)]
print("=======================================================")
print(wcet_xdd)
print(len(res))
print("=======================================================")
print(wcet_exhau)
print(len(res))
print("=======================================================")
print(wcet_hlts)
print(len(res))
ax = fig.add_subplot(111)
width = 0.2
ax.bar([y-width for y in x],wcet_xdd,label='xdd',width=width, color ="1.0" , edgecolor='black')
ax.bar([y for y in x],wcet_exhau,label='exhaustive',width=width, color = "0.7", edgecolor='black')
ax.bar([y+width for y in x],wcet_hlts,label='Etime',width=width, color = "0",edgecolor='black')
#ax.bar([y+0.2 for y in x],wcet_max,label='MAX',width=0.5,color='darkgray')
ax.set_ylabel('WCET / WCET of max partitioning',fontsize=12)
#ax.set_xlabel('benchmark',fontsize=12)
ax.set_xticks(x)
ax.set_xticklabels(label_order,rotation=80)
ax.legend(loc='upper left')
#plt.yscale('log')
plt.ylim(top=0.6)
unbound_ratio = [x for x in unbound_ratio]
ax1 = ax.twinx()
ax1.set_ylabel("percentage on unbounded events")
ax1.plot(x,unbound_ratio,'o-',color='black')
plt.subplots_adjust(bottom=0.17,top=0.70,right=0.965,left=0.042)
plt.yticks(fontsize=15)
"""
plt.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=True, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False
) # labels along the bottom edge are off
"""
plt.show()
#ax = df.plot.scatter(x='evt',)
| [
"getopt.getopt",
"parsetools.BoundedEventsCountParser",
"parsetools.UnboundedEventsCountParser",
"benchDesc.benchsDesc.regrouping_parallel_res",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.yticks",
"matplotlib.rc",
"sys.exit",
"matplotlib.pyplot.ylim",
"parsetools.WcetResParser",
"matplotlib.... | [((569, 606), 'parsetools.BoundedEventsCountParser', 'parsetools.BoundedEventsCountParser', ([], {}), '()\n', (604, 606), False, 'import parsetools\n'), ((659, 698), 'benchDesc.benchsDesc.regrouping_parallel_res', 'benchsDesc.regrouping_parallel_res', (['res'], {}), '(res)\n', (693, 698), False, 'from benchDesc import benchsDesc\n'), ((757, 796), 'parsetools.UnboundedEventsCountParser', 'parsetools.UnboundedEventsCountParser', ([], {}), '()\n', (794, 796), False, 'import parsetools\n'), ((849, 888), 'benchDesc.benchsDesc.regrouping_parallel_res', 'benchsDesc.regrouping_parallel_res', (['res'], {}), '(res)\n', (883, 888), False, 'from benchDesc import benchsDesc\n'), ((954, 980), 'parsetools.WcetResParser', 'parsetools.WcetResParser', ([], {}), '()\n', (978, 980), False, 'import parsetools\n'), ((1056, 1095), 'benchDesc.benchsDesc.regrouping_parallel_res', 'benchsDesc.regrouping_parallel_res', (['res'], {}), '(res)\n', (1090, 1095), False, 'from benchDesc import benchsDesc\n'), ((1164, 1190), 'parsetools.WcetResParser', 'parsetools.WcetResParser', ([], {}), '()\n', (1188, 1190), False, 'import parsetools\n'), ((1264, 1303), 'benchDesc.benchsDesc.regrouping_parallel_res', 'benchsDesc.regrouping_parallel_res', (['res'], {}), '(res)\n', (1298, 1303), False, 'from benchDesc import benchsDesc\n'), ((1353, 1379), 'parsetools.WcetResParser', 'parsetools.WcetResParser', ([], {}), '()\n', (1377, 1379), False, 'import parsetools\n'), ((1456, 1495), 'benchDesc.benchsDesc.regrouping_parallel_res', 'benchsDesc.regrouping_parallel_res', (['res'], {}), '(res)\n', (1490, 1495), False, 'from benchDesc import benchsDesc\n'), ((1543, 1569), 'parsetools.WcetResParser', 'parsetools.WcetResParser', ([], {}), '()\n', (1567, 1569), False, 'import parsetools\n'), ((1649, 1688), 'benchDesc.benchsDesc.regrouping_parallel_res', 'benchsDesc.regrouping_parallel_res', (['res'], {}), '(res)\n', (1683, 1688), False, 'from benchDesc import benchsDesc\n'), ((1880, 1919), 
'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {'size': 'BIGGER_SIZE'}), "('font', size=BIGGER_SIZE)\n", (1893, 1919), False, 'import matplotlib\n'), ((1959, 2003), 'matplotlib.rc', 'matplotlib.rc', (['"""axes"""'], {'titlesize': 'BIGGER_SIZE'}), "('axes', titlesize=BIGGER_SIZE)\n", (1972, 2003), False, 'import matplotlib\n'), ((2037, 2081), 'matplotlib.rc', 'matplotlib.rc', (['"""axes"""'], {'labelsize': 'BIGGER_SIZE'}), "('axes', labelsize=BIGGER_SIZE)\n", (2050, 2081), False, 'import matplotlib\n'), ((2118, 2163), 'matplotlib.rc', 'matplotlib.rc', (['"""xtick"""'], {'labelsize': 'BIGGER_SIZE'}), "('xtick', labelsize=BIGGER_SIZE)\n", (2131, 2163), False, 'import matplotlib\n'), ((2197, 2242), 'matplotlib.rc', 'matplotlib.rc', (['"""ytick"""'], {'labelsize': 'BIGGER_SIZE'}), "('ytick', labelsize=BIGGER_SIZE)\n", (2210, 2242), False, 'import matplotlib\n'), ((2276, 2328), 'matplotlib.rc', 'matplotlib.rc', (['"""legend"""'], {'fontsize': 'BIGGER_BIGGER_SIZE'}), "('legend', fontsize=BIGGER_BIGGER_SIZE)\n", (2289, 2328), False, 'import matplotlib\n'), ((2350, 2396), 'matplotlib.rc', 'matplotlib.rc', (['"""figure"""'], {'titlesize': 'BIGGER_SIZE'}), "('figure', titlesize=BIGGER_SIZE)\n", (2363, 2396), False, 'import matplotlib\n'), ((2438, 2450), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2448, 2450), True, 'import matplotlib.pyplot as plt\n'), ((4460, 4477), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {'top': '(0.6)'}), '(top=0.6)\n', (4468, 4477), True, 'import matplotlib.pyplot as plt\n'), ((4633, 4699), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'bottom': '(0.17)', 'top': '(0.7)', 'right': '(0.965)', 'left': '(0.042)'}), '(bottom=0.17, top=0.7, right=0.965, left=0.042)\n', (4652, 4699), True, 'import matplotlib.pyplot as plt\n'), ((4698, 4721), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': '(15)'}), '(fontsize=15)\n', (4708, 4721), True, 'import matplotlib.pyplot as plt\n'), ((5075, 5085), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5083, 5085), True, 'import matplotlib.pyplot as plt\n'), ((143, 186), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""h"""', "['arch=']"], {}), "(sys.argv[1:], 'h', ['arch='])\n", (156, 186), False, 'import getopt, sys\n'), ((240, 251), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (248, 251), False, 'import getopt, sys\n')] |
""" Assemblies List View. """
import logging
from datetime import datetime
from cornice.service import Service
from fabrikApi.models.assembly import DBAssembly
from fabrikApi.models.mixins import arrow
# from fabrikApi.util.cors import CORS_LOCATION, CORS_MAX_AGE
logger = logging.getLogger(__name__)
# SERVICES
assemblies = Service(cors_origins=('*',),
name='assemblies',
description='List Assemblies.',
path='/assemblies')
@assemblies.get(permission='public')
def get_assemblies(request):
"""Returns all assemblies which are either public or accessible by the current user.
"""
# load all active assemblies
# TODO: filter only active assemblies
assemblies = request.dbsession.query(DBAssembly).all()
for assembly in assemblies:
# assembly.patch()
assembly.setup_lineage(request)
# show only assemblies with at least view permission.
assemblies = list(
filter(lambda assembly: request.has_public_permission(assembly),
assemblies)
)
assemblies = {v.identifier: v for v in assemblies}
return({
'assemblies': assemblies,
'access_date': arrow.utcnow()
})
| [
"logging.getLogger",
"cornice.service.Service",
"fabrikApi.models.mixins.arrow.utcnow"
] | [((277, 304), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (294, 304), False, 'import logging\n'), ((331, 435), 'cornice.service.Service', 'Service', ([], {'cors_origins': "('*',)", 'name': '"""assemblies"""', 'description': '"""List Assemblies."""', 'path': '"""/assemblies"""'}), "(cors_origins=('*',), name='assemblies', description=\n 'List Assemblies.', path='/assemblies')\n", (338, 435), False, 'from cornice.service import Service\n'), ((1159, 1173), 'fabrikApi.models.mixins.arrow.utcnow', 'arrow.utcnow', ([], {}), '()\n', (1171, 1173), False, 'from fabrikApi.models.mixins import arrow\n')] |
import argparse
import numpy as np
import os
import random
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from datasets import custom_collate_fn, load_data, WebDataset
from models import WebObjExtractionNet
from train import train_model, evaluate_model
from utils import print_and_log
########## CMDLINE ARGS ##########
parser = argparse.ArgumentParser('Train Model')
parser.add_argument('-d', '--device', type=int, default=0)
parser.add_argument('-e', '--n_epochs', type=int, default=100)
parser.add_argument('-bb', '--backbone', type=str, default='alexnet', choices=['alexnet', 'resnet'])
parser.add_argument('-tc', '--trainable_convnet', type=int, default=1, choices=[0,1])
parser.add_argument('-lr', '--learning_rate', type=float, default=0.0005)
parser.add_argument('-bs', '--batch_size', type=int, default=25)
parser.add_argument('-cs', '--context_size', type=int, default=6)
parser.add_argument('-att', '--attention', type=int, default=1, choices=[0,1])
parser.add_argument('-hd', '--hidden_dim', type=int, default=300)
parser.add_argument('-r', '--roi', type=int, default=1)
parser.add_argument('-bbf', '--bbox_feat', type=int, default=1, choices=[0,1])
parser.add_argument('-wd', '--weight_decay', type=float, default=0)
parser.add_argument('-dp', '--drop_prob', type=float, default=0.5)
parser.add_argument('-mbb', '--max_bg_boxes', type=int, default=-1)
parser.add_argument('-nw', '--num_workers', type=int, default=8)
args = parser.parse_args()
device = torch.device('cuda:%d' % args.device if torch.cuda.is_available() else 'cpu')
########## MAKING RESULTS REPRODUCIBLE ##########
seed = 1
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
# torch.backends.cudnn.deterministic = True
# torch.backends.cudnn.benchmark = False
########## PARAMETERS ##########
N_CLASSES = 4
CLASS_NAMES = ['BG', 'Price', 'Title', 'Image']
IMG_HEIGHT = 1280 # Image assumed to have same height and width
EVAL_INTERVAL = 3 # Number of Epochs after which model is evaluated
NUM_WORKERS = args.num_workers # multithreaded data loading
DATA_DIR = '/shared/data_product_info/v2_8.3k/' # Contains .png and .pkl files for train and test data
OUTPUT_DIR = 'results_attn' # logs are saved here!
# NOTE: if same hyperparameter configuration is run again, previous log file and saved model will be overwritten
if not os.path.exists(OUTPUT_DIR):
# ---- Output locations & data splits -------------------------------------
# NOTE(review): this is the tail of a larger training script; OUTPUT_DIR,
# DATA_DIR, args, device, IMG_HEIGHT, N_CLASSES, CLASS_NAMES, NUM_WORKERS and
# EVAL_INTERVAL are defined earlier in the file — confirm against the full
# script.
os.makedirs(OUTPUT_DIR)

SPLIT_DIR = 'splits'
train_img_ids = np.loadtxt('%s/train_imgs.txt' % SPLIT_DIR, dtype=np.int32)
val_img_ids = np.loadtxt('%s/val_imgs.txt' % SPLIT_DIR, dtype=np.int32)
test_img_ids = np.loadtxt('%s/test_imgs.txt' % SPLIT_DIR, dtype=np.int32)
test_domains = np.loadtxt('%s/test_domains.txt' % SPLIT_DIR, dtype=str)  # for calculating macro accuracy

########## HYPERPARAMETERS ##########
N_EPOCHS = args.n_epochs
BACKBONE = args.backbone
TRAINABLE_CONVNET = bool(args.trainable_convnet)
LEARNING_RATE = args.learning_rate
BATCH_SIZE = args.batch_size
CONTEXT_SIZE = args.context_size
USE_ATTENTION = bool(args.attention)
HIDDEN_DIM = args.hidden_dim
ROI_POOL_OUTPUT_SIZE = (args.roi, args.roi)
USE_BBOX_FEAT = bool(args.bbox_feat)
WEIGHT_DECAY = args.weight_decay
DROP_PROB = args.drop_prob
MAX_BG_BOXES = args.max_bg_boxes if args.max_bg_boxes > 0 else -1

# Encode the hyperparameter configuration in every output filename so runs
# with different settings never overwrite each other.
params = '%s lr-%.0e batch-%d cs-%d att-%d hd-%d roi-%d bbf-%d wd-%.0e dp-%.2f mbb-%d' % (BACKBONE, LEARNING_RATE, BATCH_SIZE, CONTEXT_SIZE, USE_ATTENTION,
    HIDDEN_DIM, ROI_POOL_OUTPUT_SIZE[0], USE_BBOX_FEAT, WEIGHT_DECAY, DROP_PROB, MAX_BG_BOXES)
log_file = '%s/%s logs.txt' % (OUTPUT_DIR, params)
test_acc_domainwise_file = '%s/%s test_acc_domainwise.csv' % (OUTPUT_DIR, params)
model_save_file = '%s/%s saved_model.pth' % (OUTPUT_DIR, params)

print('logs will be saved in \"%s\"' % (log_file))
print_and_log('Backbone Convnet: %s' % (BACKBONE), log_file, 'w')
print_and_log('Trainable Convnet: %s' % (TRAINABLE_CONVNET), log_file)
print_and_log('Learning Rate: %.0e' % (LEARNING_RATE), log_file)
print_and_log('Batch Size: %d' % (BATCH_SIZE), log_file)
print_and_log('Context Size: %d' % (CONTEXT_SIZE), log_file)
print_and_log('Attention: %s' % (USE_ATTENTION), log_file)
print_and_log('Hidden Dim: %d' % (HIDDEN_DIM), log_file)
print_and_log('RoI Pool Output Size: (%d, %d)' % ROI_POOL_OUTPUT_SIZE, log_file)
print_and_log('BBox Features: %s' % (USE_BBOX_FEAT), log_file)
print_and_log('Weight Decay: %.0e' % (WEIGHT_DECAY), log_file)
print_and_log('Dropout Probability: %.2f' % (DROP_PROB), log_file)
print_and_log('Max BG Boxes: %d\n' % (MAX_BG_BOXES), log_file)

########## DATA LOADERS ##########
train_loader, val_loader, test_loader = load_data(DATA_DIR, train_img_ids, val_img_ids, test_img_ids, CONTEXT_SIZE, BATCH_SIZE, NUM_WORKERS, MAX_BG_BOXES)

########## CREATE MODEL & LOSS FN ##########
model = WebObjExtractionNet(ROI_POOL_OUTPUT_SIZE, IMG_HEIGHT, N_CLASSES, BACKBONE, USE_ATTENTION, HIDDEN_DIM, TRAINABLE_CONVNET, DROP_PROB,
    USE_BBOX_FEAT, CLASS_NAMES).to(device)
optimizer = optim.Adam(model.parameters(), lr=LEARNING_RATE, weight_decay=WEIGHT_DECAY)
# Sum (not mean) reduction: losses are normalized explicitly inside the
# train/eval helpers.
criterion = nn.CrossEntropyLoss(reduction='sum').to(device)

########## TRAIN MODEL ##########
train_model(model, train_loader, optimizer, criterion, N_EPOCHS, device, val_loader, EVAL_INTERVAL, log_file, 'ckpt_%d.pth' % args.device)

########## EVALUATE TEST PERFORMANCE ##########
print('Evaluating test data class wise accuracies...')
evaluate_model(model, test_loader, criterion, device, 'TEST', log_file)

with open(test_acc_domainwise_file, 'w') as f:
    f.write('Domain,N_examples,%s,%s,%s\n' % (CLASS_NAMES[1], CLASS_NAMES[2], CLASS_NAMES[3]))

print('Evaluating per domain accuracy for %d test domains...' % len(test_domains))
for domain in test_domains:
    print('\n---> Domain:', domain)
    test_dataset = WebDataset(DATA_DIR, np.loadtxt('%s/domain_wise_imgs/%s.txt' % (SPLIT_DIR, domain), np.int32).reshape(-1), CONTEXT_SIZE, max_bg_boxes=-1)
    test_loader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=NUM_WORKERS, collate_fn=custom_collate_fn, drop_last=False)
    per_class_acc = evaluate_model(model, test_loader, criterion, device, 'TEST')
    with open(test_acc_domainwise_file, 'a') as f:
        f.write('%s,%d,%.2f,%.2f,%.2f\n' % (domain, len(test_dataset), 100*per_class_acc[1], 100*per_class_acc[2], 100*per_class_acc[3]))

# Columns 2+ of the CSV hold the per-class accuracies; averaging over domain
# rows gives the macro accuracy per class.
macro_acc_test = np.loadtxt(test_acc_domainwise_file, delimiter=',', skiprows=1, dtype=str)[:,2:].astype(np.float32).mean(0)
for i in range(1, len(CLASS_NAMES)):
    print_and_log('%s Macro Acc: %.2f%%' % (CLASS_NAMES[i], macro_acc_test[i-1]), log_file)

########## SAVE MODEL ##########
torch.save(model.state_dict(), model_save_file)
print_and_log('Model can be restored from \"%s\"' % (model_save_file), log_file)
| [
"torch.manual_seed",
"os.path.exists",
"train.train_model",
"argparse.ArgumentParser",
"os.makedirs",
"utils.print_and_log",
"models.WebObjExtractionNet",
"torch.nn.CrossEntropyLoss",
"random.seed",
"numpy.loadtxt",
"torch.cuda.is_available",
"numpy.random.seed",
"torch.utils.data.DataLoader... | [((388, 426), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Train Model"""'], {}), "('Train Model')\n", (411, 426), False, 'import argparse\n'), ((1664, 1681), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1675, 1681), False, 'import random\n'), ((1682, 1702), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1696, 1702), True, 'import numpy as np\n'), ((1703, 1726), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (1720, 1726), False, 'import torch\n'), ((1727, 1755), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (1749, 1755), False, 'import torch\n'), ((2499, 2558), 'numpy.loadtxt', 'np.loadtxt', (["('%s/train_imgs.txt' % SPLIT_DIR)"], {'dtype': 'np.int32'}), "('%s/train_imgs.txt' % SPLIT_DIR, dtype=np.int32)\n", (2509, 2558), True, 'import numpy as np\n'), ((2573, 2630), 'numpy.loadtxt', 'np.loadtxt', (["('%s/val_imgs.txt' % SPLIT_DIR)"], {'dtype': 'np.int32'}), "('%s/val_imgs.txt' % SPLIT_DIR, dtype=np.int32)\n", (2583, 2630), True, 'import numpy as np\n'), ((2646, 2704), 'numpy.loadtxt', 'np.loadtxt', (["('%s/test_imgs.txt' % SPLIT_DIR)"], {'dtype': 'np.int32'}), "('%s/test_imgs.txt' % SPLIT_DIR, dtype=np.int32)\n", (2656, 2704), True, 'import numpy as np\n'), ((2721, 2777), 'numpy.loadtxt', 'np.loadtxt', (["('%s/test_domains.txt' % SPLIT_DIR)"], {'dtype': 'str'}), "('%s/test_domains.txt' % SPLIT_DIR, dtype=str)\n", (2731, 2777), True, 'import numpy as np\n'), ((3821, 3884), 'utils.print_and_log', 'print_and_log', (["('Backbone Convnet: %s' % BACKBONE)", 'log_file', '"""w"""'], {}), "('Backbone Convnet: %s' % BACKBONE, log_file, 'w')\n", (3834, 3884), False, 'from utils import print_and_log\n'), ((3887, 3955), 'utils.print_and_log', 'print_and_log', (["('Trainable Convnet: %s' % TRAINABLE_CONVNET)", 'log_file'], {}), "('Trainable Convnet: %s' % TRAINABLE_CONVNET, log_file)\n", (3900, 3955), False, 'from utils import 
print_and_log\n'), ((3958, 4020), 'utils.print_and_log', 'print_and_log', (["('Learning Rate: %.0e' % LEARNING_RATE)", 'log_file'], {}), "('Learning Rate: %.0e' % LEARNING_RATE, log_file)\n", (3971, 4020), False, 'from utils import print_and_log\n'), ((4023, 4077), 'utils.print_and_log', 'print_and_log', (["('Batch Size: %d' % BATCH_SIZE)", 'log_file'], {}), "('Batch Size: %d' % BATCH_SIZE, log_file)\n", (4036, 4077), False, 'from utils import print_and_log\n'), ((4080, 4138), 'utils.print_and_log', 'print_and_log', (["('Context Size: %d' % CONTEXT_SIZE)", 'log_file'], {}), "('Context Size: %d' % CONTEXT_SIZE, log_file)\n", (4093, 4138), False, 'from utils import print_and_log\n'), ((4141, 4197), 'utils.print_and_log', 'print_and_log', (["('Attention: %s' % USE_ATTENTION)", 'log_file'], {}), "('Attention: %s' % USE_ATTENTION, log_file)\n", (4154, 4197), False, 'from utils import print_and_log\n'), ((4200, 4254), 'utils.print_and_log', 'print_and_log', (["('Hidden Dim: %d' % HIDDEN_DIM)", 'log_file'], {}), "('Hidden Dim: %d' % HIDDEN_DIM, log_file)\n", (4213, 4254), False, 'from utils import print_and_log\n'), ((4257, 4342), 'utils.print_and_log', 'print_and_log', (["('RoI Pool Output Size: (%d, %d)' % ROI_POOL_OUTPUT_SIZE)", 'log_file'], {}), "('RoI Pool Output Size: (%d, %d)' % ROI_POOL_OUTPUT_SIZE, log_file\n )\n", (4270, 4342), False, 'from utils import print_and_log\n'), ((4338, 4398), 'utils.print_and_log', 'print_and_log', (["('BBox Features: %s' % USE_BBOX_FEAT)", 'log_file'], {}), "('BBox Features: %s' % USE_BBOX_FEAT, log_file)\n", (4351, 4398), False, 'from utils import print_and_log\n'), ((4401, 4461), 'utils.print_and_log', 'print_and_log', (["('Weight Decay: %.0e' % WEIGHT_DECAY)", 'log_file'], {}), "('Weight Decay: %.0e' % WEIGHT_DECAY, log_file)\n", (4414, 4461), False, 'from utils import print_and_log\n'), ((4464, 4528), 'utils.print_and_log', 'print_and_log', (["('Dropout Probability: %.2f' % DROP_PROB)", 'log_file'], {}), "('Dropout Probability: 
%.2f' % DROP_PROB, log_file)\n", (4477, 4528), False, 'from utils import print_and_log\n'), ((4531, 4591), 'utils.print_and_log', 'print_and_log', (["('Max BG Boxes: %d\\n' % MAX_BG_BOXES)", 'log_file'], {}), "('Max BG Boxes: %d\\n' % MAX_BG_BOXES, log_file)\n", (4544, 4591), False, 'from utils import print_and_log\n'), ((4670, 4788), 'datasets.load_data', 'load_data', (['DATA_DIR', 'train_img_ids', 'val_img_ids', 'test_img_ids', 'CONTEXT_SIZE', 'BATCH_SIZE', 'NUM_WORKERS', 'MAX_BG_BOXES'], {}), '(DATA_DIR, train_img_ids, val_img_ids, test_img_ids, CONTEXT_SIZE,\n BATCH_SIZE, NUM_WORKERS, MAX_BG_BOXES)\n', (4679, 4788), False, 'from datasets import custom_collate_fn, load_data, WebDataset\n'), ((5222, 5364), 'train.train_model', 'train_model', (['model', 'train_loader', 'optimizer', 'criterion', 'N_EPOCHS', 'device', 'val_loader', 'EVAL_INTERVAL', 'log_file', "('ckpt_%d.pth' % args.device)"], {}), "(model, train_loader, optimizer, criterion, N_EPOCHS, device,\n val_loader, EVAL_INTERVAL, log_file, 'ckpt_%d.pth' % args.device)\n", (5233, 5364), False, 'from train import train_model, evaluate_model\n'), ((5465, 5536), 'train.evaluate_model', 'evaluate_model', (['model', 'test_loader', 'criterion', 'device', '"""TEST"""', 'log_file'], {}), "(model, test_loader, criterion, device, 'TEST', log_file)\n", (5479, 5536), False, 'from train import train_model, evaluate_model\n'), ((6741, 6817), 'utils.print_and_log', 'print_and_log', (['(\'Model can be restored from "%s"\' % model_save_file)', 'log_file'], {}), '(\'Model can be restored from "%s"\' % model_save_file, log_file)\n', (6754, 6817), False, 'from utils import print_and_log\n'), ((2405, 2431), 'os.path.exists', 'os.path.exists', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (2419, 2431), False, 'import os\n'), ((2437, 2460), 'os.makedirs', 'os.makedirs', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (2448, 2460), False, 'import os\n'), ((6004, 6134), 'torch.utils.data.DataLoader', 'DataLoader', (['test_dataset'], 
{'batch_size': '(1)', 'shuffle': '(False)', 'num_workers': 'NUM_WORKERS', 'collate_fn': 'custom_collate_fn', 'drop_last': '(False)'}), '(test_dataset, batch_size=1, shuffle=False, num_workers=\n NUM_WORKERS, collate_fn=custom_collate_fn, drop_last=False)\n', (6014, 6134), False, 'from torch.utils.data import DataLoader\n'), ((6151, 6212), 'train.evaluate_model', 'evaluate_model', (['model', 'test_loader', 'criterion', 'device', '"""TEST"""'], {}), "(model, test_loader, criterion, device, 'TEST')\n", (6165, 6212), False, 'from train import train_model, evaluate_model\n'), ((6571, 6665), 'utils.print_and_log', 'print_and_log', (["('%s Macro Acc: %.2f%%' % (CLASS_NAMES[i], macro_acc_test[i - 1]))", 'log_file'], {}), "('%s Macro Acc: %.2f%%' % (CLASS_NAMES[i], macro_acc_test[i - \n 1]), log_file)\n", (6584, 6665), False, 'from utils import print_and_log\n'), ((1566, 1591), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1589, 1591), False, 'import torch\n'), ((4839, 5006), 'models.WebObjExtractionNet', 'WebObjExtractionNet', (['ROI_POOL_OUTPUT_SIZE', 'IMG_HEIGHT', 'N_CLASSES', 'BACKBONE', 'USE_ATTENTION', 'HIDDEN_DIM', 'TRAINABLE_CONVNET', 'DROP_PROB', 'USE_BBOX_FEAT', 'CLASS_NAMES'], {}), '(ROI_POOL_OUTPUT_SIZE, IMG_HEIGHT, N_CLASSES, BACKBONE,\n USE_ATTENTION, HIDDEN_DIM, TRAINABLE_CONVNET, DROP_PROB, USE_BBOX_FEAT,\n CLASS_NAMES)\n', (4858, 5006), False, 'from models import WebObjExtractionNet\n'), ((5139, 5175), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'reduction': '"""sum"""'}), "(reduction='sum')\n", (5158, 5175), True, 'import torch.nn as nn\n'), ((5869, 5941), 'numpy.loadtxt', 'np.loadtxt', (["('%s/domain_wise_imgs/%s.txt' % (SPLIT_DIR, domain))", 'np.int32'], {}), "('%s/domain_wise_imgs/%s.txt' % (SPLIT_DIR, domain), np.int32)\n", (5879, 5941), True, 'import numpy as np\n'), ((6422, 6496), 'numpy.loadtxt', 'np.loadtxt', (['test_acc_domainwise_file'], {'delimiter': '""","""', 'skiprows': '(1)', 'dtype': 'str'}), 
"(test_acc_domainwise_file, delimiter=',', skiprows=1, dtype=str)\n", (6432, 6496), True, 'import numpy as np\n')] |
import numpy as np
from typing import Union, Optional, List, Dict, Any
from buffers.chunk_buffer import ChunkReplayBuffer
class IntrospectiveChunkReplayBuffer(ChunkReplayBuffer):
    """Chunk replay buffer that tracks how often and how early slots are sampled.

    ``sample_counts[i]`` counts how many times slot ``i`` was drawn;
    ``first_access[i]`` holds the buffer write position (``self.pos``) at the
    moment slot ``i`` was first drawn, or -1 if it was never sampled.
    """

    def __init__(self, buffer_size: int, *args, **kwargs):
        super().__init__(buffer_size, *args, **kwargs)
        # np.int was deprecated and removed in NumPy 1.24; the builtin int
        # maps to the same default integer dtype.
        self.sample_counts = np.zeros((buffer_size,), dtype=int)
        self.first_access = np.zeros((buffer_size,), dtype=int) - 1

    def _log_indices(self, indices):
        """Update sampling statistics for a freshly drawn batch of indices."""
        self.sample_counts[indices] += 1
        mask = np.zeros_like(self.first_access, dtype=bool)
        mask[indices] = True
        # Stamp slots that are sampled for the very first time with the
        # current write position.
        self.first_access[(self.first_access == -1) & mask] = self.pos

    def add(self,
            obs: np.ndarray,
            next_obs: np.ndarray,
            action: np.ndarray,
            reward: np.ndarray,
            done: np.ndarray,
            infos: List[Dict[str, Any]]
            ):
        """Delegate insertion to the parent buffer unchanged."""
        super().add(obs, next_obs, action, reward, done, infos)

    def _get_chunk_batches(self, beginnings):
        """Sample chunk batches via the parent, logging every touched slot."""
        sampled_indices = super()._get_chunk_batches(beginnings)
        self._log_indices(sampled_indices.flatten())
        return sampled_indices
| [
"numpy.zeros",
"numpy.zeros_like"
] | [((333, 371), 'numpy.zeros', 'np.zeros', (['(buffer_size,)'], {'dtype': 'np.int'}), '((buffer_size,), dtype=np.int)\n', (341, 371), True, 'import numpy as np\n'), ((537, 581), 'numpy.zeros_like', 'np.zeros_like', (['self.first_access'], {'dtype': 'bool'}), '(self.first_access, dtype=bool)\n', (550, 581), True, 'import numpy as np\n'), ((400, 438), 'numpy.zeros', 'np.zeros', (['(buffer_size,)'], {'dtype': 'np.int'}), '((buffer_size,), dtype=np.int)\n', (408, 438), True, 'import numpy as np\n')] |
# This file is executed on every boot (including wake-boot from deepsleep)
import esp
import gc
import machine
import network
# Silence vendor OS debug messages on the UART.
esp.osdebug(None)
# machine.freq(160000000)
def do_connect(wifi_name, wifi_pass):
    """Bring up an open access point and, if found, join ``wifi_name``.

    Always starts an AP called 'microsonar' (deliberately open — pass a
    password and a secure authmode to ``ap_if.config`` to lock it down).
    Then scans for ``wifi_name``; if visible, connects as a station with
    ``wifi_pass``, otherwise powers the station interface back down.
    """
    ssid = 'microsonar'
    ap_if = network.WLAN(network.AP_IF)
    ap_if.active(True)
    ap_if.config(essid=ssid, authmode=network.AUTH_OPEN)
    # Busy-wait until the AP interface reports active.
    while not ap_if.active():
        pass
    print('Access Point created')
    print(ap_if.ifconfig())

    wlan = network.WLAN(network.STA_IF)
    wlan.active(True)
    wlans = wlan.scan()
    if wifi_name in str(wlans):
        print('connecting to network...')
        wlan.connect(wifi_name, wifi_pass)
        # Busy-wait until the station is associated.
        while not wlan.isconnected():
            pass
        print('network config:', wlan.ifconfig())
    else:
        # Target network not in range: disable the station interface.
        wlan.active(False)
# Configure GPIO2 as an output and switch it off.
machine.Pin(2, machine.Pin.OUT).off()
# NOTE(review): hard-coded Wi-Fi credentials in source — consider moving them
# to a separate config file.
do_connect('royter', 'traveller22')
# Reclaim memory after network setup.
gc.collect()
print('wifi connected')
| [
"machine.Pin",
"network.WLAN",
"esp.osdebug",
"gc.collect"
] | [((127, 144), 'esp.osdebug', 'esp.osdebug', (['None'], {}), '(None)\n', (138, 144), False, 'import esp\n'), ((962, 974), 'gc.collect', 'gc.collect', ([], {}), '()\n', (972, 974), False, 'import gc\n'), ((277, 304), 'network.WLAN', 'network.WLAN', (['network.AP_IF'], {}), '(network.AP_IF)\n', (289, 304), False, 'import network\n'), ((552, 580), 'network.WLAN', 'network.WLAN', (['network.STA_IF'], {}), '(network.STA_IF)\n', (564, 580), False, 'import network\n'), ((888, 919), 'machine.Pin', 'machine.Pin', (['(2)', 'machine.Pin.OUT'], {}), '(2, machine.Pin.OUT)\n', (899, 919), False, 'import machine\n')] |
# 02_blink_twice.py
# From the code for the Electronics Starter Kit for the Raspberry Pi by MonkMakes.com
import RPi.GPIO as GPIO
import time
def word_separation(pin):
    """Hold the LED off for the inter-word gap (7 time units)."""
    sleep_time = 7
    GPIO.output(pin, False)  # LED off for the whole gap
    time.sleep(sleep_time)
def pulse(pin, length="dot"):
    """Flash one Morse element on ``pin``.

    length: "dot" (1 unit on), "dash" (3 units on), or "stop" (no flash,
    3 units off, marking the end of a letter). Dot/dash elements are
    followed by a 1-unit off gap.
    """
    pulse_time = 0
    sleep_time = 1
    if length == "dash":
        pulse_time = 3
    elif length == "dot":
        pulse_time = 1
    elif length == "stop":
        sleep_time = 3
    if length != 'stop':
        GPIO.output(pin, True)   # LED on for the element duration
        time.sleep(pulse_time)
    GPIO.output(pin, False)  # LED off for the trailing gap
    time.sleep(sleep_time)
# International Morse code; each sequence ends with "stop" to mark the
# letter gap. Built once at import time instead of on every call.
_MORSE_CODE = {
    'a': ['dot', 'dash', 'stop'],
    'b': ['dash', 'dot', 'dot', 'dot', 'stop'],
    'c': ['dash', 'dot', 'dash', 'dot', 'stop'],
    'd': ['dash', 'dot', 'dot', 'stop'],
    'e': ['dot', 'stop'],
    'f': ['dot', 'dot', 'dash', 'dot', 'stop'],
    'g': ['dash', 'dash', 'dot', 'stop'],
    'h': ['dot', 'dot', 'dot', 'dot', 'stop'],
    'i': ['dot', 'dot', 'stop'],
    'j': ['dot', 'dash', 'dash', 'dash', 'stop'],
    'k': ['dash', 'dot', 'dash', 'stop'],
    'l': ['dot', 'dash', 'dot', 'dot', 'stop'],
    'm': ['dash', 'dash', 'stop'],
    'n': ['dash', 'dot', 'stop'],
    'o': ['dash', 'dash', 'dash', 'stop'],
    'p': ['dot', 'dash', 'dash', 'dot', 'stop'],
    'q': ['dash', 'dash', 'dot', 'dash', 'stop'],
    'r': ['dot', 'dash', 'dot', 'stop'],
    's': ['dot', 'dot', 'dot', 'stop'],
    't': ['dash', 'stop'],
    'u': ['dot', 'dot', 'dash', 'stop'],
    'v': ['dot', 'dot', 'dot', 'dash', 'stop'],
    'w': ['dot', 'dash', 'dash', 'stop'],
    'x': ['dash', 'dot', 'dot', 'dash', 'stop'],
    'y': ['dash', 'dot', 'dash', 'dash', 'stop'],
    'z': ['dash', 'dash', 'dot', 'dot', 'stop'],
}


def get_morse_dictionary(letter):
    """Return the Morse element sequence for a lowercase letter a-z.

    Raises KeyError for any other character (same as the original lookup).
    """
    return _MORSE_CODE[letter]
def pulse_letter(letter, pin):
    """Blink the Morse sequence for ``letter``; a space gives the word gap."""
    if letter == ' ':
        word_separation(pin)
    else:
        for beep in get_morse_dictionary(letter):
            print(beep)
            pulse(pin, beep)
# Configure the Pi to use the BCM (Broadcom) pin names, rather than the pin positions
GPIO.setmode(GPIO.BCM)

red_pin1 = 18
GPIO.setup(red_pin1, GPIO.OUT)

try:
    words = input('Enter a word: ')
    for letter in words:
        pulse_letter(letter, red_pin1)
finally:
    print("Cleaning up")
    GPIO.cleanup()

# The try: finally: construct makes sure that when you CTRL-c the program to
# end it, all the pins are set back to being inputs. This helps protect your
# Pi from accidental short-circuits if something metal touches the GPIO pins.
| [
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"RPi.GPIO.output",
"time.sleep",
"RPi.GPIO.setmode"
] | [((2520, 2542), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (2532, 2542), True, 'import RPi.GPIO as GPIO\n'), ((2557, 2587), 'RPi.GPIO.setup', 'GPIO.setup', (['red_pin1', 'GPIO.OUT'], {}), '(red_pin1, GPIO.OUT)\n', (2567, 2587), True, 'import RPi.GPIO as GPIO\n'), ((195, 218), 'RPi.GPIO.output', 'GPIO.output', (['pin', '(False)'], {}), '(pin, False)\n', (206, 218), True, 'import RPi.GPIO as GPIO\n'), ((258, 280), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (268, 280), False, 'import time\n'), ((666, 689), 'RPi.GPIO.output', 'GPIO.output', (['pin', '(False)'], {}), '(pin, False)\n', (677, 689), True, 'import RPi.GPIO as GPIO\n'), ((729, 751), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (739, 751), False, 'import time\n'), ((2736, 2750), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (2748, 2750), True, 'import RPi.GPIO as GPIO\n'), ((537, 559), 'RPi.GPIO.output', 'GPIO.output', (['pin', '(True)'], {}), '(pin, True)\n', (548, 559), True, 'import RPi.GPIO as GPIO\n'), ((603, 625), 'time.sleep', 'time.sleep', (['pulse_time'], {}), '(pulse_time)\n', (613, 625), False, 'import time\n')] |
""" Identify low-level jets in wind profile data.
<NAME>
December 2020
"""
import numpy as np
import xarray as xr
def detect_llj(x, axis=None, falloff=0, output='strength', inverse=False):
    """Identify maxima (low-level jets) in wind profiles.

    args:
        - x : ndarray with wind profile data
        - axis : specifies the vertical dimension;
          internally used with np.apply_along_axis
        - falloff : kept for interface compatibility; unused here — mask the
          result afterwards instead, e.g. ``llj[falloff > 2.0]``
        - output : 'strength' or 'index'
        - inverse : flip the profiles first (if height decreases along axis)

    returns (depending on ``output``):
        - 'strength' : 0 if no maximum identified, otherwise falloff strength
        - 'index'    : 0 if no maximum identified, otherwise index along
          ``axis``, to get the height of the jet etc.

    raises:
        ValueError for an unknown ``output`` argument (previously this
        surfaced as an UnboundLocalError deep inside apply_along_axis).
    """
    if output not in ('strength', 'index'):
        raise ValueError('Invalid argument for <output>: %s' % output)

    def inner(x, output):
        if inverse:
            x = x[::-1, ...]
        # Identify local maxima: negative slope ahead, non-negative behind.
        x = x[~np.isnan(x)]
        dx = x[1:] - x[:-1]
        ind = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) >= 0))[0]
        # The last value of x cannot be a low-level jet.
        if ind.size and ind[-1] == x.size - 1:
            ind = ind[:-1]
        # Compute the falloff strength for each local maximum.
        if ind.size:  # this assumes height increases along axis!
            strength = np.array([x[i] - min(x[i:]) for i in ind])
            imax = np.argmax(strength)
        # Return jet strength or index of the strongest maximum.
        if output == 'strength':
            r = max(strength) if ind.size else 0
        else:  # output == 'index', validated above
            r = ind[imax] if ind.size else 0
        return r

    # Wrapper interface to apply the 1-d function to an ndarray.
    return np.apply_along_axis(inner, axis, x, output=output)
def detect_llj_vectorized(xs,
                          axis=-1,
                          output='falloff',
                          mask_inv=False,
                          inverse=False):
    """Identify local maxima in wind profiles.

    args:
        - xs : ndarray with wind profile data
        - axis : specifies the vertical dimension
        - output : specify return type: 'falloff', 'strength' or 'index'
        - mask_inv : use np.ma to mask nan values
        - inverse : flip the array if height decreases along ``axis``

    returns (depending on <output> argument and whether llj is identified):
        - falloff : 0 or largest difference between local max and subsequent min
        - strength : 0 or wind speed at jet height
        - index : -1 or index along <axis>
    """
    # Move <axis> to the first dimension, to easily index and iterate over it.
    xv = np.rollaxis(xs, axis)
    if inverse:
        xv = xv[::-1, ...]
    if mask_inv:
        xv = np.ma.masked_invalid(xv)

    # Initial state: running minimum seeded with the top level; no jet found.
    min_elem = xv[-1].copy()
    max_elem = np.zeros(min_elem.shape)
    max_diff = np.zeros(min_elem.shape)
    max_idx = np.ones(min_elem.shape, dtype=int) * (-1)

    # Walk from the end of the array backwards, tracking the largest
    # (value - subsequent minimum) difference seen so far.
    for i, elem in reversed(list(enumerate(xv))):
        min_elem = np.minimum(elem, min_elem)
        new_max_identified = elem - min_elem > max_diff
        max_diff = np.where(new_max_identified, elem - min_elem, max_diff)
        max_elem = np.where(new_max_identified, elem, max_elem)
        max_idx = np.where(new_max_identified, i, max_idx)

    if output == 'falloff':
        r = max_diff
    elif output == 'strength':
        r = max_elem
    elif output == 'index':
        r = max_idx
    else:
        raise ValueError('Invalid argument for <output>: %s' % output)
    return r
def detect_llj_xarray(da, inverse=False):
    """Identify local maxima in wind profiles.

    args:
        - da : xarray.DataArray with wind profile data
        - inverse : to flip the array if the data is stored upside down

    returns: xarray.Dataset with vertical dimension removed containing:
        - falloff : 0 or largest difference between local max and subseq min
        - strength : 0 or wind speed at jet height
        - level : level coordinate of the jet (filled with the last level
          when no jet was identified — see the printed warning)

    Note: vertical dimension should be labeled 'level' and axis=1
    """
    # Move the vertical axis to the front, to easily index and iterate.
    xv = np.rollaxis(da.values, 1)
    if inverse:
        xv = xv[::-1, ...]

    # Initial state: running minimum seeded with the top level; no jet found.
    min_elem = xv[-1].copy()
    max_elem = np.zeros(min_elem.shape)
    max_diff = np.zeros(min_elem.shape)
    max_idx = np.ones(min_elem.shape, dtype=int) * (-1)

    # Start at the end of the array and search backwards for larger
    # differences between a level's value and the subsequent minimum.
    for i, elem in reversed(list(enumerate(xv))):
        min_elem = np.minimum(elem, min_elem)
        new_max_identified = elem - min_elem > max_diff
        max_diff = np.where(new_max_identified, elem - min_elem, max_diff)
        max_elem = np.where(new_max_identified, elem, max_elem)
        max_idx = np.where(new_max_identified, i, max_idx)

    # Map jet indices to level coordinates. A jet at index 0 is valid, so the
    # test must be >= 0 (the old `i > 0` mapped such jets to the wrong
    # level); the -1 sentinel for "no jet" falls back to the last level.
    def get_height(i):
        return np.where(i >= 0, da.level.values[i], da.level.values[-1])

    dims = da.isel(level=0).drop('level').dims
    coords = da.isel(level=0).drop('level').coords
    lljs = xr.Dataset(
        {
            'falloff': (dims, max_diff),
            'strength': (dims, max_elem),
            'level': (dims, get_height(max_idx)),
        },
        coords=coords)
    print(
        'Beware! Level is also filled if no jet is detected! '
        'Use ds.sel(level=lljs.level).where(lljs.falloff>0) to get rid of them'
    )
    return lljs
"numpy.ones",
"numpy.minimum",
"numpy.hstack",
"numpy.where",
"numpy.rollaxis",
"numpy.argmax",
"numpy.zeros",
"numpy.apply_along_axis",
"numpy.isnan",
"numpy.ma.masked_invalid"
] | [((1794, 1844), 'numpy.apply_along_axis', 'np.apply_along_axis', (['inner', 'axis', 'x'], {'output': 'output'}), '(inner, axis, x, output=output)\n', (1813, 1844), True, 'import numpy as np\n'), ((2690, 2711), 'numpy.rollaxis', 'np.rollaxis', (['xs', 'axis'], {}), '(xs, axis)\n', (2701, 2711), True, 'import numpy as np\n'), ((2882, 2906), 'numpy.zeros', 'np.zeros', (['min_elem.shape'], {}), '(min_elem.shape)\n', (2890, 2906), True, 'import numpy as np\n'), ((2922, 2946), 'numpy.zeros', 'np.zeros', (['min_elem.shape'], {}), '(min_elem.shape)\n', (2930, 2946), True, 'import numpy as np\n'), ((4338, 4363), 'numpy.rollaxis', 'np.rollaxis', (['da.values', '(1)'], {}), '(da.values, 1)\n', (4349, 4363), True, 'import numpy as np\n'), ((4478, 4502), 'numpy.zeros', 'np.zeros', (['min_elem.shape'], {}), '(min_elem.shape)\n', (4486, 4502), True, 'import numpy as np\n'), ((4518, 4542), 'numpy.zeros', 'np.zeros', (['min_elem.shape'], {}), '(min_elem.shape)\n', (4526, 4542), True, 'import numpy as np\n'), ((2787, 2811), 'numpy.ma.masked_invalid', 'np.ma.masked_invalid', (['xv'], {}), '(xv)\n', (2807, 2811), True, 'import numpy as np\n'), ((2961, 2995), 'numpy.ones', 'np.ones', (['min_elem.shape'], {'dtype': 'int'}), '(min_elem.shape, dtype=int)\n', (2968, 2995), True, 'import numpy as np\n'), ((3146, 3172), 'numpy.minimum', 'np.minimum', (['elem', 'min_elem'], {}), '(elem, min_elem)\n', (3156, 3172), True, 'import numpy as np\n'), ((3248, 3303), 'numpy.where', 'np.where', (['new_max_identified', '(elem - min_elem)', 'max_diff'], {}), '(new_max_identified, elem - min_elem, max_diff)\n', (3256, 3303), True, 'import numpy as np\n'), ((3323, 3367), 'numpy.where', 'np.where', (['new_max_identified', 'elem', 'max_elem'], {}), '(new_max_identified, elem, max_elem)\n', (3331, 3367), True, 'import numpy as np\n'), ((3386, 3426), 'numpy.where', 'np.where', (['new_max_identified', 'i', 'max_idx'], {}), '(new_max_identified, i, max_idx)\n', (3394, 3426), True, 'import numpy as np\n'), 
((4557, 4591), 'numpy.ones', 'np.ones', (['min_elem.shape'], {'dtype': 'int'}), '(min_elem.shape, dtype=int)\n', (4564, 4591), True, 'import numpy as np\n'), ((4742, 4768), 'numpy.minimum', 'np.minimum', (['elem', 'min_elem'], {}), '(elem, min_elem)\n', (4752, 4768), True, 'import numpy as np\n'), ((4844, 4899), 'numpy.where', 'np.where', (['new_max_identified', '(elem - min_elem)', 'max_diff'], {}), '(new_max_identified, elem - min_elem, max_diff)\n', (4852, 4899), True, 'import numpy as np\n'), ((4919, 4963), 'numpy.where', 'np.where', (['new_max_identified', 'elem', 'max_elem'], {}), '(new_max_identified, elem, max_elem)\n', (4927, 4963), True, 'import numpy as np\n'), ((4982, 5022), 'numpy.where', 'np.where', (['new_max_identified', 'i', 'max_idx'], {}), '(new_max_identified, i, max_idx)\n', (4990, 5022), True, 'import numpy as np\n'), ((5092, 5148), 'numpy.where', 'np.where', (['(i > 0)', 'da.level.values[i]', 'da.level.values[-1]'], {}), '(i > 0, da.level.values[i], da.level.values[-1])\n', (5100, 5148), True, 'import numpy as np\n'), ((1476, 1495), 'numpy.argmax', 'np.argmax', (['strength'], {}), '(strength)\n', (1485, 1495), True, 'import numpy as np\n'), ((1025, 1036), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (1033, 1036), True, 'import numpy as np\n'), ((1090, 1108), 'numpy.hstack', 'np.hstack', (['(dx, 0)'], {}), '((dx, 0))\n', (1099, 1108), True, 'import numpy as np\n'), ((1117, 1135), 'numpy.hstack', 'np.hstack', (['(0, dx)'], {}), '((0, dx))\n', (1126, 1135), True, 'import numpy as np\n')] |
"""Sigv4 Signing Support"""
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy
# of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
import botocore
import json
def sigv4_auth(method, host, path, querys, body, headers):
    """Add AWS SigV4 authorization headers to ``headers`` (mutated in place).

    args:
        - method : HTTP method, e.g. 'GET'
        - host : scheme + API Gateway endpoint, e.g. 'https://<id>.execute-api.<region>.amazonaws.com'
        - path : request path
        - querys : iterable of (key, value) query-string pairs
        - body : JSON-serializable request body, or a falsy value for none
        - headers : dict that receives the signed headers
    """
    endpoint = host.replace('https://', '').replace('http://', '')
    # Endpoint layout: <api-id>.<service>.<region>.<domain>; only the region
    # is needed for signing.
    _api_id, _service, region, _domain = endpoint.split('.', maxsplit=3)

    request_parameters = '&'.join([f"{k}={v}" for k, v in querys])
    url = f"{host}{path}?{request_parameters}"

    session = botocore.session.Session()
    request = botocore.awsrequest.AWSRequest(method=method,
                                             url=url,
                                             data=json.dumps(body) if body else None)
    botocore.auth.SigV4Auth(session.get_credentials(),
                            "execute-api", region).add_auth(request)
    prepared_request = request.prepare()

    # Copy the Host header plus every signed header into the caller's dict.
    headers['host'] = endpoint.split('/', maxsplit=1)[0]
    for k, value in prepared_request.headers.items():
        headers[k] = value
| [
"json.dumps",
"botocore.session.Session"
] | [((1024, 1050), 'botocore.session.Session', 'botocore.session.Session', ([], {}), '()\n', (1048, 1050), False, 'import botocore\n'), ((1215, 1231), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (1225, 1231), False, 'import json\n')] |
r"""
>>> from django.conf import settings
>>> from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
>>> from django.contrib.sessions.backends.cache import SessionStore as CacheSession
>>> from django.contrib.sessions.backends.file import SessionStore as FileSession
>>> from django.contrib.sessions.backends.base import SessionBase
>>> db_session = DatabaseSession()
>>> db_session.modified
False
>>> db_session['cat'] = "dog"
>>> db_session.modified
True
>>> db_session.pop('cat')
'dog'
>>> db_session.pop('some key', 'does not exist')
'does not exist'
>>> db_session.save()
>>> db_session.exists(db_session.session_key)
True
>>> db_session.delete(db_session.session_key)
>>> db_session.exists(db_session.session_key)
False
>>> file_session = FileSession()
>>> file_session.modified
False
>>> file_session['cat'] = "dog"
>>> file_session.modified
True
>>> file_session.pop('cat')
'dog'
>>> file_session.pop('some key', 'does not exist')
'does not exist'
>>> file_session.save()
>>> file_session.exists(file_session.session_key)
True
>>> file_session.delete(file_session.session_key)
>>> file_session.exists(file_session.session_key)
False
# Make sure the file backend checks for a good storage dir
>>> settings.SESSION_FILE_PATH = "/if/this/directory/exists/you/have/a/weird/computer"
>>> FileSession()
Traceback (innermost last):
...
ImproperlyConfigured: The session storage path '/if/this/directory/exists/you/have/a/weird/computer' doesn't exist. Please set your SESSION_FILE_PATH setting to an existing directory in which Django can store session data.
>>> cache_session = CacheSession()
>>> cache_session.modified
False
>>> cache_session['cat'] = "dog"
>>> cache_session.modified
True
>>> cache_session.pop('cat')
'dog'
>>> cache_session.pop('some key', 'does not exist')
'does not exist'
>>> cache_session.save()
>>> cache_session.delete(cache_session.session_key)
>>> cache_session.exists(cache_session.session_key)
False
>>> s = SessionBase()
>>> s._session['some key'] = 'exists' # Pre-populate the session with some data
>>> s.accessed = False # Reset to pretend this wasn't accessed previously
>>> s.accessed, s.modified
(False, False)
>>> s.pop('non existant key', 'does not exist')
'does not exist'
>>> s.accessed, s.modified
(True, False)
>>> s.setdefault('foo', 'bar')
'bar'
>>> s.setdefault('foo', 'baz')
'bar'
>>> s.accessed = False # Reset the accessed flag
>>> s.pop('some key')
'exists'
>>> s.accessed, s.modified
(True, True)
>>> s.pop('some key', 'does not exist')
'does not exist'
"""
if __name__ == '__main__':
    # Run the doctests in this module's docstring when executed directly.
    import doctest
    doctest.testmod()
| [
"doctest.testmod"
] | [((2606, 2623), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (2621, 2623), False, 'import doctest\n')] |
import textwrap
from contextlib import ExitStack as does_not_raise # noqa: N813
import pytest
from _pytask.mark import Mark
from _pytask.outcomes import Skipped
from _pytask.outcomes import SkippedAncestorFailed
from _pytask.outcomes import SkippedUnchanged
from _pytask.skipping import pytask_execute_task_setup
from pytask import cli
from pytask import main
class DummyClass:
    """Empty placeholder class used as a stand-in object in tests."""
@pytest.mark.end_to_end
def test_skip_unchanged(tmp_path):
    """An unchanged task runs once, then is skipped on the second run."""
    source = """
    def task_dummy():
        pass
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    assert session.execution_reports[0].success

    session = main({"paths": tmp_path})

    assert isinstance(session.execution_reports[0].exc_info[1], SkippedUnchanged)
assert isinstance(session.execution_reports[0].exc_info[1], SkippedUnchanged)
@pytest.mark.end_to_end
def test_skip_unchanged_w_dependencies_and_products(tmp_path):
    """An unchanged task with files is skipped and its product preserved."""
    source = """
    import pytask

    @pytask.mark.depends_on("in.txt")
    @pytask.mark.produces("out.txt")
    def task_dummy(depends_on, produces):
        produces.write_text(depends_on.read_text())
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))
    tmp_path.joinpath("in.txt").write_text("Original content of in.txt.")

    session = main({"paths": tmp_path})

    assert session.execution_reports[0].success
    assert tmp_path.joinpath("out.txt").read_text() == "Original content of in.txt."

    session = main({"paths": tmp_path})

    assert isinstance(session.execution_reports[0].exc_info[1], SkippedUnchanged)
    assert tmp_path.joinpath("out.txt").read_text() == "Original content of in.txt."
assert tmp_path.joinpath("out.txt").read_text() == "Original content of in.txt."
@pytest.mark.end_to_end
def test_skipif_ancestor_failed(tmp_path):
    """A task is skipped when the task producing its dependency failed."""
    source = """
    import pytask

    @pytask.mark.produces("out.txt")
    def task_first():
        assert 0

    @pytask.mark.depends_on("out.txt")
    def task_second():
        pass
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    assert not session.execution_reports[0].success
    assert isinstance(session.execution_reports[0].exc_info[1], Exception)
    assert not session.execution_reports[1].success
    assert isinstance(session.execution_reports[1].exc_info[1], SkippedAncestorFailed)
@pytest.mark.end_to_end
def test_if_skip_decorator_is_applied_to_following_tasks(tmp_path):
    """Skipping a task also skips tasks that depend on its products."""
    source = """
    import pytask
    @pytask.mark.skip
    @pytask.mark.produces("out.txt")
    def task_first():
        assert 0
    @pytask.mark.depends_on("out.txt")
    def task_second():
        pass
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    # Both tasks count as successful and carry a Skipped outcome.
    for report in session.execution_reports[:2]:
        assert report.success
        assert isinstance(report.exc_info[1], Skipped)
@pytest.mark.end_to_end
@pytest.mark.parametrize(
    "mark_string", ["@pytask.mark.skip", "@pytask.mark.skipif(True, reason='bla')"]
)
def test_skip_if_dependency_is_missing(tmp_path, mark_string):
    """A skipped task does not fail even though its dependency does not exist."""
    source = f"""
    import pytask
    {mark_string}
    @pytask.mark.depends_on("in.txt")
    def task_first():
        assert 0
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    report = session.execution_reports[0]
    assert report.success
    assert isinstance(report.exc_info[1], Skipped)
@pytest.mark.end_to_end
@pytest.mark.parametrize(
    "mark_string", ["@pytask.mark.skip", "@pytask.mark.skipif(True, reason='bla')"]
)
def test_skip_if_dependency_is_missing_only_for_one_task(runner, tmp_path, mark_string):
    """A missing dependency aborts the build, blaming only the non-skipped task."""
    source = f"""
    import pytask
    {mark_string}
    @pytask.mark.depends_on("in.txt")
    def task_first():
        assert 0
    @pytask.mark.depends_on("in.txt")
    def task_second():
        assert 0
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    result = runner.invoke(cli, [tmp_path.as_posix()])
    output = result.output

    assert result.exit_code == 4
    assert "in.txt" in output
    assert "task_second" in output
    # The skipped task must not be blamed for the missing file.
    assert "task_first" not in output
@pytest.mark.end_to_end
def test_if_skipif_decorator_is_applied_skipping(tmp_path):
    """skipif with a true condition skips the task and its downstream tasks."""
    source = """
    import pytask
    @pytask.mark.skipif(condition=True, reason="bla")
    @pytask.mark.produces("out.txt")
    def task_first():
        assert False
    @pytask.mark.depends_on("out.txt")
    def task_second():
        assert False
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    # The skipif marker survives collection with its keyword arguments intact.
    node = session.collection_reports[0].node
    assert len(node.markers) == 1
    marker = node.markers[0]
    assert marker.name == "skipif"
    assert marker.args == ()
    assert marker.kwargs == {"condition": True, "reason": "bla"}

    for report in session.execution_reports[:2]:
        assert report.success
        assert isinstance(report.exc_info[1], Skipped)
    # The skip reason is carried in the outcome of the first task.
    assert session.execution_reports[0].exc_info[1].args[0] == "bla"
@pytest.mark.end_to_end
def test_if_skipif_decorator_is_applied_execute(tmp_path):
    """skipif with a false condition lets both tasks execute normally."""
    source = """
    import pytask
    @pytask.mark.skipif(False, reason="bla")
    @pytask.mark.produces("out.txt")
    def task_first(produces):
        with open(produces, "w") as f:
            f.write("hello world.")
    @pytask.mark.depends_on("out.txt")
    def task_second():
        pass
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    # The condition was passed positionally, so it ends up in args, not kwargs.
    node = session.collection_reports[0].node
    assert len(node.markers) == 1
    marker = node.markers[0]
    assert marker.name == "skipif"
    assert marker.args == (False,)
    assert marker.kwargs == {"reason": "bla"}

    # Neither task was skipped: both ran without any recorded outcome.
    for report in session.execution_reports[:2]:
        assert report.success
        assert report.exc_info is None
@pytest.mark.end_to_end
def test_if_skipif_decorator_is_applied_any_condition_matches(tmp_path):
    """Any condition of skipif has to be True and only their message is shown."""
    source = """
    import pytask
    @pytask.mark.skipif(condition=False, reason="I am fine")
    @pytask.mark.skipif(condition=True, reason="No, I am not.")
    @pytask.mark.produces("out.txt")
    def task_first():
        assert False
    @pytask.mark.depends_on("out.txt")
    def task_second():
        assert False
    """
    tmp_path.joinpath("task_dummy.py").write_text(textwrap.dedent(source))

    session = main({"paths": tmp_path})

    node = session.collection_reports[0].node
    assert len(node.markers) == 2
    # The decorator closest to the function (condition=True) appears first.
    expected_kwargs = [
        {"condition": True, "reason": "No, I am not."},
        {"condition": False, "reason": "I am fine"},
    ]
    for marker, kwargs in zip(node.markers, expected_kwargs):
        assert marker.name == "skipif"
        assert marker.args == ()
        assert marker.kwargs == kwargs

    for report in session.execution_reports[:2]:
        assert report.success
        assert isinstance(report.exc_info[1], Skipped)
    # Only the message of the matching (True) condition is reported.
    assert session.execution_reports[0].exc_info[1].args[0] == "No, I am not."
@pytest.mark.unit
@pytest.mark.parametrize(
    ("marker_name", "expectation"),
    [
        ("skip_unchanged", pytest.raises(SkippedUnchanged)),
        ("skip_ancestor_failed", pytest.raises(SkippedAncestorFailed)),
        ("skip", pytest.raises(Skipped)),
        ("", does_not_raise()),
    ],
)
def test_pytask_execute_task_setup(marker_name, expectation):
    """Each skip marker raises its matching outcome during task setup."""

    class _FakeTask:
        pass

    task = _FakeTask()
    # skip_ancestor_failed is the only marker that needs a reason keyword.
    kwargs = {"reason": ""} if marker_name == "skip_ancestor_failed" else {}
    task.markers = [Mark(marker_name, (), kwargs)]

    with expectation:
        pytask_execute_task_setup(task)
| [
"textwrap.dedent",
"_pytask.mark.Mark",
"pytest.mark.parametrize",
"_pytask.skipping.pytask_execute_task_setup",
"pytest.raises",
"contextlib.ExitStack",
"pytask.main"
] | [((2980, 3088), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""mark_string"""', '[\'@pytask.mark.skip\', "@pytask.mark.skipif(True, reason=\'bla\')"]'], {}), '(\'mark_string\', [\'@pytask.mark.skip\',\n "@pytask.mark.skipif(True, reason=\'bla\')"])\n', (3003, 3088), False, 'import pytest\n'), ((3559, 3667), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""mark_string"""', '[\'@pytask.mark.skip\', "@pytask.mark.skipif(True, reason=\'bla\')"]'], {}), '(\'mark_string\', [\'@pytask.mark.skip\',\n "@pytask.mark.skipif(True, reason=\'bla\')"])\n', (3582, 3667), False, 'import pytest\n'), ((602, 627), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (606, 627), False, 'from pytask import main\n'), ((691, 716), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (695, 716), False, 'from pytask import main\n'), ((1265, 1290), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (1269, 1290), False, 'from pytask import main\n'), ((1440, 1465), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (1444, 1465), False, 'from pytask import main\n'), ((1989, 2014), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (1993, 2014), False, 'from pytask import main\n'), ((2684, 2709), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (2688, 2709), False, 'from pytask import main\n'), ((3384, 3409), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (3388, 3409), False, 'from pytask import main\n'), ((4705, 4730), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (4709, 4730), False, 'from pytask import main\n'), ((5761, 5786), 'pytask.main', 'main', (["{'paths': tmp_path}"], {}), "({'paths': tmp_path})\n", (5765, 5786), False, 'from pytask import main\n'), ((6825, 6850), 'pytask.main', 'main', (["{'paths': tmp_path}"], 
{}), "({'paths': tmp_path})\n", (6829, 6850), False, 'from pytask import main\n'), ((562, 585), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (577, 585), False, 'import textwrap\n'), ((1151, 1174), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (1166, 1174), False, 'import textwrap\n'), ((1949, 1972), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (1964, 1972), False, 'import textwrap\n'), ((2644, 2667), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (2659, 2667), False, 'import textwrap\n'), ((3344, 3367), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (3359, 3367), False, 'import textwrap\n'), ((4028, 4051), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (4043, 4051), False, 'import textwrap\n'), ((4665, 4688), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (4680, 4688), False, 'import textwrap\n'), ((5721, 5744), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (5736, 5744), False, 'import textwrap\n'), ((6785, 6808), 'textwrap.dedent', 'textwrap.dedent', (['source'], {}), '(source)\n', (6800, 6808), False, 'import textwrap\n'), ((8093, 8122), '_pytask.mark.Mark', 'Mark', (['marker_name', '()', 'kwargs'], {}), '(marker_name, (), kwargs)\n', (8097, 8122), False, 'from _pytask.mark import Mark\n'), ((8155, 8186), '_pytask.skipping.pytask_execute_task_setup', 'pytask_execute_task_setup', (['task'], {}), '(task)\n', (8180, 8186), False, 'from _pytask.skipping import pytask_execute_task_setup\n'), ((7697, 7728), 'pytest.raises', 'pytest.raises', (['SkippedUnchanged'], {}), '(SkippedUnchanged)\n', (7710, 7728), False, 'import pytest\n'), ((7764, 7800), 'pytest.raises', 'pytest.raises', (['SkippedAncestorFailed'], {}), '(SkippedAncestorFailed)\n', (7777, 7800), False, 'import pytest\n'), ((7820, 7842), 'pytest.raises', 'pytest.raises', (['Skipped'], {}), '(Skipped)\n', (7833, 7842), False, 
'import pytest\n'), ((7858, 7874), 'contextlib.ExitStack', 'does_not_raise', ([], {}), '()\n', (7872, 7874), True, 'from contextlib import ExitStack as does_not_raise\n')] |
from django.apps import AppConfig
from django.core.checks import Tags, register
from django_version_checks import checks
class DjangoVersionChecksAppConfig(AppConfig):
    """App config that registers all version checks with Django's check framework."""

    name = "django_version_checks"
    verbose_name = "django-version-checks"

    def ready(self) -> None:
        # Register every check under its tag when the app registry is ready.
        registrations = (
            (Tags.compatibility, checks.check_config),
            (Tags.compatibility, checks.check_python_version),
            (Tags.database, checks.check_postgresql_version),
            (Tags.database, checks.check_mysql_version),
            (Tags.database, checks.check_sqlite_version),
        )
        for tag, check in registrations:
            register(tag)(check)
| [
"django.core.checks.register"
] | [((287, 315), 'django.core.checks.register', 'register', (['Tags.compatibility'], {}), '(Tags.compatibility)\n', (295, 315), False, 'from django.core.checks import Tags, register\n'), ((345, 373), 'django.core.checks.register', 'register', (['Tags.compatibility'], {}), '(Tags.compatibility)\n', (353, 373), False, 'from django.core.checks import Tags, register\n'), ((411, 434), 'django.core.checks.register', 'register', (['Tags.database'], {}), '(Tags.database)\n', (419, 434), False, 'from django.core.checks import Tags, register\n'), ((476, 499), 'django.core.checks.register', 'register', (['Tags.database'], {}), '(Tags.database)\n', (484, 499), False, 'from django.core.checks import Tags, register\n'), ((536, 559), 'django.core.checks.register', 'register', (['Tags.database'], {}), '(Tags.database)\n', (544, 559), False, 'from django.core.checks import Tags, register\n')] |