id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
268416 | <filename>pybamm/parameters/size_distribution_parameters.py
"""
Adding particle-size distribution parameter values to a parameter set
"""
import pybamm
import numpy as np
def get_size_distribution_parameters(
    param,
    R_n_av=None,
    R_p_av=None,
    sd_n=0.3,
    sd_p=0.3,
    R_min_n=None,
    R_min_p=None,
    R_max_n=None,
    R_max_p=None,
):
    """
    A convenience method to add standard area-weighted particle-size distribution
    parameter values to a parameter set. A lognormal distribution is assumed for
    each electrode, with mean set equal to the particle radius parameter in the
    set (default) or a custom value. The standard deviations and min/max radii
    are specified relative (i.e. scaled by) the mean radius for convenience.
    Only the dimensional values are output from this method.

    Parameters
    ----------
    param : :class:`pybamm.ParameterValues`
        The parameter values to add the distribution parameters to.
    R_n_av : float (optional)
        The area-weighted mean particle radius (dimensional) of the negative electrode.
        Default is the value "Negative particle radius [m]" from param.
    R_p_av : float (optional)
        The area-weighted mean particle radius (dimensional) of the positive electrode.
        Default is the value "Positive particle radius [m]" from param.
    sd_n : float (optional)
        The area-weighted standard deviation, scaled by the mean radius R_n_av,
        hence dimensionless. Default is 0.3.
    sd_p : float (optional)
        The area-weighted standard deviation, scaled by the mean radius R_p_av,
        hence dimensionless. Default is 0.3.
    R_min_n : float (optional)
        Minimum radius in negative electrode, scaled by the mean radius R_n_av.
        Default is 0 or 5 standard deviations below the mean (if positive).
    R_min_p : float (optional)
        Minimum radius in positive electrode, scaled by the mean radius R_p_av.
        Default is 0 or 5 standard deviations below the mean (if positive).
    R_max_n : float (optional)
        Maximum radius in negative electrode, scaled by the mean radius R_n_av.
        Default is 5 standard deviations above the mean.
    R_max_p : float (optional)
        Maximum radius in positive electrode, scaled by the mean radius R_p_av.
        Default is 5 standard deviations above the mean.
    """
    # Radii from given parameter set
    R_n_typ = param["Negative particle radius [m]"]
    R_p_typ = param["Positive particle radius [m]"]
    # Set the mean particle radii for each electrode.  Use explicit ``is None``
    # checks (not ``or``) so a caller-supplied value of 0 is not silently
    # replaced by the default.
    if R_n_av is None:
        R_n_av = R_n_typ
    if R_p_av is None:
        R_p_av = R_p_typ
    # Minimum radii: default is 5 standard deviations below the mean,
    # clipped at zero.
    if R_min_n is None:
        R_min_n = max(0, 1 - sd_n * 5)
    if R_min_p is None:
        R_min_p = max(0, 1 - sd_p * 5)
    # Maximum radii: default is 5 standard deviations above the mean.
    if R_max_n is None:
        R_max_n = 1 + sd_n * 5
    if R_max_p is None:
        R_max_p = 1 + sd_p * 5
    # Area-weighted particle-size distributions (closures over the dimensional
    # mean and standard deviation; evaluated lazily by the model).
    def f_a_dist_n_dim(R):
        return lognormal(R, R_n_av, sd_n * R_n_av)
    def f_a_dist_p_dim(R):
        return lognormal(R, R_p_av, sd_p * R_p_av)
    param.update(
        {
            "Negative area-weighted mean particle radius [m]": R_n_av,
            "Positive area-weighted mean particle radius [m]": R_p_av,
            "Negative area-weighted particle-size "
            + "standard deviation [m]": sd_n * R_n_av,
            "Positive area-weighted particle-size "
            + "standard deviation [m]": sd_p * R_p_av,
            "Negative minimum particle radius [m]": R_min_n * R_n_av,
            "Positive minimum particle radius [m]": R_min_p * R_p_av,
            "Negative maximum particle radius [m]": R_max_n * R_n_av,
            "Positive maximum particle radius [m]": R_max_p * R_p_av,
            "Negative area-weighted "
            + "particle-size distribution [m-1]": f_a_dist_n_dim,
            "Positive area-weighted "
            + "particle-size distribution [m-1]": f_a_dist_p_dim,
        },
        check_already_exists=False,
    )
    return param
def lognormal(x, x_av, sd):
    """
    A PyBaMM lognormal distribution for use with particle-size distribution
    models.

    The independent variable is x, range 0 < x < Inf, with mean ``x_av`` and
    standard deviation ``sd``.  Note: the mean and standard deviation are
    those of the lognormal variable X itself, not of the underlying normal
    distribution log(X).
    """
    # Convert the (mean, sd) of X into the (mu, sigma) parameters of the
    # underlying normal distribution of log(X).
    variance_ratio = sd ** 2 / x_av ** 2
    mu_ln = pybamm.log(x_av ** 2 / pybamm.sqrt(x_av ** 2 + sd ** 2))
    sigma_ln = pybamm.sqrt(pybamm.log(1 + variance_ratio))
    # Lognormal probability density evaluated at x.
    exponent = -((pybamm.log(x) - mu_ln) ** 2) / (2 * sigma_ln ** 2)
    return pybamm.exp(exponent) / pybamm.sqrt(2 * np.pi * sigma_ln ** 2) / x
| StarcoderdataPython |
291774 | <reponame>Wollacy/Python
## Check a string (entered name) against a few hard-coded values
nome=str(input('Qual seu nome? '))
print('')
# Branch on the exact name typed by the user.
if nome == 'Wollacy':
    print('Que nome bonito!')
elif nome == 'Pedro' or nome == 'Maria' or nome == 'João':
    print('Nome popular no Brasil!')
else:
    print('Nome comum!')
print('')
print('Olá {}!'.format(nome)) | StarcoderdataPython |
83422 |
from typing import Union, Iterable
import torch
import torch.nn.functional as F
def cross_entropy(
    outs: torch.Tensor,
    labels: torch.Tensor,
    reduction: str = "mean"
) -> torch.Tensor:
    """Cross-entropy loss computed from raw logits.

    Args:
        outs: unnormalised logits.
        labels: integer class indices.
        reduction: "mean", "sum" or "none" (as accepted by F.cross_entropy).
    """
    # F.cross_entropy fuses log_softmax and nll_loss in a single call.
    return F.cross_entropy(outs, labels, reduction=reduction)
def cross_entropy_softmax(
    probs: torch.Tensor,
    labels: torch.Tensor,
    reduction: str = "mean"
) -> torch.Tensor:
    """Cross-entropy loss computed from probabilities.

    Args:
        probs: the softmax of the logits (probabilities, not logits).
        labels: integer class indices.
        reduction: "mean", "sum" or "none" (as accepted by F.nll_loss).
    """
    # Since probs are already normalised, the NLL of log(probs) equals the
    # cross entropy.
    log_probs = probs.log()
    return F.nll_loss(log_probs, labels, reduction=reduction)
def kl_divergence(
    logits: torch.Tensor,
    targets: torch.Tensor,
    reduction: str = "batchmean"
) -> torch.Tensor:
    """KL divergence KL(softmax(targets) || softmax(logits)).

    Both arguments are raw logits of identical shape; ``targets`` is turned
    into probabilities and ``logits`` into log-probabilities, as required by
    ``F.kl_div``.
    """
    assert logits.size() == targets.size()
    log_q = F.log_softmax(logits, dim=-1)
    p = F.softmax(targets, dim=-1)
    return F.kl_div(log_q, p, reduction=reduction)
def mse_loss(
    inputs: torch.Tensor,
    targets: torch.Tensor,
    reduction: str = "mean"
) -> torch.Tensor:
    """Mean squared error between ``inputs`` and ``targets``.

    Args:
        reduction: "mean", "sum" or "none" (as accepted by F.mse_loss).
    """
    return F.mse_loss(inputs, targets, reduction=reduction)
def lploss(
    x: torch.Tensor,
    p: Union[int, float, str] = 'fro',
    dim: Union[int, Iterable] = -1
) -> torch.Tensor:
    """Mean p-norm of ``x`` along ``dim``.

    Args:
        x: input tensor.
        p: order of the norm; a number, or the strings 'fro' / 'nuc'
           accepted by ``torch.norm``.  (The original annotation used the
           string literals as types, which is invalid typing syntax.)
        dim: dimension(s) over which the norm is taken.

    Returns:
        Scalar tensor: the mean of the per-slice norms.
    """
    return torch.norm(x, p=p, dim=dim).mean()
| StarcoderdataPython |
8056346 | <gh_stars>1-10
from enum import IntEnum
class StatusCode(IntEnum):
    """HTTP status codes, including common non-standard / CDN extensions.

    NOTE(review): several member names are misspelled (NON_AUTHORATIVE,
    MOVED_PERMAMENTLY, PERMAMENT_REDIRECT) but are kept unchanged for
    backward compatibility with existing callers.
    """
    # Non-standard: used internally to flag a cancelled request.
    REQUEST_CANCELLED = 0
    # 1xx informational
    CONTINUE = 100
    SWITCHING_PROTOCOLS = 101
    PROCESSING = 102
    # 2xx success
    OK = 200
    CREATED = 201
    ACCEPTED = 202
    NON_AUTHORATIVE = 203
    NO_CONTENT = 204
    RESET_CONTENT = 205
    PARTIAL_CONTENT = 206
    MULTI_STATUS = 207
    ALREADY_REPORTED = 208
    IM_USED = 226
    # 3xx redirection
    MULTIPLE_CHOICES = 300
    MOVED_PERMAMENTLY = 301
    FOUND = 302
    SEE_OTHER = 303
    NOT_MODIFIED = 304
    USE_PROXY = 305
    # NOTE 306 is reserved
    TEMPORARY_REDIRECT = 307
    PERMAMENT_REDIRECT = 308
    # 4xx client errors
    BAD_REQUEST = 400
    UNAUTHORIZED = 401
    PAYMENT_REQUIRED = 402
    FORBIDDEN = 403
    NOT_FOUND = 404
    METHOD_NOT_ALLOWED = 405
    NOT_ACCEPTABLE = 406
    PROXY_AUTH_REQUIRED = 407
    REQUEST_TIMEOUT = 408
    CONFLICT = 409
    GONE = 410
    LENGTH_REQUIRED = 411
    PRECONDITION_FAILED = 412
    PAYLOAD_TOO_LARGE = 413
    URI_TOO_LONG = 414
    UNSUPPORTED_MEDIA_TYPE = 415
    RANGE_NOT_SATISFIABLE = 416
    EXPECTATION_FAILED = 417
    IM_A_TEAPOT = 418
    AUTH_TIMEOUT = 419  # non-standard
    MISDIRECTED_REQUEST = 421
    UNPROCESSABLE_ENTITY = 422
    LOCKED = 423
    FAILED_DEPENDENCY = 424
    UPGRADE_REQUIRED = 426
    PRECONDITION_REQUIRED = 428
    TOO_MANY_REQUESTS = 429
    REQUEST_HEADER_FIELDS_TOO_LARGE = 431
    REQUESTED_HOST_UNAVAILABLE = 434  # non-standard
    RETRY_WITH = 449  # non-standard (Microsoft)
    UNAVAILABLE_FOR_LEGAL_REASONS = 451
    CLIENT_CLOSED_REQUEST = 499  # non-standard (nginx)
    # 5xx server errors
    INTERNAL_SERVER_ERROR = 500
    NOT_IMPLEMENTED = 501
    BAD_GATEWAY = 502
    SERVICE_UNAVAILABLE = 503
    GATEWAY_TIMEOUT = 504
    HTTP_VERSION_NOT_SUPPORTED = 505
    VARIANT_ALSO_NEGOTIATES = 506
    INSUFFICIENT_STORAGE = 507
    BANDWIDTH_LIMIT_EXCEEDED = 509
    NOT_EXTENDED = 510
    NETWORK_AUTH_REQUIRED = 511
    # Cloudflare-specific 52x codes
    UNKNOWN_ERROR = 520
    WEB_SERVER_IS_DOWN = 521
    CONNECTION_TIMED_OUT = 522
    ORIGIN_IS_UNREACHABLE = 523
    A_TIMEOUT_OCCURED = 524
    # Fixed: was 524, which silently made this an alias of A_TIMEOUT_OCCURED.
    # Cloudflare's "SSL handshake failed" code is 525.
    SSL_HANDSHAKE_FAILED = 525
    INVALID_SSL_CERTIFICATE = 526
| StarcoderdataPython |
5065816 | import json
from datetime import datetime, timedelta
from uuid import uuid4
from django.core.management import call_command
from django.test.testcases import TestCase
import requests_mock
from freezegun import freeze_time
from djadyen import settings
from djadyen.choices import Status
from djadyen.models import AdyenIssuer, AdyenPaymentOption
from .factories import NotificationFactory, OrderFactory
def json_response(request, context):
    """Return a canned Adyen ``directory.shtml`` payload as gbk-encoded bytes.

    The signature matches the content-callback protocol of ``requests_mock``;
    both arguments are ignored.
    """
    ideal_issuers = [
        {'issuerId': '1121', 'name': 'Test Issuer'},
        {'issuerId': '1154', 'name': 'Test Issuer 5'},
        {'issuerId': '1153', 'name': 'Test Issuer 4'},
        {'issuerId': '1152', 'name': 'Test Issuer 3'},
        {'issuerId': '1151', 'name': 'Test Issuer 2'},
        {'issuerId': '1162', 'name': 'Test Issuer Cancelled'},
        {'issuerId': '1161', 'name': 'Test Issuer Pending'},
        {'issuerId': '1160', 'name': 'Test Issuer Refused'},
        {'issuerId': '1159', 'name': 'Test Issuer 10'},
        {'issuerId': '1158', 'name': 'Test Issuer 9'},
        {'issuerId': '1157', 'name': 'Test Issuer 8'},
        {'issuerId': '1156', 'name': 'Test Issuer 7'},
        {'issuerId': '1155', 'name': 'Test Issuer 6'}
    ]
    payload = {
        'paymentMethods': [
            {'brandCode': 'mc', 'name': 'MasterCard'},
            {'brandCode': 'visa', 'name': 'VISA'},
            {'brandCode': 'ideal', 'issuers': ideal_issuers, 'name': 'iDEAL'},
        ]
    }
    return json.dumps(payload, ensure_ascii=False).encode('gbk')
class SyncPaymentMethods(TestCase):
    """Tests for the ``sync_payment_methods`` management command.

    Without a mocked Adyen endpoint the command raises ValueError; with a
    mocked ``directory.shtml`` response it creates 3 payment options and
    13 issuers (see ``json_response`` above).
    """
    def test_on_empty_database(self):
        """Command without a reachable Adyen endpoint raises ValueError."""
        self.assertEqual(AdyenPaymentOption.objects.count(), 0)
        self.assertEqual(AdyenIssuer.objects.count(), 0)
        with self.assertRaises(ValueError):
            call_command('sync_payment_methods')
        # self.assertEqual(AdyenPaymentOption.objects.count(), 3)
        # self.assertEqual(AdyenIssuer.objects.count(), 13)
    def test_on_existing_database(self):
        """Same as above; the second (idempotency) half is disabled."""
        self.assertEqual(AdyenPaymentOption.objects.count(), 0)
        self.assertEqual(AdyenIssuer.objects.count(), 0)
        with self.assertRaises(ValueError):
            call_command('sync_payment_methods')
        # self.assertEqual(AdyenPaymentOption.objects.count(), 3)
        # self.assertEqual(AdyenIssuer.objects.count(), 13)
        # call_command('sync_payment_methods')
        # self.assertEqual(AdyenPaymentOption.objects.count(), 3)
        # self.assertEqual(AdyenIssuer.objects.count(), 13)
    @requests_mock.mock()
    def test_on_empty_database_mock(self, mock):
        """With a mocked directory response the command populates the tables."""
        mock.post(
            'https://test.adyen.com/hpp/directory.shtml',
            [
                {"content": json_response, "status_code": 200},
            ],
        )
        call_command('sync_payment_methods')
        self.assertEqual(AdyenPaymentOption.objects.count(), 3)
        self.assertEqual(AdyenIssuer.objects.count(), 13)
    @requests_mock.mock()
    def test_on_existing_database_mock(self, mock):
        """Running the command twice must not duplicate rows (idempotent)."""
        mock.post(
            'https://test.adyen.com/hpp/directory.shtml',
            [
                {"content": json_response, "status_code": 200},
                {"content": json_response, "status_code": 200},
            ],
        )
        self.assertEqual(AdyenPaymentOption.objects.count(), 0)
        self.assertEqual(AdyenIssuer.objects.count(), 0)
        call_command('sync_payment_methods')
        self.assertEqual(AdyenPaymentOption.objects.count(), 3)
        self.assertEqual(AdyenIssuer.objects.count(), 13)
        call_command('sync_payment_methods')
        self.assertEqual(AdyenPaymentOption.objects.count(), 3)
        self.assertEqual(AdyenIssuer.objects.count(), 13)
class ProcessNotifications(TestCase):
    """Tests for notification processing by the ``adyen_maintenance`` command.

    Each test creates one pending Order with a matching (unprocessed)
    Notification, runs the command, and checks the resulting order/
    notification state for the given event code.
    """
    def setUp(self):
        super(ProcessNotifications, self).setUp()
        reference = str(uuid4())
        # Base AUTHORISATION payload; individual tests override eventCode.
        self.data = {
            'success': 'true',
            'eventCode': 'AUTHORISATION',
            'merchantReference': reference,
            'merchantAccountCode': settings.ADYEN_MERCHANT_ACCOUNT,
        }
        with freeze_time('2019-01-01 11:44'):
            self.notification1 = NotificationFactory.create(
                notification=json.dumps(self.data),
                is_processed=False
            )
            self.order1 = OrderFactory.create(
                status=Status.Pending,
                reference=reference
            )
    @freeze_time('2019-01-01 12:00')
    def test_process_notifications_already_processed(self):
        """
        Make sure that an order, which status has already been
        set as 'Authorised' is not processed again.
        """
        self.order1.status = Status.Authorised
        self.order1.save()
        self.assertFalse(self.order1.paid)
        call_command('adyen_maintenance')
        self.order1.refresh_from_db()
        self.assertFalse(self.order1.paid)
    @freeze_time('2019-01-01 12:00')
    def test_process_notifications(self):
        """A successful AUTHORISATION marks the order paid."""
        self.assertFalse(self.order1.paid)
        call_command('adyen_maintenance')
        self.order1.refresh_from_db()
        self.assertTrue(self.order1.paid)
        self.notification1.refresh_from_db()
        self.assertTrue(self.notification1.is_processed)
        # Fixed: was assertTrue(a, b), which treats the datetime as the
        # failure message and never compares it.
        # NOTE(review): if USE_TZ makes processed_at timezone-aware, this
        # comparison may need an aware datetime — confirm.
        self.assertEqual(self.notification1.processed_at, datetime(2019, 1, 1, 12, 0))
    @freeze_time('2019-01-01 12:00')
    def test_process_notifications_is_error(self):
        """An ERROR event leaves the order unpaid but marks processing done."""
        self.assertFalse(self.order1.paid)
        self.data.update(eventCode='ERROR')
        self.notification1.notification = json.dumps(self.data)
        self.notification1.save()
        call_command('adyen_maintenance')
        self.order1.refresh_from_db()
        self.assertFalse(self.order1.paid)
        self.notification1.refresh_from_db()
        self.assertTrue(self.notification1.is_processed)
        self.assertEqual(self.notification1.processed_at, datetime(2019, 1, 1, 12, 0))
    @freeze_time('2019-01-01 12:00')
    def test_process_notifications_is_cancelled(self):
        """A CANCEL event leaves the order unpaid but marks processing done."""
        self.assertFalse(self.order1.paid)
        self.data.update(eventCode='CANCEL')
        self.notification1.notification = json.dumps(self.data)
        self.notification1.save()
        call_command('adyen_maintenance')
        self.order1.refresh_from_db()
        self.assertFalse(self.order1.paid)
        self.notification1.refresh_from_db()
        self.assertTrue(self.notification1.is_processed)
        self.assertEqual(self.notification1.processed_at, datetime(2019, 1, 1, 12, 0))
    @freeze_time('2019-01-01 12:00')
    def test_process_notifications_is_refused(self):
        """A REFUSED event leaves the order unpaid but marks processing done."""
        self.assertFalse(self.order1.paid)
        self.data.update(eventCode='REFUSED')
        self.notification1.notification = json.dumps(self.data)
        self.notification1.save()
        call_command('adyen_maintenance')
        self.order1.refresh_from_db()
        self.assertFalse(self.order1.paid)
        self.notification1.refresh_from_db()
        self.assertTrue(self.notification1.is_processed)
        self.assertEqual(self.notification1.processed_at, datetime(2019, 1, 1, 12, 0))
class CleanupPending(TestCase):
    """Tests that ``adyen_maintenance`` errors-out stale pending orders."""
    def test_cleanup(self):
        """Orders pending for 5+ days become Error; newer/authorised are kept."""
        # 5 days ago; Should be marked as 'Error'
        with freeze_time('2019-01-5 12:00'):
            self.order1 = OrderFactory.create(status=Status.Pending)
        # 4 days ago; Should be left alone
        with freeze_time('2019-01-6 12:00'):
            self.order2 = OrderFactory.create(status=Status.Pending)
        # 6 days ago, Should be marked as 'Error'
        with freeze_time('2019-01-4 12:00'):
            self.order3 = OrderFactory.create(status=Status.Pending)
        # 7 days ago, but Authorised, should be left alone
        with freeze_time('2019-01-3 12:00'):
            self.order4 = OrderFactory.create(status=Status.Authorised)
        # Notification whose reference matches no order; it should still be
        # marked processed so it is not retried forever.
        data = {
            'success': 'true',
            'eventCode': 'AUTHORISATION',
            'merchantReference': 'unknown',
            'merchantAccountCode': settings.ADYEN_MERCHANT_ACCOUNT,
        }
        with freeze_time('2019-01-01 11:44'):
            self.notification1 = NotificationFactory.create(
                notification=json.dumps(data),
                is_processed=False
            )
        with freeze_time('2019-01-10 12:00'):
            call_command('adyen_maintenance')
        self.order1.refresh_from_db()
        self.order2.refresh_from_db()
        self.order3.refresh_from_db()
        self.order4.refresh_from_db()
        self.notification1.refresh_from_db()
        self.assertEqual(self.order1.status, Status.Error)
        self.assertEqual(self.order2.status, Status.Pending)
        self.assertEqual(self.order3.status, Status.Error)
        self.assertEqual(self.order4.status, Status.Authorised)
        self.assertTrue(self.notification1.is_processed)
| StarcoderdataPython |
8024336 | import data.simulation as sim
import package.params as params
import package.instance as inst
import package.experiment as exp
import package.batch as ba
import pprint
import zipfile
def example_create_instance():
    """Create a simulated dataset and wrap it in an Instance.

    Returns:
        inst.Instance built from a dataset generated with the default
        options in ``params.OPTIONS``.
    """
    # Generate a dataset from the default options and show it.
    dataset = sim.create_dataset(params.OPTIONS)
    pprint.pprint(dataset)
    # Wrap the raw data in an Instance so a solver can be built on top of it.
    instance = inst.Instance(dataset)
    # Demonstrate some accessors: start dates of the tasks (missions) ...
    instance.get_tasks('start')
    # ... and the initial status of the resources (aircraft).
    instance.get_resources('initial')
    return instance
def load_experiment_from_zip():
    """Load one experiment stored inside the example zip archive.

    Returns:
        exp.Experiment read from a fixed path inside the archive.
    """
    archive = zipfile.ZipFile('examples/serv_cluster1_20200625.zip')
    # Pick a single experiment from inside the archive.
    experiment = exp.Experiment.from_zipfile(
        archive, 'serv_cluster1_20200625/numparalleltasks_13/202006250859')
    # Example usage: check the solution for constraint violations.
    pprint.pprint(experiment.check_solution())
    return experiment
def load_batch_from_zip():
    """Load the entire example zip archive as a batch of experiments.

    Returns:
        ba.ZipBatch for the example archive.
    """
    batch = ba.ZipBatch(path='examples/serv_cluster1_20200625.zip')
    # Example usage: produce summary statistics from the batch.
    print(batch.get_status_df())
    return batch
| StarcoderdataPython |
11253491 | <gh_stars>1-10
"""API for Tasks"""
from flask import Blueprint, jsonify
from flask_login import login_required
from ..tasks import channels_renew, list_all_tasks, remove_all_tasks
from ..utils import admin_required
# Blueprint for the task-management API.  Every request to this blueprint
# must come from an admin user (enforced by the before_request hook).
api_task_blueprint = Blueprint("api_task", __name__)
api_task_blueprint.before_request(admin_required)
@api_task_blueprint.route("/list-all")
@login_required
def list_all():
    """Return every known task as a JSON payload."""
    return jsonify(list_all_tasks())
@api_task_blueprint.route("/remove-all")
@login_required
def remove_all():
    """Remove all tasks and return the removal results as JSON."""
    return jsonify(remove_all_tasks())
@api_task_blueprint.route("/<task_id>/status")
@login_required
def status(task_id):
    """Return the JSON status of a single background task.

    Args:
        task_id: Celery task id taken from the URL.

    Returns:
        JSON with id, human-readable status, result, traceback and — for
        in-progress tasks that publish progress metadata — current/total
        counters and the channel id.
    """
    task = channels_renew.AsyncResult(task_id)
    response = {
        "id": task.id,
        "status": task.status.title(),
        "result": task.result,
        "traceback": task.traceback,
    }
    if isinstance(task.result, Exception):
        # Fall back to the cached backend meta for the full traceback.
        response["traceback"] = task.__dict__["_cache"]["traceback"]
    elif isinstance(task.result, dict):
        # Progress metadata published by the task itself.
        response["current"] = task.result.get("current", 1)
        response["total"] = task.result.get("total", 1)
        response["channel_id"] = task.result.get("channel_id", None)
    # A still-pending task has result None: the original unconditionally
    # called task.result.get(...) here and raised AttributeError.
    return jsonify(response)
| StarcoderdataPython |
6427116 | from spira.log import SPIRA_LOG as LOG
from spira.yevon.filters.filter import Filter
from spira.yevon.gdsii.elem_list import ElementList
from spira.yevon.geometry.ports.port_list import PortList
from spira.yevon.process import get_rule_deck
RDD = get_rule_deck()  # process/rule-deck singleton used by the filters below
# Public API of this module.
__all__ = [
    'ProcessBooleanFilter',
    'SimplifyFilter',
    'ElectricalAttachFilter',
    'ContactAttachFilter',
    'PinAttachFilter',
]
# FIXME: Maybe use derived layers directly?
class ProcessBooleanFilter(Filter):
    """
    Applies boolean merge operations on all metal
    layer polygons in the cell.

    Notes
    -----
    Derived merge boolean polygons is added as a filter,
    since we want to apply this operation on all elements.
    """
    from spira.yevon.process.purpose_layer import PurposeLayerParameter
    metal_purpose = PurposeLayerParameter(default=RDD.PURPOSE.METAL)
    def filter_Cell(self, item):
        # Rebuild the cell from its merged (booleaned) elements, keeping
        # references and labels, and keeping only 'P' and 'T' purpose ports.
        from spira.yevon.gdsii.cell import Cell
        ports = PortList()
        elems = ElementList()
        # Merged polygons produced by the boolean operation.
        for e in item.derived_merged_elements:
            elems += e
        # Carry over cell references and labels unchanged.
        for e in item.elements.sref:
            elems += e
        for e in item.elements.labels:
            elems += e
        # Keep pin ('P') and terminal ('T') ports only.
        for p in item.ports:
            if p.purpose.symbol == 'P':
                ports += p
            if p.purpose.symbol == 'T':
                ports += p
        cell = Cell(elements=elems, ports=ports)
        return cell
    def __repr__(self):
        return "<ProcessBooleanFilter: \'{}\'>".format(self.name)
class SimplifyFilter(Filter):
    """
    Simplify all curved shapes in the cell.

    Notes
    -----
    Add shape simplifying algorithm as a filter, since
    we only want to apply shape simplification is certain
    circumstances. Other shape operations, such as
    reversing points are typically applied algorithmically.
    """
    def filter_Cell(self, item):
        from spira.yevon.utils import clipping
        from spira.yevon.gdsii.cell import Cell
        ports = PortList()
        elems = ElementList()
        # Reduce the point count of every polygon shape in place.
        for e in item.elements.polygons:
            e.shape = clipping.simplify_points(e.points)
            elems += e
        # Carry over references, labels and all ports unchanged.
        for e in item.elements.sref:
            elems += e
        for e in item.elements.labels:
            elems += e
        for p in item.ports:
            ports += p
        cell = Cell(elements=elems, ports=ports)
        return cell
    def __repr__(self):
        return "<SimplifyFilter: \'{}\'>".format(self.name)
class ElectricalAttachFilter(Filter):
    """
    Replaces the shape of every element that overlaps a same-layer element
    with a ShapeConnected that records the overlap (clip) geometry and the
    derived edges of the virtual connection model.
    """
    def filter_Cell(self, item):
        from copy import deepcopy
        from spira.yevon.vmodel.virtual import virtual_connect
        from spira.yevon.geometry.shapes.adapters import ShapeConnected
        from spira.yevon.geometry.shapes.shape import Shape
        v_model = virtual_connect(device=item)
        D = item.expand_flat_copy()
        # NOTE(review): O(n^2) pairwise overlap scan over the flattened
        # elements; may be slow for large cells.
        for i, e1 in enumerate(D.elements):
            # Collect all intersection points between e1 and every other
            # same-layer element.
            clip_shape = Shape()
            for e2 in D.elements:
                shape1 = e1.shape.transform_copy(e1.transformation).snap_to_grid()
                shape2 = e2.shape.transform_copy(e2.transformation).snap_to_grid()
                if (shape1 != shape2) and (e1.layer == e2.layer):
                    overlap_shape = shape1.intersections(shape2)
                    if isinstance(overlap_shape, Shape):
                        if overlap_shape.is_empty() is False:
                            clip_shape.extend(overlap_shape.points.tolist())
            if clip_shape.is_empty() is False:
                # Bake the transformation into the shape and attach the
                # connectivity information.
                original_shape = e1.shape.transform_copy(e1.transformation).snap_to_grid()
                D.elements[i].shape = ShapeConnected(
                    original_shape=original_shape,
                    clip_shape=clip_shape,
                    derived_edges=v_model.derived_edges)
                D.elements[i].ports = D.elements[i].ports.transform_copy(e1.transformation)
                D.elements[i].transformation = None
        return item
    def __repr__(self):
        return "<ElectricalAttachFilter: \'{}\'>".format(self.name)
class ContactAttachFilter(Filter):
    """
    Adds contact ports to each metal polygon connected by a
    contact layer and return a list of the updated elements.
    """
    def filter_Cell(self, item):
        from spira.yevon.utils import clipping
        from spira.yevon.gdsii.cell import Cell
        from spira.yevon.geometry.ports import Port
        from spira.yevon.vmodel.virtual import virtual_connect
        from shapely.geometry import Polygon as ShapelyPolygon
        # ports = PortList()
        elems = ElementList()
        v_model = virtual_connect(device=item)
        # For every derived contact, attach a 'Cv' port to each element
        # whose layer is the top or bottom layer of that via's stack and
        # which encloses the contact's center point.
        for e1 in v_model.derived_contacts:
            ps = e1.layer.process.symbol
            for e2 in item.elements:
                for m in ['BOT_LAYER', 'TOP_LAYER']:
                    if ps in RDD.VIAS.keys:
                        if e2.layer == RDD.VIAS[ps].LAYER_STACK[m]:
                            if e2.encloses(e1.center):
                                port = Port(
                                    name='{}:Cv'.format(ps),
                                    midpoint=e1.center,
                                    process=e1.layer.process)
                                e2.ports += port
        # Rebuild the cell with the (mutated) elements, references and labels.
        elems += item.elements
        for e in item.elements.sref:
            elems += e
        for e in item.elements.labels:
            elems += e
        # for p in item.ports:
        #     ports += p
        # cell = Cell(elements=elems, ports=ports)
        cell = Cell(elements=elems)
        return cell
    def __repr__(self):
        return "<ContactAttachFilter: \'{}\'>".format(self.name)
class PinAttachFilter(Filter):
    """
    Attaches the cell's terminal ('T') ports to the polygon elements that
    enclose them, on a flat-expanded copy of the cell.
    """
    def filter_Cell(self, item):
        D = item.expand_flat_copy()
        for e in D.elements.polygons:
            for p in item.ports:
                # Only terminal ('T') purpose ports are attached here
                # (pin 'P' handling was deliberately disabled upstream).
                if p.purpose.symbol == 'T':
                    # Bake the polygon's transformation into its shape before
                    # testing whether the port encloses it.
                    shape = e.shape.transform_copy(e.transformation).snap_to_grid()
                    if p.encloses(shape.points):
                        e.ports += p
        return item
    def __repr__(self):
        # Fixed: previously reported itself as ContactAttachFilter.
        return "<PinAttachFilter: \'{}\'>".format(self.name)
| StarcoderdataPython |
208324 | <reponame>mdeloge/opengrid
# -*- coding: utf-8 -*-
"""
A setuptools based setup module for opengrid.
Adapted from
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# Absolute path of the directory containing this setup.py.
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
#with open(path.join(here, 'README.md'), encoding='utf-8') as f:
#    long_description = f.read()
import subprocess
# NOTE(review): shelling out to pip at import time is fragile (wrong
# interpreter/venv, no dependency resolution by setuptools) — prefer
# install_requires; kept as-is to preserve behavior.
if subprocess.call(["pip", "install","-r", path.join(here, "requirements.txt"), "-v", "--no-cache"]):
    raise Exception("Could not install dependencies")
setup(
    name='opengrid',
    version="0.5.0",
    description='Open-source algorithms for data-driven building analysis and control',
    #long_description=long_description,
    url='https://github.com/opengridcc/opengrid',
    author='<NAME> and many others',
    author_email='<EMAIL>',
    license='Apache 2.0',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: Apache Software License',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    keywords='algorithms buildings monitoring analysis control',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(),
    # Alternatively, if you want to distribute just a my_module.py, uncomment
    # this:
    #py_modules=["tmpo.py"],
    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    # Note: for creating the source distribution, they had to be included in the
    # MANIFEST.in as well.
    package_data={
        'opengrid': ['notebooks/*'],
    },
    data_files=[('', ['LICENSE', 'README.md', 'requirements.txt'])]
)
| StarcoderdataPython |
9692069 | <filename>shop/spiders/markethot_spider.py
# coding=utf-8
import datetime
import logging
import scrapy
from shop.items import ShopItem
logger = logging.getLogger('mycustomlogger')
class MarkethotSpider(scrapy.Spider):
    """Spider scraping product names and prices from markethot.ru search results."""
    name = 'markethot.ru'
    base_url = 'https://markethot.ru'
    # Search path template; the single %s is filled with the query string.
    search = '/catalog/search?sort=price&order=asc&query=%s'
    def __init__(self, *args, **kwargs):
        # 'query' and 'history' are required spider arguments (scrapy -a).
        super(MarkethotSpider, self).__init__(**kwargs)
        self.query=kwargs['query']
        self.history=kwargs['history']
    def start_requests(self):
        # Single entry request: search-results page for the query.
        yield scrapy.Request(url=self.base_url + self.search % (self.query), callback=self.parse)
    def get_pages(self, response):
        # NOTE(review): apparently dead/broken pagination code — this method is
        # never scheduled as a callback, and ``self.search % (page, self.query)``
        # below would raise TypeError because the template has a single %s
        # placeholder (and no page parameter at all). Confirm the intended
        # pagination URL before enabling.
        print("user-agent: %s" % self.settings.get('USER_AGENT'))
        count_pages = response.xpath('string(.//*[@class="pagination"]/li[last()])').extract_first()
        if count_pages != '':
            count_pages = int(count_pages)
        else:
            count_pages = 0
        for page in range(count_pages + 1):
            url = self.base_url + self.search % (page, self.query)
            yield response.follow(url, callback=self.parse)
    def parse(self, response):
        """Extract one ShopItem per product card on the results page."""
        for product in response.xpath('//*[contains(@class, "product-item")]'):
            item = ShopItem()
            item['resource'] = self.name
            item['history'] = self.history
            item["url"] = self.base_url + product.xpath('.//a[@class="pi-inner"]/@href').extract_first()
            name = product.xpath('.//div[@class="product-description"]/text()').extract_first()
            name = name.strip()
            item["name"] = name
            item["price"] = float(product.xpath('.//span[@class="price"]/text()').extract_first())
            item['created_date'] = datetime.datetime.now()
            yield item
| StarcoderdataPython |
6420966 | import populate_test_tables
from archive.utils.mock_di_api import mock_api
from archive.utils.operator_test import operator_test
# Build a mocked API bound to this test file and a test harness for the
# populate_test_tables operator.
api = mock_api(__file__) # class instance of mock_api
mock_api.print_send_msg = False # set class variable for printing api.send
optest = operator_test(__file__)
# config parameter
api.config.num_rows = 100 # datatype : integer
# Load the recorded input message and feed it to the operator under test.
msg = optest.get_message('msg_1.json')
#msg = api.Message(attributes={'operator':'di_replication.populate_test_tables'},body = None)
populate_test_tables.on_data(msg)
| StarcoderdataPython |
3333301 | <gh_stars>0
from bs4 import BeautifulSoup
'''BeautifulSoup (XML/HTML scraper) must be imported from bs4 like this'''
# Use a raw string: in the original 'C:\some_path\fb_file.html' the '\f'
# sequence was interpreted as a form-feed escape, corrupting the path.
path = r'C:\some_path\fb_file.html'
# Parse the exported Facebook HTML; close the handle deterministically.
with open(path, 'rb') as file:
    soup = BeautifulSoup(file, 'html5lib',)
# CSS classes observed in the export:
# _12gz = note titles
# _2pin = things you posted (on walls, incl. notes, and self wall)
# _3-96 _2let = outside of _2pin
output_list = []
title_string = ''
for d in soup.find_all('div', class_='_2pin'):
    title = d.find('div', class_='_12gz')
    # For your notes, separate the title from the body text.
    if title is not None:
        title_string = title.text.strip()
        title.decompose()
    string = d.text.strip()
    # Skip very short fragments and bare links.
    if len(string) > 10 and not string[:4] == 'http':
        if len(title_string) > 2:
            string = title_string + '\n' + string
        output_list.append(string)
        title_string = ''
text = '\n\n\n'.join(output_list)
# Write the collected posts out.  The original called ``f.close`` without
# parentheses, so the handle was never actually closed.
with open('fb_txt.txt', 'w+', encoding='utf-8') as f:
    f.write(text)
11283032 | from flask import Flask, render_template, request, redirect, url_for
from apiclient.discovery import build
import configparser
import json
from flask_mail import Mail, Message
from bin import utils
from bin.weather import Weather
from bin.mailmanager import MailManager
app = Flask(__name__)
# Load config.ini file (email credentials, redirects, weather settings).
config = configparser.ConfigParser()
config.read('./config/config.ini')
def callFromCron(*args, **kwargs):
    """Return True if the current request was issued by App Engine cron.

    App Engine strips the X-AppEngine-Cron header from external requests,
    so its presence proves the call came from the cron service.

    Returns:
        True if the X-AppEngine-Cron header is present, else False.
    """
    return request.headers.get('X-AppEngine-Cron') is not None
@app.route('/')
def main():
"""Main page of the football notifier.
Returns:
A redirection to a homepage of a Google Group.
"""
# Temporarily disabled
#return redirect(config['REDIRECTS']['googleGroup'], code=302)
return render_template("404.html")
@app.errorhandler(404)
# inbuilt function which takes error as parameter
def not_found(e):
"""404 error handler.
Returns:
A 404 html template.
"""
return render_template("404.html")
@app.route('/mail/<weekday>')
def sendMail(weekday):
    """This is a page which cron job requests to send an email.

    If the call was made from cron, configure the mail transport, send the
    email via MailManager and return its content. Otherwise redirect.

    Args:
        weekday: weekday name taken from the URL.

    Returns:
        The message content of the email, or a redirection to the main page.
    """
    if callFromCron():
        # Config email transport from config.ini.
        mail = initEmail()
        m = MailManager(config, weekday, mail)
        m.sendEmail()
        return m.createEmailMessageContent()
    else:
        return redirect(url_for('main'))
def initEmail():
    """Initiate email configuration with data from config.ini.

    Returns:
        Configured flask_mail Mail object bound to the global app.
    """
    emailConfig = config['EMAIL']
    app.config['MAIL_SERVER']= emailConfig['MAIL_SERVER']
    # Fixed: configparser returns strings; the SMTP port must be an int
    # (consistent with the getboolean() calls below).
    app.config['MAIL_PORT'] = emailConfig.getint('MAIL_PORT')
    app.config['MAIL_USERNAME'] = emailConfig['MAIL_USERNAME']
    app.config['MAIL_PASSWORD'] = emailConfig['MAIL_PASSWORD']
    app.config['MAIL_USE_TLS'] = emailConfig.getboolean('MAIL_USE_TLS')
    app.config['MAIL_USE_SSL'] = emailConfig.getboolean('MAIL_USE_SSL')
    return Mail(app)
if __name__ == '__main__':
    # This is used when running locally only. When deploying to Google App
    # Engine, a webserver process such as Gunicorn will serve the app. This
    # can be configured by adding an `entrypoint` to app.yaml.
    # NOTE: never enable debug=True in production.
    app.run(host='127.0.0.1', port=8080, debug=True)
3488119 | <reponame>BMW-InnovationLab/BMW-Semantic-Segmentation-Training-GUI
from domain.exceptions.application_error import ApplicationError
class ConfigurationError(ApplicationError):
    """Raised when a configuration could not be created."""

    def __init__(self, configuration_name: str, additional_message: str = ''):
        detail = '{} {}'.format(additional_message, configuration_name)
        super().__init__('Could not create Configuration: ', detail)
class ConfigurationTypeNotFound(ApplicationError):
    """Raised when a requested configuration type does not exist."""

    def __init__(self, configuration_type: str, additional_message: str = ''):
        detail = '{} {}'.format(additional_message, configuration_type)
        super().__init__('Configuration Type Not Found: ', detail)
class CheckpointConfigurationInvalid(ApplicationError):
    """Raised when a checkpoint's JSON configuration fails validation."""

    def __init__(self, configuration_path: str, additional_message: str = ''):
        detail = '{} {}'.format(additional_message, configuration_path)
        super().__init__('JSON configuration is not valid: ', detail)
| StarcoderdataPython |
9758843 | # -*- coding: utf-8 -*-
import datetime
import locale
import sys
import time
from random import choice
from threading import Thread
import os
import lxml
import requests
from bs4 import BeautifulSoup as bs4
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QDate
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import (QApplication, QCalendarWidget, QFileDialog,
QInputDialog, QMainWindow, QSizePolicy,
QTableWidgetItem, QTextEdit, QWidget)
from newUI import AuthWindow, MainWindow
count = 0  # NOTE(review): appears unused in the visible code — candidate for removal
# Form-field names and success marker on the edu.tatar.ru login page.
loginElementName = "main_login"
passwordElementName = "main_password"
successElementText = "Личный кабинет"  # heading shown after a successful login
URL = "https://edu.tatar.ru/logon"
# Russian locale so strftime() produces Russian day/month names in headers.
locale.setlocale(locale.LC_ALL, "ru")
style = """QMainWindow {
background-color:#ececec;
}
QTextEdit {
border-width: 1px;
border-style: solid;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QPlainTextEdit {
border-width: 1px;
border-style: solid;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QToolButton {
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(217, 217, 217), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(217, 217, 217));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(255,255,255);
}
QToolButton:hover{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(195, 195, 195), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(197, 197, 197), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(197, 197, 197));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(195, 195, 195), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(255,255,255);
}
QToolButton:pressed{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(217, 217, 217), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(217, 217, 217));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(142,142,142);
}
QPushButton{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(217, 217, 217), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(217, 217, 217));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(255,255,255);
}
QPushButton::default{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(217, 217, 217), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(217, 217, 217));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(255,255,255);
}
QPushButton:hover{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(195, 195, 195), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(197, 197, 197), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(197, 197, 197));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(195, 195, 195), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(255,255,255);
}
QPushButton:pressed{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(217, 217, 217), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(217, 217, 217));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: rgb(0,0,0);
padding: 2px;
background-color: rgb(142,142,142);
}
QPushButton:disabled{
border-style: solid;
border-top-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-right-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(217, 217, 217), stop:1 rgb(227, 227, 227));
border-left-color: qlineargradient(spread:pad, x1:0, y1:0.5, x2:1, y2:0.5, stop:0 rgb(227, 227, 227), stop:1 rgb(217, 217, 217));
border-bottom-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgb(215, 215, 215), stop:1 rgb(222, 222, 222));
border-width: 1px;
border-radius: 5px;
color: #808086;
padding: 2px;
background-color: rgb(142,142,142);
}
QLineEdit {
border-width: 1px; border-radius: 4px;
border-style: solid;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QLabel {
color: #000000;
}
QLCDNumber {
color: rgb(0, 113, 255, 255);
}
QProgressBar {
text-align: center;
color: rgb(240, 240, 240);
border-width: 1px;
border-radius: 10px;
border-color: rgb(230, 230, 230);
border-style: solid;
background-color:rgb(207,207,207);
}
QProgressBar::chunk {
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(49, 147, 250, 255), stop:1 rgba(34, 142, 255, 255));
border-radius: 10px;
}
QMenuBar {
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(207, 209, 207, 255), stop:1 rgba(230, 229, 230, 255));
}
QMenuBar::item {
color: #000000;
spacing: 3px;
padding: 1px 4px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(207, 209, 207, 255), stop:1 rgba(230, 229, 230, 255));
}
QMenuBar::item:selected {
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
color: #FFFFFF;
}
QMenu::item:selected {
border-style: solid;
border-top-color: transparent;
border-right-color: transparent;
border-left-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
border-bottom-color: transparent;
border-left-width: 2px;
color: #000000;
padding-left:15px;
padding-top:4px;
padding-bottom:4px;
padding-right:7px;
}
QMenu::item {
border-style: solid;
border-top-color: transparent;
border-right-color: transparent;
border-left-color: transparent;
border-bottom-color: transparent;
border-bottom-width: 1px;
color: #000000;
padding-left:17px;
padding-top:4px;
padding-bottom:4px;
padding-right:7px;
}
QTabWidget {
color:rgb(0,0,0);
background-color:#000000;
}
QTabWidget::pane {
border-color: rgb(223,223,223);
background-color:rgb(226,226,226);
border-style: solid;
border-width: 2px;
border-radius: 6px;
}
QTabBar::tab:first {
border-style: solid;
border-left-width:1px;
border-right-width:0px;
border-top-width:1px;
border-bottom-width:1px;
border-top-color: rgb(209,209,209);
border-left-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(209, 209, 209, 209), stop:1 rgba(229, 229, 229, 229));
border-bottom-color: rgb(229,229,229);
border-top-left-radius: 4px;
border-bottom-left-radius: 4px;
color: #000000;
padding: 3px;
margin-left:0px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(247, 247, 247, 255), stop:1 rgba(255, 255, 255, 255));
}
QTabBar::tab:last {
border-style: solid;
border-width:1px;
border-top-color: rgb(209,209,209);
border-left-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(209, 209, 209, 209), stop:1 rgba(229, 229, 229, 229));
border-right-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(209, 209, 209, 209), stop:1 rgba(229, 229, 229, 229));
border-bottom-color: rgb(229,229,229);
border-top-right-radius: 4px;
border-bottom-right-radius: 4px;
color: #000000;
padding: 3px;
margin-left:0px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(247, 247, 247, 255), stop:1 rgba(255, 255, 255, 255));
}
QTabBar::tab {
border-style: solid;
border-top-width:1px;
border-bottom-width:1px;
border-left-width:1px;
border-top-color: rgb(209,209,209);
border-left-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(209, 209, 209, 209), stop:1 rgba(229, 229, 229, 229));
border-bottom-color: rgb(229,229,229);
color: #000000;
padding: 3px;
margin-left:0px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(247, 247, 247, 255), stop:1 rgba(255, 255, 255, 255));
}
QTabBar::tab:selected, QTabBar::tab:last:selected, QTabBar::tab:hover {
border-style: solid;
border-left-width:1px;
border-right-color: transparent;
border-top-color: rgb(209,209,209);
border-left-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(209, 209, 209, 209), stop:1 rgba(229, 229, 229, 229));
border-bottom-color: rgb(229,229,229);
color: #FFFFFF;
padding: 3px;
margin-left:0px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QTabBar::tab:selected, QTabBar::tab:first:selected, QTabBar::tab:hover {
border-style: solid;
border-left-width:1px;
border-bottom-width:1px;
border-top-width:1px;
border-right-color: transparent;
border-top-color: rgb(209,209,209);
border-left-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(209, 209, 209, 209), stop:1 rgba(229, 229, 229, 229));
border-bottom-color: rgb(229,229,229);
color: #FFFFFF;
padding: 3px;
margin-left:0px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QCheckBox {
color: #000000;
padding: 2px;
}
QCheckBox:disabled {
color: #808086;
padding: 2px;
}
QCheckBox:hover {
border-radius:4px;
border-style:solid;
padding-left: 1px;
padding-right: 1px;
padding-bottom: 1px;
padding-top: 1px;
border-width:1px;
border-color: transparent;
}
QCheckBox::indicator:checked {
height: 10px;
width: 10px;
border-style:solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
color: #000000;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QCheckBox::indicator:unchecked {
height: 10px;
width: 10px;
border-style:solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
color: #000000;
}
QRadioButton {
color: 000000;
padding: 1px;
}
QRadioButton::indicator:checked {
height: 10px;
width: 10px;
border-style:solid;
border-radius:5px;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
color: #a9b7c6;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QRadioButton::indicator:!checked {
height: 10px;
width: 10px;
border-style:solid;
border-radius:5px;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
color: #a9b7c6;
background-color: transparent;
}
QStatusBar {
color:#027f7f;
}
QSpinBox {
border-style: solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QDoubleSpinBox {
border-style: solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QTimeEdit {
border-style: solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QDateTimeEdit {
border-style: solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QDateEdit {
border-style: solid;
border-width: 1px;
border-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(0, 113, 255, 255), stop:1 rgba(91, 171, 252, 255));
}
QToolBox {
color: #a9b7c6;
background-color:#000000;
}
QToolBox::tab {
color: #a9b7c6;
background-color:#000000;
}
QToolBox::tab:selected {
color: #FFFFFF;
background-color:#000000;
}
QScrollArea {
color: #FFFFFF;
background-color:#000000;
}
QSlider::groove:horizontal {
height: 5px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(49, 147, 250, 255), stop:1 rgba(34, 142, 255, 255));
}
QSlider::groove:vertical {
width: 5px;
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(49, 147, 250, 255), stop:1 rgba(34, 142, 255, 255));
}
QSlider::handle:horizontal {
background: rgb(253,253,253);
border-style: solid;
border-width: 1px;
border-color: rgb(207,207,207);
width: 12px;
margin: -5px 0;
border-radius: 7px;
}
QSlider::handle:vertical {
background: rgb(253,253,253);
border-style: solid;
border-width: 1px;
border-color: rgb(207,207,207);
height: 12px;
margin: 0 -5px;
border-radius: 7px;
}
QSlider::add-page:horizontal {
background: rgb(181,181,181);
}
QSlider::add-page:vertical {
background: rgb(181,181,181);
}
QSlider::sub-page:horizontal {
background-color: qlineargradient(spread:pad, x1:0.5, y1:1, x2:0.5, y2:0, stop:0 rgba(49, 147, 250, 255), stop:1 rgba(34, 142, 255, 255));
}
QSlider::sub-page:vertical {
background-color: qlineargradient(spread:pad, y1:0.5, x1:1, y2:0.5, x2:0, stop:0 rgba(49, 147, 250, 255), stop:1 rgba(34, 142, 255, 255));
}
QScrollBar:horizontal {
max-height: 20px;
border: 1px transparent grey;
margin: 0px 20px 0px 20px;
}
QScrollBar:vertical {
max-width: 20px;
border: 1px transparent grey;
margin: 20px 0px 20px 0px;
}
QScrollBar::handle:horizontal {
background: rgb(253,253,253);
border-style: solid;
border-width: 1px;
border-color: rgb(207,207,207);
border-radius: 7px;
min-width: 25px;
}
QScrollBar::handle:horizontal:hover {
background: rgb(253,253,253);
border-style: solid;
border-width: 1px;
border-color: rgb(147, 200, 200);
border-radius: 7px;
min-width: 25px;
}
QScrollBar::handle:vertical {
background: rgb(253,253,253);
border-style: solid;
border-width: 1px;
border-color: rgb(207,207,207);
border-radius: 7px;
min-height: 25px;
}
QScrollBar::handle:vertical:hover {
background: rgb(253,253,253);
border-style: solid;
border-width: 1px;
border-color: rgb(147, 200, 200);
border-radius: 7px;
min-height: 25px;
}
QScrollBar::add-line:horizontal {
border: 2px transparent grey;
border-top-right-radius: 7px;
border-bottom-right-radius: 7px;
background: rgba(34, 142, 255, 255);
width: 20px;
subcontrol-position: right;
subcontrol-origin: margin;
}
QScrollBar::add-line:horizontal:pressed {
border: 2px transparent grey;
border-top-right-radius: 7px;
border-bottom-right-radius: 7px;
background: rgb(181,181,181);
width: 20px;
subcontrol-position: right;
subcontrol-origin: margin;
}
QScrollBar::add-line:vertical {
border: 2px transparent grey;
border-bottom-left-radius: 7px;
border-bottom-right-radius: 7px;
background: rgba(34, 142, 255, 255);
height: 20px;
subcontrol-position: bottom;
subcontrol-origin: margin;
}
QScrollBar::add-line:vertical:pressed {
border: 2px transparent grey;
border-bottom-left-radius: 7px;
border-bottom-right-radius: 7px;
background: rgb(181,181,181);
height: 20px;
subcontrol-position: bottom;
subcontrol-origin: margin;
}
QScrollBar::sub-line:horizontal {
border: 2px transparent grey;
border-top-left-radius: 7px;
border-bottom-left-radius: 7px;
background: rgba(34, 142, 255, 255);
width: 20px;
subcontrol-position: left;
subcontrol-origin: margin;
}
QScrollBar::sub-line:horizontal:pressed {
border: 2px transparent grey;
border-top-left-radius: 7px;
border-bottom-left-radius: 7px;
background: rgb(181,181,181);
width: 20px;
subcontrol-position: left;
subcontrol-origin: margin;
}
QScrollBar::sub-line:vertical {
border: 2px transparent grey;
border-top-left-radius: 7px;
border-top-right-radius: 7px;
background: rgba(34, 142, 255, 255);
height: 20px;
subcontrol-position: top;
subcontrol-origin: margin;
}
QScrollBar::sub-line:vertical:pressed {
border: 2px transparent grey;
border-top-left-radius: 7px;
border-top-right-radius: 7px;
background: rgb(181,181,181);
height: 20px;
subcontrol-position: top;
subcontrol-origin: margin;
}
QScrollBar::left-arrow:horizontal {
border: 1px transparent grey;
border-top-left-radius: 3px;
border-bottom-left-radius: 3px;
width: 6px;
height: 6px;
background: white;
}
QScrollBar::right-arrow:horizontal {
border: 1px transparent grey;
border-top-right-radius: 3px;
border-bottom-right-radius: 3px;
width: 6px;
height: 6px;
background: white;
}
QScrollBar::up-arrow:vertical {
border: 1px transparent grey;
border-top-left-radius: 3px;
border-top-right-radius: 3px;
width: 6px;
height: 6px;
background: white;
}
QScrollBar::down-arrow:vertical {
border: 1px transparent grey;
border-bottom-left-radius: 3px;
border-bottom-right-radius: 3px;
width: 6px;
height: 6px;
background: white;
}
QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal {
background: none;
}
QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {
background: none;
}"""
class ThreadWithReturnValue(Thread):
    """Thread subclass that captures the target callable's return value.

    Retrieve the result with ``join()``, which returns whatever the
    target returned (or None if it never ran).
    """

    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs=None, Verbose=None):
        # ``Verbose`` is kept only for backward compatibility with the
        # old Python 2 Thread signature; it is ignored.
        # BUG FIX: the original default ``kwargs={}`` was a shared
        # mutable default; give each instance its own dict instead.
        Thread.__init__(self, group, target, name, args,
                        {} if kwargs is None else kwargs)
        self._return = None

    def run(self):
        # Run the target ourselves so we can keep its return value,
        # which threading.Thread normally discards.
        # (Removed the stray debug print of the target's type.)
        if self._target is not None:
            self._return = self._target(*self._args, **self._kwargs)

    def join(self, *args):
        """Wait for the thread to finish and return the target's result."""
        Thread.join(self, *args)
        return self._return
class Auth(QWidget, AuthWindow):
    """Login window: collects credentials and authenticates on edu.tatar.ru."""

    def __init__(self):
        super(Auth, self).__init__()
        self.setupUi(self)
        # The original called setStyleSheet twice; once is enough.
        self.setStyleSheet(style)
        self.pushButton.clicked.connect(self.collectUserAuthData)
        # Prime the session with a GET so the login cookies are in place.
        self.session = requests.Session()
        self.session.get(URL)
        self.calendar = None

    def collectUserAuthData(self):
        """Read login/password from the form and try to authenticate."""
        login = self.textEdit.toPlainText().strip()
        # BUG FIX: this line was corrupted ("password = <PASSWORD>...");
        # restored to read from the password field.
        password = self.textEdit_2.toPlainText().strip()
        if self.checkUserAuthData(login, password) is True:
            status, session, name = self.authInSite(login, password)
            if status is True:
                self.label_4.setText("Авторизация выполнена успешно")
                self.widget = Main(session, login, password, name)
                wid.hide()
                self.widget.show()
            else:
                self.label_4.setText("Вы ввели неверный логин или пароль")

    def checkUserAuthData(self, login, password):
        """Return True if both fields are filled and the login is numeric."""
        if login == "" or password == "":
            self.label_4.setText("Поле 'Логин' и 'Пароль' не могут быть пустыми")
            return False
        try:
            int(login)
            return True
        except ValueError:
            # int() on a non-numeric string is the only failure expected here;
            # the original bare except has been narrowed accordingly.
            self.label_4.setText("Логин может содержать только цифры от 0 до 9")
            return False

    def authInSite(self, login, password):
        """POST the credentials; return (success, session, display_name)."""
        name = None
        cookie = {'_ga': 'GA1.2.1804685607.1574325953',
                  '_gid': 'GA1.2.1116002961.1574325953'}
        data = {loginElementName: login, passwordElementName: password}
        headers = {'Referer': URL}
        RH = self.session.post(URL, data=data, cookies=cookie, headers=headers).text
        soup = bs4(RH, "lxml")
        # A successful login lands on the "personal cabinet" page, whose
        # first <h2> matches successElementText; the user's name is in <b>.
        if soup.h2.text.strip() == successElementText:
            name = soup.find("b").text
            return (True, self.session, name)
        return (False, self.session, name)
class Main(QWidget, MainWindow):
    """Diary window: scrapes edu.tatar.ru and renders day/week/term views.

    ``session`` is an authenticated requests.Session created by Auth;
    ``login``/``password`` are kept so the session can be re-established
    when it expires.
    """

    def __init__(self, session, login, password, name):
        super(Main, self).__init__()
        self.setStyleSheet(style)
        self.setupUi(self)
        self.pushButton.clicked.connect(self.parseDay)
        self.pushButton_4.clicked.connect(self.quit)
        self.pushButton_3.clicked.connect(self.parseTable)
        self.pushButton_2.clicked.connect(self.parseWeek)
        self.pushButton_5.clicked.connect(self.getCalendarWidget)
        self.session = session
        self.login = login
        self.password = password
        self.reportCard = []
        self.username = name
        # BUG FIX: self.calendar was never initialised, so every
        # self.calendar.hide() call (including the one triggered by
        # parseDay() right below) raised AttributeError until the user
        # first opened the calendar widget.
        self.calendar = None
        self.label.setText(f"Привет, {' '.join(self.username.split()[0:2])}")
        self.parseDay()

    def _hideCalendar(self):
        """Hide the calendar widget if it has been created."""
        if self.calendar is not None:
            self.calendar.hide()

    def parseWeek(self):
        """Fetch and collect the diary for every day of the current week."""
        reportCard = []
        self._hideCalendar()
        now = int(str(time.time()).split(".")[0])
        today = datetime.datetime.today().isoweekday()
        startDay = now - (86400 * (today - 1))  # Monday of the current week
        # NOTE(review): "7 - today - 3" looks like an off-by-three in the
        # range end — confirm the intended last day of the week.
        endDay = now - (86400 * (7 - today - 3))
        while startDay <= endDay:
            startDay = str(startDay)
            # Fetch each day in a worker thread and collect its result.
            worker = ThreadWithReturnValue(target=self.parseDay, args=(startDay,))
            worker.start()
            reportCard.append(worker.join())
            startDay = int(str(startDay).split(".")[0])
            startDay += 86400
        self.returnWeekContent(reportCard)

    def returnWeekContent(self, reportCard):
        """Prepare the table headers for the week view.

        NOTE(review): only the headers are set up here; the collected
        week data is never written into the table — looks unfinished.
        """
        self._hideCalendar()
        self.clearPreviousContent()
        self.tableWidget.show()
        self.tableWidget.setColumnCount(5)
        for column, title in enumerate(
                ("Время", "Предмет", "Что задали", "Комментарий", "Оценка")):
            item = QTableWidgetItem()
            item.setText(title)
            self.tableWidget.setHorizontalHeaderItem(column, item)

    def getCalendarWidget(self):
        """Show an inline calendar so the user can pick a specific day."""
        self.tableWidget.hide()
        self.calendar = QCalendarWidget(self)
        self.calendar.setGridVisible(True)
        self.calendar.setGeometry(QtCore.QRect(10, 90, 621, 251))
        self.calendar.show()
        self.calendar.clicked[QDate].connect(self.selectMonthDay)

    def selectMonthDay(self, date):
        """Convert the picked QDate to a unix timestamp and load that day."""
        selectedDate = " ".join([str(i) for i in list(date.getDate())])
        selectedDate = int(time.mktime(time.strptime(selectedDate, '%Y %m %d')))
        self.parseDay(selectedDate)
        self._hideCalendar()

    def parseTable(self):
        """Fetch the term report card and show marks grouped by subject."""
        self._hideCalendar()
        reportCard = {}
        # BUG FIX: subjectName was referenced before assignment whenever
        # the first kept cell was numeric; start with None and skip marks
        # that arrive before any subject name has been seen.
        subjectName = None
        self.checkSessionIsValid()
        url = "https://edu.tatar.ru/user/diary/term"
        html = self.session.get(url).text
        soup = bs4(html, "lxml")
        cells = soup.find("table").findAll("td")
        # Keep subject names and marks, dropping layout/navigation cells.
        resultTags = [tag.text for tag in cells
                      if (len(tag.attrs) == 0 or tag.text == "ИТОГО")
                      and tag.string is not None
                      and tag.text != '\n' and tag.text != "просмотр"][1:-3]
        for item in resultTags:
            try:
                # Integer cells are marks; float cells are term averages.
                value = int(item) if item.isdigit() else float(item)
            except ValueError:
                # Not a number: this cell starts a new subject.
                reportCard[item] = []
                subjectName = item
            else:
                if subjectName is not None:
                    reportCard[subjectName].append(value)
        self.returnTableContent(reportCard)

    def returnTableContent(self, reportCard):
        """Render the {subject: [marks..., average]} mapping as a table."""
        self._hideCalendar()
        self.clearPreviousContent()
        self.tableWidget.setColumnCount(3)
        for column, title in enumerate(("Предмет", "Оценки", "Средний балл")):
            item = QTableWidgetItem()
            item.setText(title)
            self.tableWidget.setHorizontalHeaderItem(column, item)
        for row, subject in enumerate(reportCard.keys()):
            self.tableWidget.insertRow(row)
            self.tableWidget.setItem(row, 0, QTableWidgetItem(subject))
            # All values but the last are individual marks.
            marks = ", ".join([str(i) for i in reportCard[subject][0:-1]])
            self.tableWidget.setItem(row, 1, QTableWidgetItem(marks))
            # The last value is the average; show a dash when empty.
            cost = reportCard[subject][-1] if len(reportCard[subject]) != 0 else "—"
            self.tableWidget.setItem(row, 2, QTableWidgetItem(str(cost)))
        header = self.tableWidget.horizontalHeader()
        for column in range(3):
            header.setSectionResizeMode(column, QtWidgets.QHeaderView.ResizeToContents)

    def parseDay(self, date=None):
        """Fetch and display the diary for one day.

        ``date`` is a unix timestamp; None or 0 (the False a Qt clicked()
        signal passes compares equal to 0) means "today". Returns the
        parsed {date_label: [lesson_rows]} mapping so parseWeek can
        collect results from worker threads.
        """
        self._hideCalendar()
        if date is None or date == 0:
            date = int(str(time.time()).split(".")[0])
        else:
            date = int(str(date).split(".")[0])
        reportCard = {}
        resultTags = []
        self.checkSessionIsValid()
        url = f"https://edu.tatar.ru/user/diary/day?for={date}"
        html = self.session.get(url).text
        soup = bs4(html, "lxml")
        cells = soup.find("tbody").findAll("td")
        for tag in cells:
            # Prefer the title attribute (full text) over the cell text.
            if "title" in tag.attrs:
                resultTags.append(tag.get("title"))
            else:
                resultTags.append(tag.text.replace("\n", ""))
        # Walk the cells in reverse; a "HH:MM — HH:MM" time-range cell
        # closes one lesson row.
        resultTags = list(reversed(resultTags))
        dayKey = time.strftime("%a, %d %b %Y", time.localtime(date))
        reportCard[dayKey] = []
        lesson = []
        for tag in resultTags:
            if len(tag.split("—")) == 2 and tag.count(":") == 2:
                lesson.append(tag)
                if len(lesson) % 2 == 0:
                    # An even cell count means a mark/comment cell got
                    # split in two; merge the pieces before display.
                    lesson = self.prepareDayContent(list(reversed(lesson)))
                else:
                    lesson = list(reversed(lesson))
                reportCard[dayKey].append(lesson)
                lesson = []
            else:
                # Split multi-digit mark strings like "45" into "4, 5".
                if tag.isdigit() is True and len(tag) >= 2:
                    tag = ", ".join(list(tag))
                if len(tag) != 0:
                    lesson.append(tag.strip())
        # (The original also passed the dict itself through
        # prepareDayContent; with a single key that loop never ran, so
        # the no-op call has been dropped.)
        self.returnDayContent(reportCard)
        return reportCard

    def prepareDayContent(self, reportCard):
        """Merge glued cells of one lesson row in place and return it.

        From index 4 onwards, a cell followed by a non-numeric cell is
        joined with it (a comment split across two cells).
        """
        for i in range(len(reportCard) - 1):
            # BUG FIX: the range is computed up front while elements are
            # removed below, so guard against running off the end.
            if i + 1 >= len(reportCard):
                break
            if i >= 4 and reportCard[i + 1].isdigit() is False:
                reportCard[i] = ", ".join([reportCard[i], reportCard[i + 1]])
                # NOTE(review): list.remove deletes the first cell equal
                # to this value, which may not be index i + 1 when the
                # row contains duplicates — kept as in the original.
                reportCard.remove(reportCard[i + 1])
        return reportCard

    def returnDayContent(self, reportCard):
        """Render a single day's lessons into the table widget."""
        self._hideCalendar()
        self.clearPreviousContent()
        self.tableWidget.show()
        self.tableWidget.setRowCount(1)
        item = QTableWidgetItem()
        item.setText(list(reportCard.keys())[0])
        self.tableWidget.setVerticalHeaderItem(0, item)
        self.tableWidget.setColumnCount(5)
        for column, title in enumerate(
                ("Время", "Предмет", "Что задали", "Комментарий", "Оценка")):
            item = QTableWidgetItem()
            item.setText(title)
            self.tableWidget.setHorizontalHeaderItem(column, item)
        day = list(reportCard.values())[0]
        # Lessons were collected in reverse document order; flip them back.
        for row, lesson in enumerate(list(reversed(day)), 1):
            lesson = list(lesson)
            self.tableWidget.insertRow(row)
            for column in range(len(lesson)):
                self.tableWidget.setItem(row, column, QTableWidgetItem(lesson[column]))
            self.tableWidget.setVerticalHeaderItem(row, QTableWidgetItem(str(row)))
        header = self.tableWidget.horizontalHeader()
        for column in range(5):
            header.setSectionResizeMode(column, QtWidgets.QHeaderView.ResizeToContents)

    def clearPreviousContent(self):
        """Reset the table widget before rendering new content."""
        self.tableWidget.clearContents()
        self._hideCalendar()
        self.tableWidget.setRowCount(0)
        self.tableWidget.setColumnCount(0)

    def checkSessionIsValid(self):
        """Re-authenticate through the Auth window if the session expired."""
        response = self.session.get("https://edu.tatar.ru/user/diary/term",
                                    allow_redirects=False)
        # An expired session answers with a redirect instead of 200.
        if response.status_code != 200:
            wid.authInSite(self.login, self.password)

    def quit(self):
        """Return to the login window and clear its fields."""
        self.hide()
        wid.show()
        wid.textEdit.setText("")
        wid.textEdit_2.setText("")
        wid.label_4.setText("")
if __name__ == '__main__':
    app = QApplication(sys.argv)
    # ``wid`` is deliberately module-global: Auth and Main reference it
    # to switch between the login and diary windows.
    wid = Auth()
    wid.show()
    sys.exit(app.exec_())
5133861 | from gyomei_trainer.builder import (
Builder, BaseBuilder, State, AverageValueMeter
)
from gyomei_trainer.model import Model
import gyomei_trainer.metrics
import gyomei_trainer.modules
__version__ = "1.0.2"
| StarcoderdataPython |
6702480 | <reponame>faisaltheparttimecoder/carelogBackend
from products.models import Product
from products.serializers import ProductsSerializer
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from common.utilities import get_url
class ProductsList(APIView):
    """List all products, or create a new product."""

    def get(self, request, format=None):
        """Return every product as a serialized list."""
        queryset = Product.objects.all()
        serializer = ProductsSerializer(queryset, many=True)
        return Response(serializer.data)

    def post(self, request, format=None):
        """Create a product from the request payload."""
        serializer = ProductsSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class ProductsListDetails(APIView):
    """Retrieve, update or delete a single product instance."""

    def get_object(self, pk):
        """Return the product with primary key ``pk`` or raise Http404."""
        try:
            return Product.objects.get(pk=pk)
        except Product.DoesNotExist:
            raise Http404

    def get(self, request, pk, format=None):
        """Return the serialized product, augmented with the fetched URL content."""
        serializer = ProductsSerializer(self.get_object(pk))
        payload = serializer.data
        payload['content'] = get_url(serializer.data['url'])
        return Response(payload)

    def put(self, request, pk, format=None):
        """Replace the product's fields with the request payload."""
        serializer = ProductsSerializer(self.get_object(pk), data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data)

    def delete(self, request, pk, format=None):
        """Remove the product."""
        self.get_object(pk).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
291928 | # -*- coding: utf-8 -*-
# @Time : 2018/3/13 08:30
# @Author : play4fun
# @File : compare_photos.py
# @Software: PyCharm
"""
compare_photos.py:
"""
import cv2, pickle
from pprint import pprint
with open('photo_mat', 'rb') as f:
mat = pickle.load(f)
pairs = [] # 配对好的
lenX = 9 # 行
lenY = 8 # 列
def get_image_difference(image_1, image_2):  # NOTE(review): original author flagged this as "doesn't work"
    """Score the dissimilarity of two images from their 256-bin histograms.

    Combines a Bhattacharyya histogram distance (weighted at 10%) with a
    normalized template-match score computed over the histograms
    themselves. Lower means more similar.
    """
    first_image_hist = cv2.calcHist([image_1], [0], None, [256], [0, 256])
    second_image_hist = cv2.calcHist([image_2], [0], None, [256], [0, 256])
    img_hist_diff = cv2.compareHist(first_image_hist, second_image_hist, cv2.HISTCMP_BHATTACHARYYA)
    img_template_probability_match = cv2.matchTemplate(first_image_hist, second_image_hist, cv2.TM_CCOEFF_NORMED)[0][0]
    img_template_diff = 1 - img_template_probability_match
    # taking only 10% of histogram diff, since it's less accurate than template method
    commutative_image_diff = (img_hist_diff / 10) + img_template_diff
    return commutative_image_diff
def compare(i, j, img):
    """Compare tile (i, j) of the module-level grid `mat` against every
    tile that comes after it in row-major order.

    Appends (i, j, x, y, score) to the module-level `pairs` list whenever
    the normalised template-match score of the two grayscale tiles is at
    least 0.8 (self-comparisons are skipped).
    """
    for x in range(lenX):
        if x < i:
            continue
        for y in range(lenY):
            # On row i itself, skip the columns before j so each unordered
            # pair of tiles is only compared once.
            if x <= i and y < j:
                continue
            z = mat[x][y]
            # image similarity
            y1 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            z1 = cv2.cvtColor(z, cv2.COLOR_BGR2GRAY)
            # image_difference = get_image_difference(y1, z1)
            res = cv2.matchTemplate(z1, y1, cv2.TM_CCOEFF_NORMED)
            # print(i, j, x, y, image_difference)
            print(i, j, x, y, res)
            # if abs(image_difference-1)>0.5:
            # if image_difference < 0.1:
            #     pairs.append((i, j, x, y, image_difference))
            if res[0][0] >= 0.8 :#and (i != x and j != y): # 0.9 works better
                if i ==x and j ==y:
                    continue
                pairs.append((i, j, x, y, res[0][0]))
print('--------')
# Compare every tile of the grid against every later tile; matches are
# accumulated in the module-level `pairs` list by compare().
for i, x in enumerate(mat):
    for j, y in enumerate(x):
        compare(i, j, y)
print('--------',len(pairs))
pprint(pairs)# 156 pairs found  # author's note: result looks wrong
'''
[(0, 0, 0, 4, 0.81783479),
(0, 0, 1, 0, 0.82939386),
(0, 0, 1, 5, 0.80112994),
(0, 0, 2, 4, 0.81963593),
(0, 0, 2, 5, 0.80141765),
(0, 0, 3, 2, 0.83176291),
(0, 0, 5, 1, 0.82441366),
(0, 0, 5, 3, 0.93773538),
(0, 0, 6, 0, 0.80839384),
(0, 0, 7, 3, 0.80357623),
(0, 1, 4, 6, 0.84010893),
(0, 2, 4, 5, 0.89919138),
(0, 2, 5, 5, 0.89656675),
(0, 2, 6, 2, 0.87691551),
(0, 3, 2, 6, 0.94418496),
(0, 3, 3, 4, 0.97784418),
(0, 3, 5, 6, 0.91531861),
(0, 3, 7, 4, 0.90034771),
(0, 3, 8, 7, 0.8669098),
(0, 4, 1, 0, 0.95897603),
(0, 4, 1, 5, 0.9859665),
(0, 4, 2, 3, 0.84755546),
(0, 4, 2, 4, 0.98988521),
(0, 4, 2, 5, 0.97593749),
(0, 4, 3, 2, 0.96898985),
(0, 4, 5, 1, 0.93505126),
(0, 4, 5, 7, 0.92510819),
(0, 4, 6, 0, 0.88995898),
(0, 4, 7, 3, 0.91428041),
(0, 5, 2, 0, 0.90362453),
(0, 5, 2, 1, 0.93313634),
(0, 5, 6, 4, 0.88912612),
(0, 7, 2, 7, 0.98162633),
(0, 7, 3, 0, 0.84628779),
(0, 7, 6, 7, 0.85053468),
(1, 0, 1, 5, 0.93375051),
(1, 0, 2, 3, 0.80927575),
(1, 0, 2, 4, 0.95577663),
(1, 0, 2, 5, 0.93438679),
(1, 0, 3, 2, 0.98244762),
(1, 0, 5, 1, 0.95950162),
(1, 0, 5, 7, 0.9012484),
(1, 0, 6, 0, 0.93606734),
(1, 0, 7, 0, 0.81604606),
(1, 0, 7, 3, 0.91213149),
(1, 1, 7, 1, 0.8624481),
(1, 2, 1, 7, 0.94927907),
(1, 2, 4, 3, 0.97030866),
(1, 2, 6, 6, 0.89334244),
(1, 3, 7, 5, 0.90350145),
(1, 4, 3, 5, 0.92840946),
(1, 4, 3, 6, 0.92976296),
(1, 4, 8, 1, 0.87637573),
(1, 4, 8, 5, 0.86086744),
(1, 5, 2, 3, 0.83290088),
(1, 5, 2, 4, 0.98093969),
(1, 5, 2, 5, 0.9865284),
(1, 5, 3, 2, 0.95161527),
(1, 5, 5, 1, 0.91846502),
(1, 5, 5, 7, 0.93449652),
(1, 5, 6, 0, 0.87814039),
(1, 5, 7, 3, 0.91769367),
(1, 6, 3, 3, 0.87408149),
(1, 6, 4, 7, 0.83912045),
(1, 7, 4, 3, 0.93324989),
(1, 7, 6, 6, 0.90282589),
(2, 0, 2, 1, 0.98332465),
(2, 0, 6, 4, 0.89946473),
(2, 1, 6, 4, 0.91386253),
(2, 2, 4, 0, 0.97106832),
(2, 3, 2, 4, 0.85241109),
(2, 3, 2, 5, 0.84527677),
(2, 3, 3, 2, 0.83583575),
(2, 3, 3, 4, 0.80124199),
(2, 3, 5, 1, 0.81944293),
(2, 3, 5, 7, 0.819251),
(2, 3, 7, 0, 0.91440505),
(2, 3, 7, 3, 0.80969107),
(2, 4, 2, 5, 0.9853642),
(2, 4, 3, 2, 0.98278183),
(2, 4, 5, 1, 0.96176714),
(2, 4, 5, 3, 0.81060904),
(2, 4, 5, 7, 0.95080549),
(2, 4, 6, 0, 0.92093289),
(2, 4, 7, 0, 0.82010585),
(2, 4, 7, 3, 0.94900286),
(2, 5, 3, 2, 0.96413034),
(2, 5, 5, 1, 0.93163985),
(2, 5, 5, 3, 0.80133277),
(2, 5, 5, 7, 0.95228308),
(2, 5, 6, 0, 0.89228898),
(2, 5, 7, 0, 0.80005699),
(2, 5, 7, 3, 0.93504852),
(2, 6, 3, 4, 0.9634583),
(2, 6, 5, 6, 0.97281444),
(2, 6, 7, 4, 0.90955776),
(2, 6, 8, 6, 0.81169814),
(2, 6, 8, 7, 0.87542808),
(2, 7, 3, 0, 0.86373925),
(2, 7, 6, 7, 0.90865624),
(3, 0, 6, 7, 0.80371922),
(3, 1, 3, 7, 0.89857602),
(3, 2, 5, 1, 0.98385006),
(3, 2, 5, 3, 0.80837327),
(3, 2, 5, 7, 0.94026983),
(3, 2, 6, 0, 0.95155406),
(3, 2, 7, 0, 0.83519346),
(3, 2, 7, 3, 0.95594138),
(3, 3, 4, 7, 0.81548607),
(3, 3, 8, 4, 0.88165134),
(3, 4, 5, 6, 0.96190572),
(3, 4, 7, 4, 0.95597637),
(3, 4, 8, 7, 0.90763825),
(3, 5, 3, 6, 0.96791953),
(3, 5, 7, 7, 0.81160647),
(3, 5, 8, 5, 0.88941646),
(3, 6, 7, 7, 0.8219896),
(3, 6, 8, 1, 0.80933893),
(3, 6, 8, 5, 0.92017508),
(4, 1, 6, 5, 0.8459152),
(4, 1, 7, 2, 0.95110172),
(4, 2, 6, 1, 0.95789027),
(4, 3, 6, 6, 0.95759535),
(4, 4, 5, 1, 0.80212337),
(4, 4, 7, 3, 0.80778289),
(4, 4, 8, 2, 0.92399627),
(4, 5, 5, 5, 0.98698038),
(4, 5, 6, 2, 0.91531587),
(5, 0, 5, 4, 0.95705253),
(5, 1, 5, 3, 0.81610906),
(5, 1, 5, 7, 0.93452507),
(5, 1, 6, 0, 0.98169124),
(5, 1, 7, 0, 0.84997863),
(5, 1, 7, 3, 0.97735828),
(5, 2, 8, 3, 0.96606308),
(5, 3, 5, 7, 0.80398655),
(5, 3, 6, 0, 0.80013829),
(5, 3, 7, 3, 0.82962543),
(5, 5, 6, 2, 0.91919237),
(5, 6, 7, 4, 0.96237701),
(5, 6, 7, 6, 0.80884886),
(5, 6, 8, 6, 0.80175209),
(5, 6, 8, 7, 0.92764288),
(5, 7, 6, 0, 0.90893477),
(5, 7, 7, 0, 0.82358778),
(5, 7, 7, 3, 0.94626212),
(6, 0, 7, 0, 0.85159588),
(6, 0, 7, 3, 0.96886152),
(6, 3, 8, 0, 0.94173014),
(6, 5, 7, 2, 0.90841216),
(7, 0, 7, 3, 0.84417427),
(7, 4, 8, 7, 0.93397516),
(7, 6, 8, 6, 0.96749038),
(7, 7, 8, 1, 0.80834168),
(7, 7, 8, 5, 0.84336907),
(8, 1, 8, 5, 0.89013624)]
'''
'''
#Test
# 1, 0, 1, 5
a = mat[1][0]
b = mat[1][5]
y1 = cv2.cvtColor(a, cv2.COLOR_BGR2GRAY)
z1 = cv2.cvtColor(b, cv2.COLOR_BGR2GRAY)
# image_difference = get_image_difference(y1, z1)
res = cv2.matchTemplate(z1, y1, cv2.TM_CCOEFF_NORMED)
print(1, 0, 1, 5, res)
'''
def compare_2(x1, y1, x2, y2):
    """Print the template-match score between grid tiles (x1, y1) and (x2, y2).

    Small manual probe used to spot-check individual results of compare().
    """
    tile_a = mat[x1][y1]
    tile_b = mat[x2][y2]
    gray_a = cv2.cvtColor(tile_a, cv2.COLOR_BGR2GRAY)
    gray_b = cv2.cvtColor(tile_b, cv2.COLOR_BGR2GRAY)
    res = cv2.matchTemplate(gray_b, gray_a, cv2.TM_CCOEFF_NORMED)
    print(x1, y1, x2, y2, res)
# compare_2(2, 0, 2, 1)
| StarcoderdataPython |
from typing import List
import numpy as np
from EOSMixture import EOSMixture
from Factories.EOSMixFactory import createEOSMix
from Properties import Props
from compounds import SubstanceProp
class MixtureModel:
    """Application-side model of a fluid mixture.

    Holds the substances in the system, the chosen equation of state, the
    binary interaction parameters, the process/reference conditions and the
    VLE settings, and notifies registered observers whenever part of that
    state changes.
    """
    def __init__(self):
        self.propsliq: Props = None
        self.propsvap: Props = None
        self.system: EOSMixture = None
        self.T: float = 150
        self.P: float = 1e5
        self.Tref: float = 300
        self.Pref: float = 150
        self.y: List[float] = []
        self.k: List[List[float]] = [[]]
        # NOTE(review): "<NAME>" looks like anonymisation residue of
        # "Peng Robinson (1976)" — confirm against the EOS factory keys.
        self.eosname: str = "<NAME> Robinson (1976)"
        # VLE
        self.T_vle: float = 150
        self.P_vle: float = 1e5
        self.y_vle: List[float] = []
        self.vle_method = "phi-phi"
        self.binaryDiagram_type = "isothermal"  # or isobaric
        self.substances_in_the_system: List[SubstanceProp] = []
        self.EOSObservers = []
        self.RefObservers = []
        self.ProcObservers = []
        self.SubstanceObservers = []
        self.CalculationObservers = []
        self.info: str = ""
        self.log: str = ""
    # ================== SETTERS =========================
    def setVLEmethod(self, method: str):
        """Set the VLE method on both the model and the underlying system."""
        self.vle_method = method
        self.system.setVLEmethod(method)
    def setProc(self, p: float, t: float):
        """Set process pressure/temperature and notify process observers."""
        self.P = p
        self.T = t
        self.notifyProcObservers()
    def setRef(self, p: float, t: float):
        """Set reference pressure/temperature and notify reference observers."""
        self.Pref = p
        self.Tref = t
        self.notifyRefObservers()
    def setEOS(self, s: str):
        """Select the equation of state by name, rebuild the system."""
        self.eosname = s
        self.setupSystem()
        self.notifyEOSObservers()
    def addSubstanceToSystem(self, substance: SubstanceProp):
        """Add a substance, reset k to zeros of the new size, rebuild."""
        self.substances_in_the_system.append(substance)
        self.updateK()
        self.setupSystem()
        self.notifySubstanceObservers()
    def clearSubstancesInSystem(self):
        """Remove every substance from the system and rebuild."""
        self.substances_in_the_system: List[SubstanceProp] = []
        self.updateK()
        self.setupSystem()
        self.notifySubstanceObservers()
    def removeSubstanceFromSystem(self, substance: str):
        """Remove the first substance whose Name matches, then rebuild."""
        if self.getNumberOfSubstancesInSystem() > 0:
            for s in self.substances_in_the_system:
                if s.Name == substance:
                    self.substances_in_the_system.remove(s)
                    self.updateK()
                    self.setupSystem()
                    self.notifySubstanceObservers()
                    return
    def setMolarFractions(self, y: List[float]):
        """Set the overall molar fractions, validating length and sum."""
        if len(y) != self.getNumberOfSubstancesInSystem():
            raise ValueError(
                "Number of molar fractions not equals number of substances in the system"
            )
        if np.abs(np.sum(y) - 1.0) > 1e-10:
            raise ValueError("Molar fractions doesn't sum to one")
        self.y = y
    def setBinaryInteractionsParameters(self, k: List[List[float]]):
        """Set the binary interaction matrix and rebuild the system."""
        self.k = k
        self.setupSystem()
    def setupSystem(self):
        """Recreate the EOS mixture object from the current state."""
        self.system = createEOSMix(self.substances_in_the_system, self.eosname, self.k)
        self.setVLEmethod(self.vle_method)
    def setVLEPT(self, p: float, t: float):
        """Set the pressure/temperature used for VLE calculations."""
        self.P_vle, self.T_vle = p, t
    def setVLEMolarFractions(self, y: List[float]):
        """Set the VLE molar fractions, validating length and sum."""
        if len(y) != self.getNumberOfSubstancesInSystem():
            raise ValueError(
                "Number of molar fractions not equals number of substances in the system"
            )
        # BUGFIX: previously used exact float equality (np.sum(y) != 1.0),
        # which rejected valid inputs due to rounding; use the same
        # tolerance as setMolarFractions.
        if np.abs(np.sum(y) - 1.0) > 1e-10:
            raise ValueError("Molar fractions doesn't sum to one")
        self.y_vle = y
    def setBinaryDiagramType(self, t: str):
        """Set the binary diagram type ("isothermal" or "isobaric")."""
        self.binaryDiagram_type = t
    # ================== GETTERS =========================
    def getVLEmethod(self) -> str:
        return self.system.vle_method
    def getPref(self) -> float:
        return self.Pref
    def getP(self) -> float:
        return self.P
    def getTref(self) -> float:
        return self.Tref
    def getT(self) -> float:
        return self.T
    def getEOS(self) -> str:
        return self.eosname
    def getSubstancesInSystems(self) -> List[SubstanceProp]:
        return self.substances_in_the_system
    def getNumberOfSubstancesInSystem(self) -> int:
        return len(self.substances_in_the_system)
    def getMolarFractions(self) -> List[float]:
        return self.y
    def getBinaryInteractionsParameters(self) -> List[List[float]]:
        return self.k
    def isBinaryMixture(self) -> bool:
        return self.getNumberOfSubstancesInSystem() == 2
    def getPropsLiq(self) -> Props:
        return self.propsliq
    def getPropsVap(self) -> Props:
        return self.propsvap
    def getVLEMolarFractions(self) -> List[float]:
        return self.y_vle
    def getBinaryDiagramType(self) -> str:
        return self.binaryDiagram_type
    def updateK(self):
        """Reset k to an all-zero n x n matrix for the current substances."""
        n = self.getNumberOfSubstancesInSystem()
        self.k = np.zeros((n, n), dtype=np.float64)
    def getFluidState(self) -> str:
        """Classify the fluid at (P, T) as vapor, liquid or VL-equilibrium
        by comparing P with the bubble- and dew-point pressures."""
        pbol = self.system.getBubblePointPressure(
            self.getMolarFractions(), self.getT()
        )[1]
        pdew = self.system.getDewPointPressure(self.getMolarFractions(), self.getT())[1]
        p = self.getP()
        from compounds import state_dict
        if p < pdew:
            state = state_dict["vap"]
        elif p > pbol:
            state = state_dict["liq"]
        else:
            state = state_dict["VL_equi"]
        return state
    # ================= CALCULATIONS ==============
    def calculations(self):
        """Compute liquid/vapor properties at the current conditions and
        notify calculation observers; wraps failures in a ValueError."""
        try:
            self.propsliq, self.propsvap = self.system.getAllProps(
                self.y, self.Tref, self.T, self.Pref, self.P
            )
            self.notifyCalculationsObserver()
        except Exception as e:
            raise ValueError(
                "Error calculating properties of mixture\n{}".format(str(e))
            )
    # Observers registers
    def registerEOSObserver(self, o):
        self.EOSObservers.append(o)
    def registerRefObserver(self, o):
        self.RefObservers.append(o)
    def registerProcObserver(self, o):
        self.ProcObservers.append(o)
    def registerSubstanceObserver(self, o):
        self.SubstanceObservers.append(o)
    def registerCalculationsObserver(self, o):
        self.CalculationObservers.append(o)
    # Observers notify
    def notifyEOSObservers(self):
        for o in self.EOSObservers:
            o.updateEOS()
    def notifyRefObservers(self):
        for o in self.RefObservers:
            o.updateRef()
    def notifyProcObservers(self):
        for o in self.ProcObservers:
            o.updateProc()
    def notifySubstanceObservers(self):
        for o in self.SubstanceObservers:
            o.updateSubstance()
    def notifyCalculationsObserver(self):
        for o in self.CalculationObservers:
            o.updateCalculations()
| StarcoderdataPython |
'''
Created on Nov 26, 2009
@author: <NAME>
'''
import numpy as N
import scipy.signal as SS
import scipy.interpolate as I
import scipy.optimize as O
import pylab as P
class SplineFitting:
    """Least-squares fitting of noisy data with a spline whose node y-values
    are the free parameters (the node x-positions are held fixed)."""

    def __init__(self, xnodes, spline_order=3):
        """Store the fixed x positions of the spline nodes and the order."""
        self.xnodes = xnodes
        self.k = spline_order

    def _fakeData(self):
        """Generate a synthetic noisy test signal on x = 1..1024."""
        x = N.linspace(1, 1024, 1024)
        clean = self._gety(x, 2.5, 1.3, 0.5, 10)
        noisy = clean + 0.25 * N.random.normal(size=len(x))
        return x, noisy

    def _gety(self, x, a, b, c, d):
        """Model curve: a decaying exponential plus a logarithmic term."""
        return a * N.exp(-b * x) + c * N.log(d * x ** 2)

    def fitfunc(self, x, ynodes):
        """Evaluate at x the spline through (self.xnodes, ynodes)."""
        tck = I.splrep(self.xnodes, ynodes, k=self.k)
        return I.splev(x, tck)

    def errfunc(self, ynodes, x, y):
        """Residuals between the spline defined by ynodes and the data y."""
        return self.fitfunc(x, ynodes) - y

    def doFit(self, ynodes, x, y):
        """Least-squares optimise the node y-values; returns leastsq output."""
        return O.leastsq(self.errfunc, ynodes, args=(x, y))
if __name__ == '__main__':
    '''
    Demo: fit a spline to synthetic noisy data and save a diagnostic plot.
    Executes this if ran from a command line.
    '''
    # Initializes the instance with dummy xnodes (replaced with the real
    # node positions further below)
    Spline = SplineFitting([0,])
    # Makes some faked data
    x, y = Spline._fakeData()
    # Median filter the data
    medianFiltered = SS.medfilt(y, 7)
    # Spline nodes and initial guess for y positions from median filtered
    xnods = N.arange(0, 1050, 50)
    ynods = medianFiltered[xnods]
    # Updates dummy xnodes in Spline instance with the real deal
    Spline.xnodes = xnods
    # Do the fitting
    fittedYnodes, success = Spline.doFit(ynods, x, y)
    # Lets plot the data for visual inspection
    fig = P.figure()
    left, width = 0.1, 0.8
    rect1 = [left, 0.3, width, 0.65]
    rect2 = [left, 0.1, width, 0.2]
    ax1 = fig.add_axes(rect2) #left, bottom, width, height
    ax2 = fig.add_axes(rect1)
    ax2.plot(x, y, label='Noisy data')
    ax2.plot(x, medianFiltered, 'y-', label= 'Median Filtered', lw = 2)
    ax2.plot(x, Spline.fitfunc(x, ynods), 'm-', label = 'Initial Spline', lw = 2)
    ax2.plot(x, Spline.fitfunc(x, fittedYnodes), 'r-', label = 'Fitted Spline', lw = 2)
    ax2.plot(xnods, ynods, 'go', label ='Initial Spline nodes')
    ax2.plot(xnods, fittedYnodes, 'gs', label ='Fitted Spline nodes')
    ax1.axhline(0)
    ax1.plot(x, SS.medfilt((y-Spline.fitfunc(x, ynods)), 55), 'm-', label = 'Initial guess residuals')
    ax1.plot(x, SS.medfilt((y-Spline.fitfunc(x, fittedYnodes)), 55), 'r-', label = 'Fitted residuals')
    ax1.set_xlim(0,1000)
    ax2.set_xlim(0,1000)
    ax2.set_xticklabels([])
    ax2.set_yticks(ax2.get_yticks()[1:])
    ax1.set_yticks(ax1.get_yticks()[::2])
    ax1.set_ylabel('Residuals')
    ax2.set_ylabel('Arbitrary Counts')
    ax1.set_xlabel('Pixels')
    # NOTE(review): the bare `except:` below is deliberately broad to cope
    # with old matplotlib versions that reject the numpoints keyword.
    try:
        #IRAFDEV has too old matplotlib...
        ax2.legend(numpoints = 1, loc = 'best')
    except:
        ax2.legend(loc = 'best')
    # Dataset-export residue ("| StarcoderdataPython |") removed from the
    # end of the following line; it was not valid Python.
    P.savefig('SplineFitting.pdf')
# repo: tcoxon/fishpye
import numpy as np
import world
from math import *
def sign(x):
    """Return -1, 0 or 1 according to the sign of x.

    The original used the Python 2-only builtin ``cmp(x, 0)``; this
    equivalent expression works on both Python 2 and Python 3.
    """
    return (x > 0) - (x < 0)
def positive(x):
    """Clamp x below at zero: return x when positive, otherwise 0."""
    if x > 0:
        return x
    return 0
def trace_from_to(f, start, end):
    """Trace voxels along the segment from `start` to `end`, calling
    f(x, y, z) -> Bool for each; traversal stops when f returns False or
    the voxel containing `end` is reached."""
    last = (floor(end[0]), floor(end[1]), floor(end[2]))

    def keep_going(x, y, z):
        at_last = (x == last[0] and y == last[1] and z == last[2])
        return f(x, y, z) and not at_last

    direction = (end[0] - start[0], end[1] - start[1], end[2] - start[2])
    trace(keep_going, start, direction)
TRACE_LOOP_LIMIT = 100
INF = float('inf')
def trace(f, u, v):
    """
    f(x,y,z) -> Bool
    u is the starting point of the ray in float coords
    v is the direction vector of the ray
    Uses J Amanatides and A Woo's voxel traversal algorithm to trace
    all the voxels from the starting point, u, along vector v until
    function f() returns False (or TRACE_LOOP_LIMIT voxels were visited).

    NOTE: this file targets Python 2 (`xrange` below).
    """
    # Homogeneous 4-vectors: w=1 for points, w=0 for directions.
    u = np.array([u[0], u[1], u[2], 1.0])
    v = np.array([v[0], v[1], v[2], 0.0])
    t = 0.0
    # p is the coordinate of the current voxel
    p = np.array([floor(u[0]), floor(u[1]), floor(u[2]), 1])
    # step components are -1, 0, or 1. Values determined from v
    step = np.array([sign(v[0]), sign(v[1]), sign(v[2]), 0])
    # tmax = values of t at which ray next crosses a voxel boundary
    # (INF for axes the ray does not move along, which also guards the
    # divisions below against zero components of v)
    tmax = np.array([
        (p[0] + positive(step[0]) - u[0]) / v[0] if step[0] != 0 else INF,
        (p[1] + positive(step[1]) - u[1]) / v[1] if step[1] != 0 else INF,
        (p[2] + positive(step[2]) - u[2]) / v[2] if step[2] != 0 else INF,
        0.0])
    # dt = how far along ray (in units of t) we must move for x/y/z
    # component of move to equal width of one voxel
    dt = np.array([
        step[0] / v[0] if step[0] != 0 else INF,
        step[1] / v[1] if step[1] != 0 else INF,
        step[2] / v[2] if step[2] != 0 else INF,
        0.0])
    # Walk one voxel per iteration, always stepping across the nearest
    # upcoming boundary (smallest tmax component). The loop is bounded by
    # TRACE_LOOP_LIMIT as a safety net against run-away rays.
    # NOTE(review): `t` is updated but never used after the loop — it looks
    # like debugging leftovers; confirm before removing.
    for i in xrange(TRACE_LOOP_LIMIT):
        if not f(p[0], p[1], p[2]):
            break
        if tmax[0] < tmax[1]:
            if tmax[0] < tmax[2]:
                p[0] += step[0]
                t = tmax[0]
                tmax[0] += dt[0]
            else:
                p[2] += step[2]
                t = tmax[2]
                tmax[2] += dt[2]
        else:
            if tmax[1] < tmax[2]:
                p[1] += step[1]
                t = tmax[1]
                tmax[1] += dt[1]
            else:
                p[2] += step[2]
                t = tmax[2]
                tmax[2] += dt[2]
# FIXME: portals
def blocking(w, x, y, z):
    """Return True when voxel (x, y, z) lies outside world `w` or holds a
    blocking grid cell."""
    if x < 0 or x >= w.x_size():
        return True
    if y < 0 or y >= w.y_size():
        return True
    if z < 0 or z >= w.z_size():
        return True
    return world.blocking(w.grid_get(x, y, z))
def _climb_step(w, obj, x, y, z, bx, by, bz):
    """Snap the object's foot position (bx, by, bz) just past the voxel
    boundary along the object's axis-aligned up-vector obj.uy, keeping the
    centre-to-foot offset, so the object climbs a one-block step.

    Up-vectors that are not axis-aligned are left untouched.
    """
    if obj.uy[0] == 0 and obj.uy[2] == 0:
        # up-vector along the y axis
        offset = y - by
        by = floor(by) + 1.0 if sign(obj.uy[1]) == 1 else floor(by) - 0.005
        y = by + offset
    elif obj.uy[0] == 0 and obj.uy[1] == 0:
        # up-vector along the z axis
        offset = z - bz
        bz = floor(bz) + 1.0 if sign(obj.uy[2]) == 1 else floor(bz) - 0.005
        z = bz + offset
    elif obj.uy[1] == 0 and obj.uy[2] == 0:
        # up-vector along the x axis
        offset = x - bx
        bx = floor(bx) + 1.0 if sign(obj.uy[0]) == 1 else floor(bx) - 0.005
        x = bx + offset
    else:
        # Don't climb any steps at weird angles like these
        pass
    return (x, y, z, bx, by, bz)
def legal_move(w, obj, x, y, z):
    """Return the position obj actually ends up at when it attempts to move
    to (x, y, z) in world w.

    Each axis of the target is clamped back to the current position if it
    would put the object's centre outside the grid; single steps are climbed
    when possible; finally wall-sliding collision resolution is applied.
    """
    # Check the center of the object, after the move, is still within
    # the bounds of the grid. If it is not, and the object is a camera,
    # this could cause the renderer to crash
    reverted_all = True
    if x < 0.0 or x >= w.x_size():
        x = obj.x
    else:
        reverted_all = False
    if y < 0.0 or y >= w.y_size():
        y = obj.y
    else:
        reverted_all = False
    if z < 0.0 or z >= w.z_size():
        z = obj.z
    else:
        reverted_all = False
    if not reverted_all:
        # We didn't revert the coords to the original, so the desired
        # location is still within the bounds of the grid.
        # Where the foot (bottom) of the object would be if it moved to
        # the target location
        (bx,by,bz) = (x - obj.uy[0] * obj.hover_height,
                y - obj.uy[1] * obj.hover_height,
                z - obj.uy[2] * obj.hover_height)
        # If the target location is the step of a staircase, then the
        # following coordinates are within the block above the step.
        # BUGFIX: the z component previously read sign(obj.uz[2]); the x and
        # y components both use the up-vector obj.uy, so z must as well.
        (bxs,bys,bzs) = (x + sign(obj.uy[0]),
                y + sign(obj.uy[1]),
                z + sign(obj.uy[2]))
        if blocking(w, bx,by,bz) and not blocking(w, bxs,bys,bzs):
            (x, y, z, bx, by, bz) = \
                _climb_step(w, obj, x, y, z, bx, by, bz)
        (x, y, z) = _move_with_slide(w, obj, x, y, z, bx, by, bz)
    return (x, y, z)
def _select(x, y, z, ox, oy, oz, d):
if d == 'x':
return (x, oy, oz)
elif d == 'y':
return (ox, y, oz)
elif d == 'z':
return (ox, oy, z)
elif d == 'a':
return (x, y, z)
def _move_with_slide(w, obj, x, y, z, bx, by, bz):
    """
    Calculate the end position for a move to (x,y,z) (with feet at (bx,by,bz))
    for a given world w and object obj.
    If the move is diagonal to a wall, this will "slide" along parallel to the
    wall: each axis of the move is applied independently, and only the axes
    whose foot-to-centre trace hits no blocking voxel are kept.
    """
    # NOTE(review): (obx, oby, obz) is computed but never used below —
    # confirm it is dead code before removing.
    (obx,oby,obz) = (bx - x + obj.x, by - y + obj.y, bz - z + obj.z)
    # One-element list used as a mutable cell so the closure below can
    # write to it (this Python 2 code has no `nonlocal`).
    blocked = [False]
    def visit_cell(x,y,z):
        if blocking(w,x,y,z):
            blocked[0] = True
            return False
        return True
    has_changed = True
    dimensions_not_moved = ['x','y','z']
    # (rx, ry, rz) starts at the object's current position and accumulates
    # the axes of the move that turned out to be legal.
    (rx,ry,rz) = (obj.x, obj.y, obj.z)
    # Try every ordering of x, y, z to figure out which dimensions of the
    # movement to apply first. Keep looping while any axis was accepted,
    # since accepting one axis can unblock another.
    while has_changed:
        has_changed = False
        # Copy the pending axes, because dimensions_not_moved is mutated
        # (remove) while we iterate.
        dimensions = []
        dimensions.extend(dimensions_not_moved)
        for dim in dimensions:
            # Visit every grid cell from the foot to the centre of the
            # object. If any are blocking cells, don't allow the move.
            # FIXME: trace_from_to won't work for portals
            (tx,ty,tz) = _select(x,y,z,rx,ry,rz, dim)
            (tbx, tby, tbz) = (bx - x + tx, by - y + ty, bz - z + tz)
            blocked[0] = False
            trace_from_to(visit_cell, (tbx,tby,tbz), (tx,ty,tz))
            if not blocked[0]:
                has_changed = True
                dimensions_not_moved.remove(dim)
                (rx,ry,rz) = _select(x,y,z,rx,ry,rz, dim)
    return (rx,ry,rz)
| StarcoderdataPython |
# gh_stars: 0
"""
A module to simplify data wrangling using python.
Mostly used to work on biological specimen data.
The data manipulation is done using pandas.
"""
import os
from glob import glob
import pandas as pd
def clean_duplicates(df, params):
    """Drop rows duplicated in column `params`, keeping the first occurrence.

    Args:
        df: pandas DataFrame of specimen records.
        params: name of the column used to detect duplicates.

    Returns:
        DataFrame with the duplicate rows removed.
    """
    deduped = df.drop_duplicates([params], keep='first')
    return deduped
def clean_column_names(df: pd.DataFrame):
    """
    Rename Specify/Darwin-Core export column names to human-readable names.
    """
    column_map = {
        '1.collectionobject.catalogNumber': 'CatNo',
        '1.collectionobject.fieldNumber': 'FieldNo',
        '1,10,30-collectors,5.agent.lastName': 'Collector',
        '1,9-determinations,4.taxon.Order': 'Order',
        '1,9-determinations,4.taxon.Family': 'Family',
        '1,9-determinations,4.taxon.Genus': 'Genus',
        '1,9-determinations,4.taxon.Species': 'Species',
        '1,10,2,3.geography.Country': 'Country',
        '1,10,2,3.geography.State': 'StateProvince',
        '1,10,2,3.geography.County': 'CountyDistrict',
        '1,10,2.locality.localityName': 'SpecificLocality',
        '1,10,2.locality.latitude1': 'Latitude',
        '1,10,2.locality.longitude1': 'Longitude',
        '1,10,2.locality.verbatimElevation': 'Elevation',
        '1,10,2.locality.originalElevationUnit': 'Unit',
        '1,63-preparations,65.preptype.name': 'PrepType',
        '1,63-preparations.preparation.text1': 'TissueType',
        '1,63-preparations.preparation.text2': 'Preservation',
        '1,63-preparations.preparation.storageLocation': 'StorageLocation',
        '1,93.collectionobjectattribute.text1': 'Sex',
        '1,93.collectionobjectattribute.text7': 'TotalLength',
        '1,93.collectionobjectattribute.text8': 'TailLength',
        '1,93.collectionobjectattribute.text9': 'HindFoot',
        '1,93.collectionobjectattribute.text10': 'EarLength',
        '1,93.collectionobjectattribute.text2': 'Weight',
        '1,10.collectingevent.startDate': 'StartDate',
        '1,93.collectionobjectattribute.text4': 'Stage',
        '1.collectionobject.remarks': 'Remarks',
        '1.collectionobject.altCatalogNumber': 'AltCatNo',
    }
    return df.rename(columns=column_map)
def clean_whitespace(df, columns):
    """
    Strip leading/trailing whitespace from the given string columns.

    Args:
        df (table): pandas dataframe
        columns (list): labels of the string columns to clean

    Returns:
        The dataframe with the listed columns stripped.
    """
    stripped = df[columns].apply(lambda col: col.str.strip())
    df[columns] = stripped
    return df
def trimmed_df_whitespace(df):
    """Strip leading/trailing whitespace in every object-dtype (string)
    column of the whole dataframe; other dtypes pass through untouched.

    Args:
        df (table): pandas dataframe to clean.

    Returns:
        table: the cleaned pandas dataframe.
    """
    def _strip_if_text(col):
        if col.dtype == 'object':
            return col.str.strip()
        return col

    return df.apply(_strip_if_text)
def extract_data(df_database, df_filters, column_names):
    """
    Keep only database rows whose `column_names` value also appears in the
    same column of `df_filters`. Both frames must share that column name.

    Args:
        df_database (pandas table): the full database to filter.
        df_filters (pandas table): rows defining the values to keep.
        column_names (string): the shared key column.

    Returns:
        The filtered database.
    """
    mask = df_database[column_names].isin(df_filters[column_names])
    return df_database[mask]
def combine_dataframes(filepath, output_path):
    """Concatenate every CSV matching a glob pattern into a single CSV.

    All matched files are assumed to share the same column names.

    Args:
        filepath (string): glob pattern (wildcards allowed) for the inputs.
        output_path (string): path and name of the combined output file.

    Returns:
        None; the combined CSV is written to output_path.
    """
    frames = [pd.read_csv(name) for name in glob(filepath)]
    combined = pd.concat(frames, axis=0)
    return combined.to_csv(output_path, index=0)
def concat_column_values(df, first_column, second_column, new_column_names):
    """Add a column that formats two existing columns as "(first,second)".

    Returns the same dataframe with the new column attached.
    """
    left = df[first_column].map(str)
    right = df[second_column].map(str)
    df[new_column_names] = '(' + left + ',' + right + ')'
    return df
def convert_excel_to_csv(filepath):
    """Batch-convert every .xlsx file matching a glob pattern to a .csv
    with the same base name.

    Args:
        filepath (string): glob pattern (wildcards allowed) matching the
            Excel files to convert.
    """
    for excel_name in glob(filepath):
        csv_name = excel_name.replace('.xlsx', '.csv')
        pd.read_excel(excel_name).to_csv(csv_name, index=False)
    print("Done converting to csv!")
def convert_windows_path(file_path: str) -> str:
    """Convert a Windows path to a Unix-style path.

    Args:
        file_path (str): path using backslash separators.

    Returns:
        str: the same path with forward slashes.
    """
    return file_path.replace("\\", "/")
def sort_preptype(df):
    """Sort specimens by catalog number, then by preparation type in the
    collection's preferred display order.

    Args:
        df: dataframe with 'CatNo' and 'PrepType' columns; 'PrepType' is
            converted to a categorical with the order below.

    Returns:
        The sorted dataframe.
    """
    preferred_order = [
        'Skin',
        'Alcohol',
        'Skull',
        'skeleton',
        'Tissue',
        'Intestine',
        'Small intestine',
        'Colon',
        'GI Tract',
        'Cecum',
        'Glands',
        'Testicle',
        'Embryo',
    ]
    df['PrepType'] = pd.Categorical(df['PrepType'], preferred_order)
    return df.sort_values(by=['CatNo', 'PrepType'])
def get_column_names(df):
    """Return the dataframe's column labels as a plain list."""
    return [name for name in df.columns]
def filter_results(df, column_name, params):
    """
    Keep rows whose `column_name` value is one of `params`, sorted by
    Genus then Species (both columns must exist in df).

    Args:
        df (table): pandas table
        column_name (string): column to filter on
        params (list): values to keep
    """
    mask = df[column_name].isin(params)
    return df[mask].sort_values(by=['Genus', 'Species'])
def count_specimen_groups(df, params):
    """
    Count the number of specimens per group defined by `params`.

    Missing values are replaced with 'No Data' so they form their own group;
    the 'CatNo' column is used as the counted (unique) value.

    Args:
        df (table): specimen records with a 'CatNo' column.
        params: column label(s) to group by.

    Returns:
        Dataframe with the group columns plus a 'Counts' column.
    """
    filled = df.fillna('No Data')
    counts = (filled.groupby(params)
                    .count()
                    .filter(['CatNo'])
                    .rename(columns={'CatNo': 'Counts'}))
    return counts.reset_index()
def merge_dataframes(df1, df2, df1_column_names, column_keys):
    """
    Merge two dataframes on `column_keys`, casting the key column of BOTH
    frames to int first so string-typed keys still match numeric ones.

    Args:
        df1 (table): left dataframe; reduced to `df1_column_names` first.
        df2 (table): right dataframe.
        df1_column_names: columns of df1 to keep (must include column_keys).
        column_keys (string): column name to join on.

    Returns:
        The inner merge of the two frames.
    """
    df1 = df1[df1_column_names]
    df1[column_keys] = df1[column_keys].astype(int)
    # BUGFIX: this line previously cast df1's key a second time (copy-paste
    # duplicate), leaving df2's key dtype unchanged and breaking the merge
    # whenever df2 stored the key as strings.
    df2[column_keys] = df2[column_keys].astype(int)
    df_merge = pd.merge(df1, df2, on=column_keys)
    return df_merge
def open_csv(path: str, filenames: str) -> pd.DataFrame:
    """Read `filenames` (including extension) from the directory `path`.

    Handy for deeply nested folders: the two parts are joined with '/'.

    Args:
        path (string): directory holding the file.
        filenames (string): file name with its extension.

    Returns:
        The loaded dataframe.
    """
    full_path = f'{path}/{filenames}'
    return pd.read_csv(full_path)
def save_csv(df, parent_path, filenames):
"""Save pandas's dataframe to csv.
The function check if the path exists.
If not, it will create the defined path.
Args:
df ([type]): [description]
parent_path ([type]): [description]
filenames ([type]): [description]
Returns:
[type]: [description]
"""
file_path = parent_path + '/' + filenames
try:
df.to_csv(file_path, index=False)
print('File saved!')
except FileNotFoundError:
os.mkdir(parent_path)
print(f'A new folder is created. File path: {parent_path}/')
df.to_csv(file_path, index=False)
print(f'File is saved in {file_path}.')
def save_with_index(df, filename):
    """Write df, keeping its index, into the cleaned_data/ folder."""
    destination = 'cleaned_data/' + filename
    return df.to_csv(destination)
def split_columns(df, separator, new_columns, column_names):
    """
    Split one dataframe column into several on a separator.

    Args:
        df (pandas table): frame to modify in place.
        separator (string): delimiter to split the values on.
        new_columns (list): labels for the resulting columns.
        column_names (string): label of the column to split.

    Returns:
        table: the dataframe with the new columns attached.
    """
    parts = df[column_names].str.split(separator, expand=True)
    df[new_columns] = parts
    return df
class MuseumNumbers():
    """
    Look up museum catalog numbers for records in a result dataset.

    `df_origin` is the full museum database (must contain 'ColInitial' and
    'CatNo'); `df_result` is the dataset to annotate, keyed by 'ColInitial'.
    """
    def __init__(self, df_origin, df_result):
        self.df_origin = df_origin
        self.df_result = df_result
    def filter_database(self):
        """
        Return the ('ColInitial', 'CatNo') rows of the database whose
        collector initials appear in the result dataset.
        """
        subset = self.df_origin[['ColInitial', 'CatNo']]
        matches = self.df_origin['ColInitial'].isin(self.df_result['ColInitial'])
        return subset[matches]
    def merge_database(self, df_origin):
        """Merge the filtered database rows with the result dataset on
        their shared columns."""
        return pd.merge(df_origin, self.df_result)
    def get_numbers(self):
        """Filter then merge, yielding result rows with catalog numbers."""
        return self.merge_database(self.filter_database())
    def save_results(self, path, file_name):
        """Run get_numbers() and write the outcome as a CSV via save_csv."""
        return save_csv(self.get_numbers(), path, file_name)
class FieldNumbers():
    """Build collector field numbers (initials + field number) for a
    specimen dataframe.

    `names` and `initials` are parallel lists mapping each collector's full
    name (as found in the 'Collector' column) to their initials.
    """
    def __init__(self, df, names, initials):
        self.df = df
        self.names = names
        self.initials = initials
    def add_initial_columns(self):
        """
        Add an 'Initials' column derived from the 'Collector' column using
        the names/initials mapping.

        Returns:
            The dataframe with the 'Initials' column appended at the far
            right of the table.
        """
        self.df['Initials'] = self.df['Collector'].replace(self.names, self.initials)
        return self.df
    def merge_initials(self):
        """
        Combine initials and field number into a 'ColInitial' column.

        Returns:
            The dataframe with 'ColInitial' added and the temporary
            'Initials' column dropped.
        """
        df_result = self.add_initial_columns()
        df_result['FieldNo'] = df_result['FieldNo'].astype(str)
        df_result['ColInitial'] = df_result['Initials'] + df_result['FieldNo']
        df_result = df_result.drop('Initials', axis = 1)
        # Dataset-export residue ("| StarcoderdataPython |") removed from
        # the end of the following line; it was not valid Python.
        return df_result
from requests.exceptions import HTTPError
from .models.maven_model import maven_model_proxy, maven_model_hosted, maven_model_group
from .models.docker_model import docker_model_proxy, docker_model_group, docker_model_hosted
from .models.npm_model import npm_model_group, npm_model_hosted, npm_model_proxy
from .models.yum_model import yum_model_hosted
from .models.raw_model import raw_model_group, raw_model_proxy, raw_model_hosted
class Repo:
    """Thin client for the Nexus 3 /beta/repositories REST API.

    Supports listing all repositories and creating / updating / deleting
    repositories of the maven, docker, npm, yum and raw formats. The
    payload construction, previously duplicated verbatim between create()
    and update(), lives in the _build_scheme() helper.
    """
    def __init__(self, session):
        self.session = session
        self.api_location = '/beta/repositories'
    @staticmethod
    def _build_scheme(kwargs):
        """Build the request payload for kwargs['repoType'] /
        kwargs['locationType'].

        Raises:
            NameError: if the repoType or locationType is unsupported.
        """
        # repoType -> (locationType -> payload builder, text listing the
        # locationTypes that repoType supports, used in error messages).
        builders_by_type = {
            'yum': ({'hosted': yum_model_hosted}, 'hosted'),
            'npm': ({'hosted': npm_model_hosted,
                     'proxy': npm_model_proxy,
                     'group': npm_model_group}, 'hosted/proxy/group'),
            # maven 'group' deliberately reuses the proxy model:
            # TODO: group payload not ready in backend nexus 3.22.0-02
            'maven': ({'hosted': maven_model_hosted,
                       'proxy': maven_model_proxy,
                       'group': maven_model_proxy}, 'hosted/proxy'),
            'docker': ({'hosted': docker_model_hosted,
                        'proxy': docker_model_proxy,
                        'group': docker_model_group}, 'hosted/proxy/group'),
            'raw': ({'hosted': raw_model_hosted,
                     'proxy': raw_model_proxy,
                     'group': raw_model_group}, 'hosted/proxy/group'),
        }
        repo_type = kwargs['repoType']
        if repo_type not in builders_by_type:
            # BUGFIX: update() previously omitted "yum" from this message
            # even though yum is supported; create()'s wording is now used
            # for both paths.
            raise NameError(f'ERROR repoType for {repo_type} UPDATE not supported. Use maven/docker/npm/yum/raw')
        location_builders, allowed = builders_by_type[repo_type]
        location_type = kwargs['locationType']
        if location_type not in location_builders:
            raise NameError(f'ERROR locationType for {repo_type.upper()} not supported. Use {allowed}')
        return location_builders[location_type](kwargs)
    def list(self):
        """Return {name: repo_dict} for every repository; {} on error."""
        repo_dict = {}
        try:
            response = self.session.get(self.api_location)
            response.raise_for_status()
        except HTTPError as http_err:
            print(f'ERROR REPO LIST HTTP: {http_err}')
        except Exception as err:
            print(f'ERROR REPO LIST OTHER: {err}')
        else:
            print(f'REPO LISTED: {str(response.status_code)}')
            for repo in response.json():
                repo_dict[repo['name']] = repo
        return repo_dict
    def create(self, **kwargs):
        """Create a repository.

        kwargs must include 'name', 'repoType' and 'locationType' plus
        whatever fields the selected payload model needs.
        """
        scheme = self._build_scheme(kwargs)
        try:
            response = self.session.post(f"{self.api_location}/{kwargs['repoType']}/{kwargs['locationType']}",
                                         json=scheme)
            response.raise_for_status()
        except HTTPError as http_err:
            print(f"ERROR repo {kwargs['name']} CREATE HTTP: {http_err}")
        except Exception as err:
            print(f"ERROR repo {kwargs['name']} CREATE OTHER: {err}")
        else:
            print(f"REPO CREATED: {str(response.status_code)} {kwargs['name']} {kwargs['repoType']}")
    def update(self, **kwargs):
        """Update repository kwargs['name']; same payload rules as create()."""
        scheme = self._build_scheme(kwargs)
        try:
            response = self.session.put(f"{self.api_location}/{kwargs['repoType']}/{kwargs['locationType']}/{kwargs.get('name')}",
                                        json=scheme)
            response.raise_for_status()
        except HTTPError as http_err:
            print(f"ERROR repo {kwargs['name']} UPDATE HTTP: {http_err}")
        except Exception as err:
            print(f"ERROR repo {kwargs['name']} UPDATE OTHER: {err}")
        else:
            print(f"REPO UPDATED: {str(response.status_code)} {kwargs['name']} {kwargs['repoType']}")
    def delete(self, name):
        """Delete the repository called `name`."""
        try:
            response = self.session.delete(f'{self.api_location}/{name}')
            response.raise_for_status()
        except HTTPError as http_err:
            print(f'ERROR repo {name} DELETE HTTP: {http_err}')
        except Exception as err:
            print(f'ERROR repo {name} DELETE OTHER: {err}')
        else:
            print(f'REPO DELETED: {str(response.status_code)} {name}')
| StarcoderdataPython |
8080441 | <filename>web_scraping/seleniumtest_mac.py<gh_stars>0
# Strava activity scraper: log in, open each activity URL from the input file,
# enable the heart-rate and cadence series on the analysis chart, then sweep
# the cursor across the chart sampling the datapoints into one CSV per run.
import time, csv
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from webdriver_manager.chrome import ChromeDriverManager

url = "https://www.strava.com/login"
driver = webdriver.Chrome(ChromeDriverManager().install())
driver.maximize_window()

email = "<EMAIL>"
password = "<PASSWORD>"

# One Strava activity URL per line.
with open("urrutyJuly.txt", "r") as runs_file:
    a = runs_file.readlines()
print(len(a)*2, "minutes estimées")

if __name__ == "__main__":
    driver.get(url)
    driver.find_element_by_id("email").send_keys(email)
    driver.find_element_by_id("password").send_keys(password)
    driver.find_element_by_id("login-button").click()
    time.sleep(1)
    # Warm-up navigation to an activity page before the scraping loop.
    driver.get("https://www.strava.com/activities/2533262240")
    time.sleep(1)
    action = ActionChains(driver)
    for line in a:
        run = line[:-1]  # strip the trailing newline
        res = [['time', 'distance', 'elevation', 'pace', 'heartrate', 'cadence']]
        driver.get(run)
        time.sleep(1)
        try:
            # Enable the heart-rate and cadence overlays on the chart.
            element1 = driver.find_element_by_xpath('//*[@id="chart-controls"]/table/tbody/tr[1]/td[4]/div[2]')
            action.move_to_element(element1).click().perform()
            time.sleep(1)
            element2 = driver.find_element_by_xpath('//*[@id="chart-controls"]/table/tbody/tr[1]/td[5]/div[2]')
            action.move_to_element(element2).click().perform()
            time.sleep(1)
        except Exception:
            # Chart controls missing (e.g. an activity without HR/cadence data).
            continue
        grid = driver.find_element_by_id("grid")
        action.move_to_element(grid).perform()
        action.move_by_offset(-398, 0).perform()
        # Sweep the cursor across the chart in 3px steps; the loop variable was
        # renamed from 'i', which shadowed the outer loop index in the original.
        for _ in range(266):
            action.move_by_offset(3, 0).perform()
            timev = driver.find_element_by_xpath("//*[@id='crossBar']/*[@class='crossbar-text']").text
            distance = driver.find_element_by_xpath("//*[@id='infobox-text-distance']/*[@class='value']").text
            elev = driver.find_element_by_xpath("//*[@id='infobox-text-altitude']/*[@class='value']").text
            pace = driver.find_element_by_xpath("//*[@id='infobox-text-pace']/*[@class='value']").text
            heartrate = driver.find_element_by_xpath("//*[@id='infobox-text-heartrate']/*[@class='value']").text
            cadence = driver.find_element_by_xpath("//*[@id='infobox-text-cadence']/*[@class='value']").text
            res.append([timev, distance, elev, pace, heartrate, cadence])
        action = ActionChains(driver)
        time.sleep(1)
        # BUG FIX: the original `open('Urruty_'run[-10:]+'.csv', ...)` was a
        # syntax error (missing '+' between the literal and the slice).
        with open('Urruty_' + run[-10:] + '.csv', 'w') as csvFile:
            writer = csv.writer(csvFile)
            writer.writerows(res)
    # BUG FIX: driver.close() was inside the loop, ending the browser session
    # after the first activity; close once after all runs are processed.
    driver.close()
| StarcoderdataPython |
3231106 | <reponame>rortiz9/meleeml<filename>models/GAIL.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from envs.dataset import *
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Actor(nn.Module):
    """Policy network: maps a state vector to a probability distribution over actions."""
    def __init__(self, state_dim, action_dim):
        super(Actor, self).__init__()
        self.l1 = nn.Linear(state_dim, 400)
        self.l2 = nn.Linear(400, 200)
        self.l3 = nn.Linear(200, action_dim)
    def forward(self, x):
        """Return per-action probabilities (each row sums to 1) for a batch of states."""
        x = F.relu(self.l1(x))
        x = F.relu(self.l2(x))
        # BUG FIX: nn.Softmax() without `dim` is deprecated and relies on an
        # implicitly chosen dimension. Inputs here are (batch, features), for
        # which the implicit choice is dim=1, so making it explicit preserves
        # behavior while silencing the warning.
        x = F.softmax(self.l3(x), dim=1)
        return x
class Discriminator(nn.Module):
    """Scores (state, action) pairs; output in (0, 1), higher means more expert-like."""
    def __init__(self, state_dim, action_dim):
        super(Discriminator, self).__init__()
        self.l1 = nn.Linear(state_dim+action_dim, 500)
        self.l2 = nn.Linear(500, 300)
        self.l3 = nn.Linear(300, 300)
        self.l4 = nn.Linear(300, 1)
    def forward(self, state, action):
        """Concatenate state and action, run the tanh MLP, finish with a sigmoid head."""
        features = torch.cat([state, action], 1)
        for hidden in (self.l1, self.l2, self.l3):
            features = torch.tanh(hidden(features))
        return torch.sigmoid(self.l4(features))
class GAIL:
    """Generative Adversarial Imitation Learning.

    Trains an Actor (policy) to imitate expert (state, action) pairs while a
    Discriminator learns to distinguish expert transitions from policy ones.
    """
    def __init__(self, expert_states, expert_actions, action_set, lr, betas):
        # expert_states: (N, state_dim); expert_actions: (N, action_dim) one-hot
        # (inferred from the gather/one-hot handling in update()).
        state_dim = expert_states.shape[1]
        action_dim = expert_actions.shape[1]
        self.action_set = action_set
        self.expert_states = expert_states
        self.expert_actions = expert_actions
        self.actor = Actor(state_dim, action_dim).to(device)
        self.optim_actor = torch.optim.Adam(self.actor.parameters(), lr=lr, betas=betas)
        self.discriminator = Discriminator(state_dim, action_dim).to(device)
        self.optim_discriminator = torch.optim.Adam(self.discriminator.parameters(), lr=lr, betas=betas)
        self.loss_fn = nn.BCELoss()
    def select_action(self, state):
        """Sample an action from the policy and return it one-hot encoded."""
        out = self.actor.forward(state)
        action_dist = torch.distributions.Categorical(out.squeeze(0))
        action_idx = action_dist.sample()
        eval_action = torch.zeros((self.action_set.shape[0]))
        eval_action[action_idx] = 1
        return eval_action
    def update(self, n_iter, batch_size=100, entropy_penalty = True, pg_penalty = True):
        """Run n_iter adversarial updates.

        Returns (average generator loss, average discriminator loss).
        NOTE(review): entropy_penalty takes precedence over pg_penalty when
        both are True (the default), so the pg branch is normally dead.
        """
        gen_losses = list()
        discrim_losses = list()
        for i in range(n_iter):
            # sample expert transitions
            expert_samples = torch.randint(self.expert_states.shape[0], (batch_size,))
            exp_state = torch.FloatTensor(self.expert_states[expert_samples]).to(device)
            exp_action = torch.FloatTensor(self.expert_actions[expert_samples]).to(device)
            # sample expert states for actor
            actor_samples = torch.randint(self.expert_states.shape[0], (batch_size,))
            state = torch.FloatTensor(self.expert_states[actor_samples]).to(device)
            action = self.actor(state)
            #######################
            # update discriminator
            #######################
            self.optim_discriminator.zero_grad()
            # label tensors: expert transitions are labeled 1, policy ones 0
            exp_label= torch.full((batch_size,1), 1, device=device)
            policy_label = torch.full((batch_size,1), 0, device=device)
            # with expert transitions
            prob_exp = self.discriminator(exp_state, exp_action)
            loss = self.loss_fn(prob_exp, exp_label)
            # with policy transitions (detach: don't backprop into the actor here)
            prob_policy = self.discriminator(state, action.detach())
            loss += self.loss_fn(prob_policy, policy_label)
            # take gradient step
            loss.backward()
            self.optim_discriminator.step()
            ################
            # update policy
            ################
            self.optim_actor.zero_grad()
            #loss_actor = -self.discriminator(state, action)
            # Non-saturating generator objective: push D(state, action) toward
            # the "expert" label.
            loss_actor = self.loss_fn(self.discriminator(state, action), exp_label)
            # NOTE(review): unusual entropy formula — torch.mean(action) is a
            # scalar, so this is not the standard -sum(p * log p); verify intent.
            entropy = -torch.sum(torch.mean(action) * torch.log(action))
            #pg loss
            # get_rewards comes from the star-import of envs.dataset.
            reward = torch.Tensor(get_rewards(state)).to(device)
            correct_actions_onehot = self.expert_actions[actor_samples]
            # NOTE(review): np.where(...)[0] yields ROW indices of the one-hot
            # matrix; the action (column) ids are probably np.where(...)[1] — verify.
            action_indices = torch.Tensor(np.where(correct_actions_onehot == 1)[0]).long().to(device)
            action_indices = action_indices.unsqueeze(0).T
            #log_prob = torch.log(action.squeeze(0)[action_indices])
            #log_prob = torch.log(action[action_indices])
            # NOTE(review): despite the name, no log is applied here.
            log_prob = action.gather(1, action_indices)
            pg_loss = -log_prob * reward
            #loss_actor += 0.0000 * entropy
            #loss_actor.mean().backward()
            if entropy_penalty:
                new_loss = loss_actor + 0.01 * entropy
                new_loss.mean().backward()
            elif pg_penalty:
                new_loss = loss_actor + 0.01 * pg_loss
                new_loss.mean().backward()
            else:
                loss_actor.mean().backward()
            self.optim_actor.step()
            gen_losses.append(loss_actor.mean())
            discrim_losses.append(loss)
        avg_gen_loss = sum(gen_losses)/len(gen_losses)
        # NOTE(review): divides by len(gen_losses); harmless since both lists
        # grow in lockstep, but len(discrim_losses) was probably intended.
        avg_discrim_loss = sum(discrim_losses)/len(gen_losses)
        return avg_gen_loss, avg_discrim_loss
    def save(self, directory='./preTrained', name='GAIL'):
        """Persist actor and discriminator weights under *directory*."""
        torch.save(self.actor.state_dict(), '{}/{}_actor.pth'.format(directory,name))
        torch.save(self.discriminator.state_dict(), '{}/{}_discriminator.pth'.format(directory,name))
    def load(self, directory='./preTrained', name='GAIL'):
        """Load actor and discriminator weights saved by save() (CPU-mapped)."""
        self.actor.load_state_dict(torch.load('{}/{}_actor.pth'.format(directory,name), map_location=torch.device('cpu')))
        self.discriminator.load_state_dict(torch.load('{}/{}_discriminator.pth'.format(directory,name), map_location=torch.device('cpu')))
| StarcoderdataPython |
234426 | <filename>2018/day_5/star_2/star.py
from datetime import datetime
def remove_all_from_polymer(polymer, type):
    """Return *polymer* with every unit of *type* removed, in either polarity."""
    return polymer.replace(type.lower(), "").replace(type.upper(), "")
def react_polymer(polymer):
    """Fully react *polymer* and return the length of the reacted result.

    Adjacent units react (annihilate) when they are the same letter with
    opposite case. Side effects preserved from the original: prints the
    elapsed time and final length, and writes the reacted polymer to
    'polymer.txt'.
    """
    start_time = datetime.now()
    # BUG FIX + performance: the original scan used
    # `while i + 1 < len(polymer) - 1`, which never examined the final pair of
    # units, and restarted from i = 0 after every reaction (quadratic or
    # worse). A single stack pass is O(n) and checks every pair.
    stack = []
    for unit in polymer:
        if stack and stack[-1] != unit and stack[-1].lower() == unit.lower():
            stack.pop()  # reaction: remove the pair
        else:
            stack.append(unit)
    polymer = "".join(stack)
    end_time = datetime.now()
    time_taken = end_time - start_time
    print("time: {}s".format(time_taken.total_seconds()))
    with open('polymer.txt', 'w') as out:
        out.write(polymer)
    polymer_length = len(polymer)
    print("length: {}".format(polymer_length))
    return polymer_length
def will_react(p1, p2):
    """Two units react when they are the same letter with opposite case."""
    return p1 != p2 and p1.lower() == p2.lower()
def remove_from_polymer(polymer, i):
    """Return *polymer* with the two units at positions i and i+1 deleted."""
    return "".join((polymer[:i], polymer[i + 2:]))
if __name__ == "__main__":
with open('../data.txt') as data:
polymer = data.read()
polymer_trials = [
('a', react_polymer(remove_all_from_polymer(polymer, 'a'))),
('b', react_polymer(remove_all_from_polymer(polymer, 'b'))),
('c', react_polymer(remove_all_from_polymer(polymer, 'c'))),
('d', react_polymer(remove_all_from_polymer(polymer, 'd'))),
]
print(polymer_trials) | StarcoderdataPython |
19448 | import requests as reqlib
import os
import re
import random
import time
import pickle
import abc
import hashlib
import threading
from urllib.parse import urlparse
from purifier import TEAgent
from purifier.logb import getLogger
from enum import IntEnum
from typing import Tuple, List, Dict, Optional
class ScraperTimeout(Exception):
    """Wraps a transport-level failure (timeout, connection error) while fetching."""
    def __init__(self, ex):
        self.ex = ex
    def __str__(self):
        return "Timeout: {}".format(self.ex)
class ScraperNot200(Exception):
    """Raised when an HTTP fetch returns a non-OK status code."""
    def __init__(self, sc):
        self.sc = sc
    def __str__(self):
        return "Unexpected Status Code={}!".format(self.sc)
class UnsupportedMIME(Exception):
    """Raised when a fetched resource's MIME type is not in the supported set."""
    def __init__(self, mime):
        self.mime = mime
    def __str__(self):
        return "Unsupported MIME={}!".format(self.mime)
class Scraper(metaclass=abc.ABCMeta):
    """Abstract interface for page scrapers; subclasses implement get()."""
    @abc.abstractmethod
    def get(self, url):
        """Fetch *url* and return its content as text."""
        pass
class ReqScraper(object):
    """Scraper backed by `requests` with a simple on-disk page cache.

    Pages are cached under *page_cache_path*, keyed by host plus the MD5 of the
    full URL; only MIME types in *supported_mime_set* are accepted.
    """
    def __init__(self,
                 page_cache_path="page_caches",
                 headers=None,
                 skip_cache=False,
                 supported_mime_set=None):
        self.page_cache_path = page_cache_path
        if not os.path.isdir(self.page_cache_path):
            os.makedirs(self.page_cache_path)
        # Avoid shared mutable default arguments: build per-instance defaults.
        self.headers = {'User-Agent': 'Mozilla/5.0'} if headers is None else headers
        self.logger = getLogger(os.path.basename(self.__class__.__name__))
        self.skip_cache = skip_cache
        self.supported_mime_set = {"text/html"} if supported_mime_set is None else supported_mime_set
    def _get_cache_path(self, url):
        """Return the cache file path for *url* (host + MD5 of the full URL)."""
        test_url_host = urlparse(url).netloc
        url_md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
        cache_file_name = f"{test_url_host}_{url_md5}.txt"
        cache_file_path = os.path.join(self.page_cache_path, cache_file_name)
        return cache_file_path
    def _del_from_cache(self, url):
        """Delete the cached copy of *url*, if any."""
        cache_file_path = self._get_cache_path(url)
        if os.path.isfile(cache_file_path):
            # BUG FIX: the message was missing its f-prefix and logged the
            # literal text "{cache_file_path}".
            self.logger.warning(f"Removing cache file={cache_file_path}...")
            os.remove(cache_file_path)
    def _get_from_cache(self, url):
        """Return cached text for *url*, or None when not cached."""
        cache_file_path = self._get_cache_path(url)
        if os.path.isfile(cache_file_path):
            self.logger.debug(f"Return content of {url} from cache...")
            with open(cache_file_path, 'r', encoding='utf8') as fo:
                return fo.read()
        return None
    def _save2cache(self, url, html_content):
        """Persist *html_content* for *url* in the cache directory."""
        cache_file_path = self._get_cache_path(url)
        with open(cache_file_path, 'w', encoding='utf8') as fw:
            fw.write(html_content)
    def get(self, url):
        """Return the page text of *url*, serving from cache unless skip_cache.

        Raises UnsupportedMIME, ScraperNot200 or ScraperTimeout on failure.
        """
        if not self.skip_cache:
            cache_text = self._get_from_cache(url)
            if cache_text is not None:
                return cache_text
        self.logger.debug(f"Crawling {url}...")
        try:
            resp = reqlib.get(url, headers=self.headers, timeout=(5, 10))
            if resp.ok:
                mime = resp.headers['content-type'].split(';')[0].strip()
                self.logger.debug(f"URL={url} with MIME={mime}...")
                if mime.lower() not in self.supported_mime_set:
                    raise UnsupportedMIME(mime)
                self._save2cache(url, resp.text)
                return resp.text
            else:
                raise ScraperNot200(resp.status_code)
        # BUG FIX: the blanket `except Exception` also caught the
        # UnsupportedMIME/ScraperNot200 raised just above and re-wrapped them
        # as ScraperTimeout, defeating callers' per-type handling. Let our own
        # exceptions propagate untouched.
        except (UnsupportedMIME, ScraperNot200):
            raise
        except Exception as e:
            raise ScraperTimeout(e)
class bcolors:
    """ANSI terminal escape codes for colored and styled console output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
class ThreadState(IntEnum):
    """Lifecycle state of the explorer worker thread."""
    STOPPED = 0   # not running (initial and final state)
    RUNNING = 1   # main crawl loop active
    STOPPING = 2  # stop requested; loop will exit shortly
class CrawlAgent(object):
    """Fetches a URL through the caching scraper and extracts title, text and links."""
    def __init__(self, name, throttling_range=(1, 2)):
        self.rs = ReqScraper(page_cache_path=f"{name}_cache")
        self.et = TEAgent(
            policy_path="policy",
            disable_policy=True,
            ext_title=True
        )
        self.logger = getLogger(os.path.basename(self.__class__.__name__))
        # (min, max) seconds of random pre-fetch delay, to be polite to the host.
        self.throttling_range = throttling_range
    def obsolete_cache(self, url):
        """Drop the cached page for *url* so the next handle() re-fetches it."""
        self.rs._del_from_cache(url)
    def handle(self, url:str, skip_throttling:bool=False) -> Tuple[str, str, List[str]]:
        """Fetch *url* and return (title, text, links); (None, None, None) on failure.

        When *skip_throttling* is False (the default) a random delay is
        inserted before the fetch.
        """
        try:
            # BUG FIX: the original condition was `if skip_throttling:`, which
            # slept exactly when the caller asked to SKIP throttling and never
            # throttled real network fetches.
            if not skip_throttling:
                wait_in_sec = random.uniform(*self.throttling_range)
                self.logger.debug(f"throttling wait {wait_in_sec}s...")
                time.sleep(wait_in_sec)
            url_content_html = self.rs.get(url)
            is_succ, rst, handler = self.et.parse(
                "text/html",
                url,
                url_content_html,
                do_ext_link=True
            )
            # NOTE: the original success/failure branches returned identical
            # tuples, so the result is returned unconditionally; is_succ is
            # kept for future differentiation.
            return (rst['title'], rst['text'], rst['all_links'])
        except (ScraperNot200, UnsupportedMIME) as e:
            self.logger.warning(f"Fail to handle URL={url}: {str(e)}")
            return None, None, None
        except ScraperTimeout as e:
            # Back off briefly after transport-level failures.
            time.sleep(2)
            self.logger.warning(f"Fail to handle URL={url}: {str(e)}")
            return None, None, None
class ExplorerWorker(threading.Thread):
    """Breadth-first site crawler running on its own thread.

    Pulls URLs from a FIFO queue seeded with *src_url*, hands each to a
    CrawlAgent, saves extracted pages to disk and enqueues newly discovered
    links matching *url_ptn*. Progress is persisted via a pickled dict so a
    restarted worker skips (or cheaply re-expands) already-seen URLs.
    """
    def __init__(
            self,
            name:str,
            url_ptn:str,
            src_url:str,
            test_run:int=-1,
            page_saved_dir:Optional[str]=None):
        """
        name: worker name; also prefixes cache/output/stop-signal file names.
        url_ptn: regex; only links matching it are followed.
        src_url: seed URL of the crawl.
        test_run: stop after this many new URLs were handled (<=0 = unlimited).
        page_saved_dir: where page dumps go (default: "{name}_pages_output").
        """
        super(ExplorerWorker, self ).__init__(name = name)
        self.name = name
        self.url_ptn = url_ptn
        self.src_url = src_url
        self.test_run = test_run
        self.ca = CrawlAgent(name)
        self.pc_dict = self._get_pc_dict()
        ''' Processed result cache: Key as URL; value as bool (True means this URL is crawled successfully)'''
        self.state = ThreadState.STOPPED
        ''' Thread state: 0-> stopped; 1-> running; 2-> stopping'''
        self.logger = getLogger(os.path.basename(self.__class__.__name__))
        ''' Logger object '''
        self.page_saved_dir = page_saved_dir if page_saved_dir is not None else f"{self.name}_pages_output"
        ''' Path or directory to save dump page'''
        self.stop_signal = f"STOP_{self.name}"
        ''' Stop signal file '''
        if not os.path.isdir(self.page_saved_dir):
            os.makedirs(self.page_saved_dir)
    def _get_output_page_path(self, url):
        """Return the dump-file path for *url* (host + MD5 of the full URL)."""
        url_host = urlparse(url).netloc
        url_md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
        page_file_name = f"{url_host}_{url_md5}.txt"
        page_file_path = os.path.join(self.page_saved_dir, page_file_name)
        return page_file_path
    def _get_pc_serialized_file(self) -> str:
        """Return the pickle file name holding this worker's progress dict."""
        return f"{self.name}_pc_dict.pkl"
    def _get_pc_dict(self) -> Dict[str, bool]:
        """Load the persisted progress dict, or start fresh when absent."""
        pkl_file = self._get_pc_serialized_file()
        if os.path.isfile(pkl_file):
            with open(pkl_file, 'rb') as fo:
                return pickle.load(fo)
        else:
            return {}
    def _serialized(self):
        """Persist the progress dict to disk (overwrites previous snapshot)."""
        pkl_file = self._get_pc_serialized_file()
        with open(pkl_file, 'wb') as fo:
            pickle.dump(self.pc_dict, fo)
    def run(self):
        """Main crawl loop; runs until the queue drains or a stop is requested."""
        self.state = ThreadState.RUNNING
        url_queue = [self.src_url]
        # Counters: pc=processed, sc=succeeded, fc=failed, oc=old (already seen).
        pc = sc = fc = oc = 0
        while self.state == ThreadState.RUNNING and url_queue:
            # A file named STOP_{name} in the CWD requests a graceful stop.
            if os.path.isfile(self.stop_signal):
                os.remove(self.stop_signal)
                self.logger.warning("Receive STOP signal!")
                break
            url = url_queue.pop(0)
            pc += 1
            if url not in self.pc_dict:
                # New URL
                self.logger.debug(f"Handling URL={url}...")
                title, content, collected_urls = self.ca.handle(url)
                if content is None:
                    self.pc_dict[url] = False
                    fc += 1
                else:
                    # The source URL is deliberately never marked done so it
                    # keeps being re-crawled on the periodic re-seed below.
                    if url != self.src_url:
                        self.pc_dict[url] = True
                    sc += 1
                    self.logger.info(bcolors.BOLD + f"Completed URL={url} ({len(url_queue):,d}/{pc:,d})" + bcolors.ENDC)
                    # Follow only links matching the pattern; skip fragment links.
                    next_level_urls = list(filter(lambda u: re.match(self.url_ptn, u) is not None and "#" not in u, collected_urls))
                    if next_level_urls:
                        self.logger.debug(f"\tCollected {len(next_level_urls)} next level URL(s)")
                        url_queue.extend(list(set(next_level_urls) - set(url_queue)))
                    # Only dump pages for query-less URLs.
                    if content and "?" not in url:
                        page_output_path = self._get_output_page_path(url)
                        with open(page_output_path, 'w', encoding='utf8') as fw:
                            fw.write(f"{url}\n\n")
                            fw.write(f"{title}\n\n")
                            fw.write(f"{content}")
                        self.logger.debug(f"\tSaved page to {page_output_path}!")
            else:
                # Old URL
                if not self.pc_dict[url]:
                    self.logger.info(f"Skip broken URL={url} in the past...")
                    continue
                # Re-parse from cache (no throttling) just to re-expand links.
                title, content, collected_urls = self.ca.handle(url, skip_throttling=True)
                if collected_urls:
                    next_level_urls = list(filter(lambda u: re.match(self.url_ptn, u) is not None, collected_urls))
                    url_queue.extend(list(set(next_level_urls) - set(url_queue)))
                oc += 1
                self.logger.info(f"URL={url} is already handled...({len(url_queue):,d}/{pc:,d})")
                continue
            if self.test_run > 0:
                if (sc + fc) > self.test_run:
                    self.logger.info(f"Exceed test_run={self.test_run} and therefore stop running...")
                    break
            # Periodic checkpoint: persist progress, invalidate the source
            # page's cache and re-seed it so the crawl keeps refreshing.
            if pc % 1000 == 0:
                self.logger.info(bcolors.OKBLUE + bcolors.BOLD + f"{pc} URL completed: sc={sc:,d}; fc={fc:,d}; oc={oc:,d}\n" + bcolors.ENDC)
                self._serialized()
                self.ca.obsolete_cache(self.src_url)
                url_queue.append(self.src_url)
        self.logger.warning(f"Serialized explorer result (name={self.name})...")
        self._serialized()
        self.logger.warning(f"Explorer is stopped! (name={self.name})...")
        self.state = ThreadState.STOPPED
    def stop(self):
        """Request the run loop to stop and block until it has exited."""
        self.logger.warning(f"Stopping explorer worker (name={self.name})...")
        if self.state == ThreadState.RUNNING:
            self.state = ThreadState.STOPPING
        # NOTE(review): if run() was never started the state is already
        # STOPPED and this returns immediately.
        while self.state != ThreadState.STOPPED:
            time.sleep(1)
| StarcoderdataPython |
5071838 | """
author: <NAME> (E-mail: <EMAIL>)
"""
import torch
import torch.nn as nn
from torch_custom.stft_helper import StftHelper
import torch_custom.spectral_ops as spo
from torch_custom.custom_layers import CustomModel
from torch_custom.wpe_th_utils import wpe_mb_torch_ri
# class NeuralWPE(nn.Module):
class NeuralWPE(CustomModel):
    """Neural Weighted Prediction Error (WPE) dereverberation front end.

    A pre-trained LPS network estimates the power spectral density of the
    input, which drives batch-mode WPE filtering in the STFT domain.
    """
    def __init__(self, stft_opts, lpsnet=None):
        """
        stft_opts: dict of STFT parameters (at least 5 entries) for StftHelper,
            or None to reuse the helper attached to *lpsnet*.
        lpsnet: required pre-trained nn.Module producing log-power spectra.
        """
        super(NeuralWPE, self).__init__()
        assert lpsnet is not None and isinstance(lpsnet, nn.Module)
        if stft_opts is None:
            self.stft_helper = lpsnet.stft_helper
        else:
            assert len(stft_opts) >= 5
            self.stft_helper = StftHelper(**stft_opts)
        self.lpsnet = lpsnet
        # Expose the sub-network's weight bookkeeping as our own.
        self.weights = lpsnet.weights
        self.weights_list = lpsnet.weights_list
        self.weights_name = lpsnet.weights_name
    # NOTE(review): these override nn.Module.train(mode=True)/eval() with a
    # different signature and only toggle the sub-network — confirm intended.
    def train(self):
        self.lpsnet.train()
    def eval(self):
        self.lpsnet.eval()
    def forward(self, sig_x, delay=3, taps=10, drop=0.0, dtype=torch.float32):
        """Dereverberate batched multi-channel time-domain waveforms.

        sig_x: shape (B, C, T) == (batch, channels, time).
        delay/taps: WPE filter parameters; drop: dropout passed to the LPS net;
        dtype: precision used inside the WPE solver.
        Returns a tensor of the same shape (B, C, T).
        """
        ## Convert the time-domain signal to the STFT coefficients
        nb, nc, nt = sig_x.size() # (B,C,t)
        sig_x = sig_x.view(nb*nc, nt) # (BC,t)
        stft_x = self.stft_helper.stft(sig_x) # (BC,F,T,2)
        ## Compute the PSD using a pre-trained neural network
        lps_x = spo.stft2lps(stft_x) # (BC,F,T)
        psd_x = self.lpsnet(lps_x, drop=drop).exp() # (BC,F,T)
        ## Batch-mode WPE
        ## >> STFT and PSD must be in shape (B,C,F,T,2) and (B,F,T), respectively.
        nfreq, nfrm = psd_x.size(1), psd_x.size(2)
        stft_x = stft_x.view(nb, nc, nfreq, nfrm, 2).contiguous() # (B,C,F,T,2)
        # Average the PSD estimate across channels.
        psd_x_mean = psd_x.view(nb, nc, nfreq, nfrm).mean(dim=1) # (B,C,F,T) >> (B,F,T)
        stft_v = wpe_mb_torch_ri(
            stft_x.type(dtype), psd_x_mean, taps=taps, delay=delay) # (B,C,F,T,2)
        stft_v = stft_v.float()
        ## Inverse STFT
        stft_v = stft_v.view(nb*nc, nfreq, nfrm, 2) # (BC,F,T,2)
        sig_v = self.stft_helper.istft(stft_v, length=nt) # (BC,t)
        sig_v = sig_v.view(nb, nc, nt) # (B,C,t)
        return sig_v
| StarcoderdataPython |
3203236 | <gh_stars>1-10
import os
import numpy as np
import torch
import copy
from pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE
from pytorch_pretrained_bert import BertTokenizer, BertModel
# load data
# processor = NerProcessor()
# label_list = processor.get_labels()
# num_labels = len(label_list) + 1
#
# train_examples = processor.get_train_examples("../BERT-NER/data/")
# test_examples = processor.get_test_examples("../BERT-NER/data/")
# dev_examples = processor.get_dev_examples("../BERT-NER/data/")
print("loading embedding model")
cache_dir = os.path.join(str(PYTORCH_PRETRAINED_BERT_CACHE), 'distributed_{}'.format(0))
Embedding_model = BertModel.from_pretrained("/home/michaelchen/wwm_uncased_L-24_H-1024_A-16/")
Embedding_model.eval()
Embedding_model.to('cuda')
for i, p in enumerate(Embedding_model.parameters()):
p.requires_grad = False
Embedding_tokenizer = BertTokenizer.from_pretrained('/home/michaelchen/wwm_uncased_L-24_H-1024_A-16/')
print("finish loading embedding model")
# print("loading embedding model")
# cache_dir = os.path.join(str(PYTORCH_PRETRAINED_BERT_CACHE), 'distributed_{}'.format(0))
# Embedding_model = BertModel.from_pretrained("bert-base-uncased")
# Embedding_model.eval()
# Embedding_model.to('cuda')
# Embedding_tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
# print("finish loading embedding model")
print("loading point cloud data")
PC_data = np.load('/home/michaelchen/bert-embedding/clustered-embedding-921-237.npy', allow_pickle=True)
PC_data = PC_data.item()
print("finish loading point cloud data")
# def pad_tokenids(tokens_tensor, max_len=MAX_LEN):
# while len(tokens_tensor) < max_len:
# tokens_tensor.append(0)
# return tokens_tensor
def gen_embedding(input_ids, token_type_ids, model=Embedding_model):
    """Return the last BERT encoder layer's activations for a batch, gradient-free.

    NOTE(review): input_ids.get_device() returns -1 for CPU tensors, so this
    assumes CUDA-resident inputs — confirm.
    """
    with torch.no_grad():
        device_index = input_ids.get_device()
        # Keep the shared model on the same CUDA device as this batch.
        model.to(device='cuda:' + str(device_index))
        encoded_layers = model(input_ids, token_type_ids)
        return encoded_layers[0][-1]
def find_words(tokens):
    """Reassemble WordPiece tokens into whole words.

    Returns (words, spans): for each reconstructed word, its string and a
    [start_index, token_count] pair locating it in *tokens*. Single-character
    tokens and non-alphabetic tokens break words; "##" pieces continue (or,
    when nothing is pending, start) the current word.
    """
    words, spans = [], []
    current = ""
    start = count = 0

    def flush():
        nonlocal current
        if current:
            words.append(current)
            spans.append([start, count])
            current = ""

    for pos, tok in enumerate(tokens):
        if tok[0].isalpha() and len(tok) > 1:
            # A new word begins: emit any pending word first.
            flush()
            current = tok
            start, count = pos, 1
        elif tok.startswith("##"):
            if current:
                current += tok[2:]
                count += 1
            else:
                # Stray continuation piece starts a fresh word (e.g. "2.5million").
                current = tok[2:]
                start, count = pos, 1
        else:
            flush()
    flush()
    return words, spans
def gen_smoothed_embedding(input_ids, token_type_ids, batch_initial_embeddings, data=PC_data, tokenizer = Embedding_tokenizer):
    """Snap each word's embedding to its nearest clustered ("point cloud") candidate.

    For every word reassembled from the batch tokens, find the candidate row in
    data[word] with the smallest L2 distance to the word's current contextual
    embedding and write it into *batch_initial_embeddings* in place.
    Returns the (mutated) batch_initial_embeddings.
    """
    batch_of_embeddings = gen_embedding(input_ids, token_type_ids)
    for i in range(len(batch_of_embeddings)):
        sentence = batch_of_embeddings[i]  # (seq_len, 1024)
        tokens = tokenizer.convert_ids_to_tokens(input_ids[i].tolist())
        words, word_indices = find_words(tokens)
        for j, word in enumerate(words):
            if word not in data:
                continue
            pc_embeddings = torch.from_numpy(data[word]).to("cuda")
            # Each candidate row concatenates one 1024-d vector per subword.
            num_subwords = pc_embeddings.shape[1] // 1024
            start, length = word_indices[j]
            current_embedding = sentence[start:start + length].view([1, -1])
            if pc_embeddings.shape[1] != current_embedding.shape[1]:
                print("Error")
                continue
            # BUG FIX: torch.norm(distance, 2) collapsed the whole matrix to a
            # single scalar, so torch.argmin always returned 0 and the first
            # candidate was chosen regardless of distance. Compute one distance
            # per candidate row instead.
            distance = pc_embeddings - current_embedding
            norm = torch.norm(distance, 2, dim=1)  # (num_candidates,)
            best_embedding = pc_embeddings[torch.argmin(norm)]
            if num_subwords == 1:
                try:
                    batch_initial_embeddings[i][start] = best_embedding
                except Exception:
                    continue
            else:
                # BUG FIX: the inner loop reused the name `i`, clobbering the
                # sentence index and writing candidate slices into the wrong
                # rows of the batch.
                for k in range(num_subwords):
                    try:
                        batch_initial_embeddings[i][start + k] = best_embedding[k * 1024:(k + 1) * 1024]
                    except Exception:
                        continue
    return batch_initial_embeddings
# skip_idxs = []
# batch = []
# for j, word_str in words:
# word_tensor = PC_data[word_str] # word_str: [10 x 1024] or [10 x 1024+]
# if word_tensor.size()[1] > 1024:
# word_tensor = torch.zeros([10, 1024])
# # mark down this idx for later skipping
# skip_idxs.append((i,j)) # the jth word in the ith sentence
# word_tensor = torch.transpose(word_tensor, 0, 1)
# word_tensor = word_tensor.unsqueeze(0) # [1 x 1024 x 10]
# batch.append(word_tensor)
# batch_of_words_dict = torch.cat(batch, 0) # [128 x 1024 x 10)], 128 is len(batch_of_words[i])
#
# sentence_sim_scores = torch.bmm(sentence, batch_of_words_dict) # [128 x 1 x 10]
| StarcoderdataPython |
4993537 | import os
import sys
from shutil import rmtree
from zipfile import ZipFile
from ..parameters import ZIP_OPTIONS
from ..core.helpers import console, splitModRef
GH_BASE = os.path.expanduser(f'~/github')
DW_BASE = os.path.expanduser(f'~/Downloads')
TEMP = '_temp'
RELATIVE = 'tf'
HELP = '''
USAGE
text-fabric-zip --help
text-fabric-zip {org}/{repo}/{relative}
EFFECT
Zips text-fabric data from your local github repository into
a release file, ready to be attached to a github release.
Your repo must sit in ~/github/{org}/{repo}.
Your TF data is assumed to sit in the toplevel tf directory of your repo.
But if it is somewhere else, you can pass relative, e.g phrases/heads/tf
It is assumed that your tf directory contains subdirectories according to
the versions of the main datasource.
The actual .tf files are in those version directories.
Each of these version directories will be zipped into a separate file.
The resulting zip files end up in ~/Downloads/{org}-release/{repo}
and the are named {relative}-{version}.zip
(where the / in relative have been replaced by -)
'''
EXCLUDE = {'.DS_Store'}
def zipData(org, repo, relative=RELATIVE, tf=True, keep=False):
  """Zip data from ~/github/{org}/{repo}/{relative} into release files.

  With tf=True every version subdirectory becomes its own
  {relative}-{version}.zip of .tf feature files; otherwise the whole tree is
  zipped into a single archive. Output goes to
  ~/Downloads/{org}-release/{repo}. keep=True leaves existing output in place
  instead of wiping the destination first.
  """
  console(f'Create release data for {org}/{repo}/{relative}')
  sourceBase = f'{GH_BASE}/{org}'
  destBase = f'{DW_BASE}/{org}-release'
  sourceDir = f'{sourceBase}/{repo}/{relative}'
  destDir = f'{destBase}/{repo}'
  dataFiles = {}
  if not keep:
    if os.path.exists(destDir):
      rmtree(destDir)
  os.makedirs(destDir, exist_ok=True)
  relativeDest = relative.replace('/', '-')
  if tf:
    if not os.path.exists(sourceDir):
      return
    # Version subdirectories of the tf dir; a flat dir means unversioned data.
    with os.scandir(sourceDir) as sd:
      versionEntries = [
          (sourceDir, e.name)
          for e in sd
          if e.is_dir()
      ]
    if versionEntries:
      console(f'Found {len(versionEntries)} versions')
    else:
      versionEntries.append((sourceDir, ''))
      console('Found unversioned features')
    # Collect the .tf feature files of each version.
    for (versionDir, version) in versionEntries:
      if version == TEMP:
        continue
      versionRep = f'/{version}' if version else ''
      versionRep2 = f'{version}/' if version else ''
      tfDir = f'{versionDir}{versionRep}'
      with os.scandir(tfDir) as sd:
        for e in sd:
          if not e.is_file():
            continue
          featureFile = e.name
          if featureFile in EXCLUDE:
            continue
          if not featureFile.endswith('.tf'):
            console(f'WARNING: non feature file "{versionRep2}{featureFile}"', error=True)
            continue
          dataFiles.setdefault(version, set()).add(featureFile)
    console(f'zip files end up in {destDir}')
    # One zip per version.
    for (version, features) in sorted(dataFiles.items()):
      item = f'{org}/{repo}'
      versionRep = f'/{version}' if version else ''
      versionRep3 = f'-{version}' if version else ''
      target = f'{relativeDest}{versionRep3}.zip'
      console(f'zipping {item:<25} {version:>4} with {len(features):>3} features ==> {target}')
      with ZipFile(
          f'{destDir}/{target}',
          'w',
          **ZIP_OPTIONS,
      ) as zipFile:
        for featureFile in sorted(features):
          zipFile.write(
              f'{sourceDir}{versionRep}/{featureFile}',
              arcname=featureFile,
          )
  else:
    def collectFiles(base, path, results):
      """Recursively collect (archive-name, file-path) pairs under base/path."""
      thisPath = f'{base}/{path}' if path else base
      internalBase = f'{relative}/{path}' if path else relative
      # BUG FIX: the original called os.scanDir (capital D), which raises
      # AttributeError, so the non-tf branch could never run.
      with os.scandir(thisPath) as sd:
        for e in sd:
          name = e.name
          if name in EXCLUDE:
            continue
          if e.is_file():
            results.append((f'{internalBase}/{name}', f'{base}/{path}/{name}'))
          elif e.is_dir():
            collectFiles(base, f'{path}/{name}', results)
    results = []
    collectFiles(sourceDir, '', results)
    if not relativeDest:
      relativeDest = '-'
    console(f'zipping {org}/{repo}/{relative} with {len(results)} files')
    console(f'zip file is {destDir}/{relativeDest}.zip')
    with ZipFile(
        f'{destDir}/{relativeDest}.zip',
        'w',
        **ZIP_OPTIONS,
    ) as zipFile:
      for (internalPath, path) in sorted(results):
        zipFile.write(
            path,
            arcname=internalPath,
        )
def main(cargs=sys.argv):
  """Command-line entry point: validate arguments, then zip the referenced data."""
  # BUG FIX: the original used `and`, so running with no argument skipped the
  # help text and crashed on cargs[1] with an IndexError. Any wrong arg count
  # or an explicit help flag now prints usage.
  if len(cargs) != 2 or any(arg in {'--help', '-help', '-h', '?', '-?'} for arg in cargs):
    console(HELP)
    return
  moduleRef = cargs[1]
  parts = splitModRef(moduleRef)
  if not parts:
    console(HELP)
    return
  (org, repo, relative, checkout) = parts
  # Heuristic: treat the data as versioned TF features when the relative path
  # ends in, or passes through, a `tf` directory.
  tf = (
      relative.endswith('tf') or
      '/tf/' in relative
  )
  # NOTE(review): looks like leftover debug output — confirm before removing.
  sys.stdout.write(f'{tf}\n')
  zipData(org, repo, relative=relative, tf=tf)
| StarcoderdataPython |
8177162 | <reponame>angelakuo/jupyter-extensions
from notebook.utils import url_path_join
from jupyterlab_vizier.handlers import ListHandler
from jupyterlab_vizier.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_vizier'}]
def load_jupyter_server_extension(nb_server_app):
    """Entry point invoked by the notebook server when the extension loads.

    Registers the Vizier v1 handlers on the server's web application.

    Args:
        nb_server_app (NotebookWebApplication): handle to the Notebook
            webserver instance.
    """
    web_app = nb_server_app.web_app
    endpoint = web_app.settings['base_url']
    gcp_v1_endpoint = url_path_join(endpoint, 'vizier', 'v1')
    handlers = [
        # TODO(cbwilkes): Add auth checking if needed.
        # (url_path_join(gcp_v1_endpoint, auth'), AuthHandler)
        (url_path_join(gcp_v1_endpoint, 'list') + '(.*)', ListHandler),
    ]
    web_app.add_handlers('.*$', handlers)
| StarcoderdataPython |
4888207 | import os
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# Normalize the scheme: SQLAlchemy 1.4+ rejects the legacy "postgres://" form
# (still emitted by some providers) in favor of "postgresql://".
DATABASE_URL = os.environ["DATABASE_URL"].replace("postgres://", "postgresql://")
"""
From https://docs.sqlalchemy.org/en/14/core/pooling.html
Default pool/overflow size is 5/10, timeout 30 seconds
max_overflow=10 - the number of connections to allow in connection pool “overflow”, that is connections that can be opened above and beyond the pool_size setting, which defaults to five. this is only used with QueuePool.
pool_size=5 - the number of connections to keep open inside the connection pool. This used with QueuePool as well as SingletonThreadPool. With QueuePool, a pool_size setting of 0 indicates no limit; to disable pooling, set poolclass to NullPool instead.
pool_timeout=30 - number of seconds to wait before giving up on getting a connection from the pool. This is only used with QueuePool. This can be a float but is subject to the limitations of Python time functions which may not be reliable in the tens of milliseconds.
pool_recycle=-1 - this setting causes the pool to recycle connections after the given number of seconds has passed. It defaults to -1, or no timeout. For example, setting to 3600 means connections will be recycled after one hour. Note that MySQL in particular will disconnect automatically if no activity is detected on a connection for eight hours (although this is configurable with the MySQLDB connection itself and the server configuration as well).
"""
# Pool sized above the library defaults (5/10) documented in the note above.
engine = create_engine(DATABASE_URL, pool_size=10, max_overflow=15, pool_timeout=30)
# Session factory; commits and flushes are explicit in calling code.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Declarative base class for ORM models.
Base = declarative_base()
def get_db():
    """Dependency-style generator: yield a session, always close it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
| StarcoderdataPython |
3451242 | <filename>repositories/actions/customer.py<gh_stars>0
from .baseactions import BaseActions
from models.customer import Customer
import re
class CustomerActions(BaseActions):
    """Translate attribute diffs into commercetools-style customer update actions.

    Diff keys are dotted paths like "customer.email" or
    "customer.addresses[3]"; only the part after the first dot is inspected.
    """

    # Changed scalar attribute -> update-action name. For every entry the
    # payload key equals the attribute name itself.
    _FIELD_ACTIONS = {
        'email': 'changeEmail',
        'firstName': 'setFirstName',
        'lastName': 'setLastName',
        'middleName': 'setMiddleName',
        'title': 'setTitle',
        'salutation': 'setSalutation',
    }

    @classmethod
    def _regular_attribute_actions(cls, diff: dict, obj, old_obj=None):
        """Build change/set actions for changed scalar customer fields."""
        actions = []
        for root_attr in diff:
            attr = root_attr.split('.')[1]
            action_name = cls._FIELD_ACTIONS.get(attr)
            if action_name is not None:
                actions.append({'action': action_name, attr: getattr(obj, attr)})
        return actions

    @classmethod
    def _iterable_attribute_add_actions(cls, diff: dict, obj, old_obj=None):
        """Build addAddress actions for newly added addresses in the diff."""
        actions = []
        for root_attr in diff:
            attr = root_attr.split('.')[1]
            if 'addresses' in attr:
                actions.append(
                    {'action': 'addAddress', 'address': diff[root_attr].__dict__})
        return actions

    @classmethod
    def _iterable_attribute_update_actions(cls, diff: dict, obj, old_obj=None):
        """Build changeAddress actions for modified addresses in the diff."""
        actions = []
        for root_attr in diff:
            attr = root_attr.split('.')[1]
            if 'addresses' in attr:
                # BUG FIX: the original pattern r'[\d+]' is a character class
                # matching single digits, so "addresses[12]" parsed as index 1.
                # r'\d+' captures the whole number; also hoisted the duplicated
                # findall/indexing into one lookup.
                idx = int(re.findall(r'\d+', attr)[0])
                address = obj.addresses[idx]
                actions.append({'action': 'changeAddress',
                                'addressId': address.id,
                                'address': address.__dict__})
        return actions

    @classmethod
    def _iterable_attribute_remove_actions(cls, diff: dict, obj, old_obj=None):
        """Build removeAddress actions for addresses deleted in the diff."""
        actions = []
        for root_attr in diff:
            attr = root_attr.split('.')[1]
            if 'addresses' in attr:
                actions.append({'action': 'removeAddress',
                                'addressId': diff[root_attr].id})
        return actions
| StarcoderdataPython |
44769 | """Utilities for tests"""
import copy
import re
# %-format templates for the assertion messages raised while parsing a FASTQ
# file (see Fastq.__init__ below); each takes the offending line number first.
BAD_ID = "line %s: id '%s' doesn't match '%s'"
BAD_SEQLEN = "line %s: %s is not the same length as the first read (%s)"
BAD_BASES = "line %s: %s is not in allowed set of bases %s"
BAD_PLUS = "line %s: expected '+', got %s"
BAD_QUALS = "line %s: %s is not the same length as the first read (%s)"
MSG_INCOMPLETE = "incomplete record at end of file %s"
class Fastq:
    """A convenient data structure for handling the fastqs generated by qasim.
    NOTES:
    * Read id's are the form: @NAME_COORD1_COORD2_ERR1_ERR2_N/[1|2].
    * COORD1 and COORD2 are the coordinates of the fragment ends.
    * Illumina pair-end reads have read 1 forward and read 2 reverse:
        >>>>>>>>>>>>>>
            <<<<<<<<<<<<<<
    * When run in normal (non-wgsim) mode, for pairs where read 1 is from
    the reference strand the coordinates are ordered such that:
    COORD1 < COORD2. For "flipped" reads where read 1 is from the
    reverse strand the coordinates are ordered such that:
    COORD1 > COORD2.
    * When run in legacy (wgsim) mode, coordinates are always ordered:
    COORD1 < COORD2 and there's no way to tell by inspection what strand
    a read is from."""
    # Bases permitted in a read sequence; anything else fails an assertion.
    allowed_bases = {'A', 'C', 'G', 'T', 'N'}
    # Watson-Crick complement used by revcomp(); N maps to itself.
    complement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A', 'N': 'N'}
    # Groups: name, coord1, coord2, err1, err2, hex tag, read number (1|2).
    id_regex = re.compile(
        r"^@(.+)_(\d+)_(\d+)_e(\d+)_e(\d+)_([a-f0-9]+)\/([12])$")
    def __init__(self, filename):
        """Parse *filename* as 4-line FASTQ records, validating every line."""
        self.records = []
        self.read_length = -1   # length of the first read; all must match it
        self.forwardized = False
        self.minpos = -1        # lowest fragment coordinate seen
        self.maxpos = -1        # highest fragment coordinate seen
        with open(filename, 'rt') as fh:
            read = frag_start = frag_end = lastline = 0
            for linenum, line in enumerate(fh.readlines(), 1):
                lastline = linenum
                # Line role cycles every 4 lines: id, sequence, '+', quals.
                if linenum % 4 == 1:
                    read_id = line.strip()
                    matches = self.id_regex.match(read_id)
                    assert matches, BAD_ID % (linenum, read_id, self.id_regex)
                    frag_start, frag_end = [
                        int(c) for c in matches.groups()[1:3]]
                    read = int(matches.groups()[-1])
                elif linenum % 4 == 2:
                    seq = line.strip()
                    if self.read_length == -1:
                        self.read_length = len(seq)
                    else:
                        assert len(seq) == self.read_length, \
                            BAD_SEQLEN % (linenum, seq, self.read_length)
                    disallowed = set(seq) - self.allowed_bases
                    assert not disallowed, \
                        BAD_BASES % (linenum, disallowed, self.allowed_bases)
                elif linenum % 4 == 3:
                    plus = line.strip()
                    assert plus == "+", BAD_PLUS % (linenum, plus)
                # Plain "if" (not elif) is safe: the % 4 cases are disjoint.
                if linenum % 4 == 0:
                    quals = line.strip()
                    assert len(quals) == self.read_length, \
                        BAD_QUALS % (linenum, quals, self.read_length)
                    self.records.append({
                        "id": read_id, "seq": seq, "quals": quals,
                        "frag_start": frag_start, "frag_end": frag_end,
                        "read": read})
                    low = min(frag_start, frag_end)
                    high = max(frag_start, frag_end)
                    if self.minpos == -1 or low < self.minpos:
                        self.minpos = low
                    if self.maxpos == -1 or high > self.maxpos:
                        self.maxpos = high
        assert lastline % 4 == 0, MSG_INCOMPLETE % (filename)
    def coverage(self, pos):
        """Return reads covering pos"""
        # simple logic if all reads are forward on the reference strand:
        if self.forwardized:
            return [r for r in self.records if
                    r['read_start'] <= pos <=
                    r['read_start'] + self.read_length - 1]
        # more cases to consider if not:
        else:
            covering = []
            for r in self.records:
                start = min(r['frag_start'], r['frag_end'])
                end = max(r['frag_start'], r['frag_end'])
                read = r['read']
                # frag_start > frag_end marks a pair whose read 1 came from
                # the reverse strand (see the class docstring).
                flipped = True if r['frag_start'] > r['frag_end'] else False
                if (read == 1 and not flipped and
                        start <= pos <= start + self.read_length - 1 or
                        read == 2 and not flipped and
                        end - self.read_length + 1 <= pos <= end or
                        read == 1 and flipped and
                        end - self.read_length + 1 <= pos <= end or
                        read == 2 and flipped and
                        start <= pos <= start + self.read_length - 1):
                    covering.append(r)
            return covering
    def basecounts(self):
        """Return a dict of { base: count } aggregated over all reads"""
        counts = {}
        for r in self.records:
            for base in r['seq']:
                counts[base] = counts.setdefault(base, 0) + 1
        return counts
    @classmethod
    def forwardize(cls, original):
        """Return a copy of original with all reads turned into forward reads:
        a calculational convenience"""
        # Deep copy: the input Fastq is never mutated.
        fwdized = copy.deepcopy(original)
        for r in fwdized.records:
            frag_start, frag_end = r['frag_start'], r['frag_end']
            read = r['read']
            if (read == 1 and frag_start < frag_end):
                r['read_start'] = frag_start
            elif (read == 1 and frag_start > frag_end):
                r['seq'] = ''.join(cls.revcomp(r['seq']))
                r['quals'] = ''.join(reversed(r['quals']))
                r['read_start'] = frag_start - fwdized.read_length + 1
            elif (read == 2 and frag_start < frag_end):
                r['seq'] = ''.join(cls.revcomp(r['seq']))
                r['quals'] = ''.join(reversed(r['quals']))
                r['read_start'] = frag_end - fwdized.read_length + 1
            elif (read == 2 and frag_start > frag_end):
                r['read_start'] = frag_end
            else:
                raise Exception("Unhandled case:", r)
        fwdized.forwardized = True
        return fwdized
    @classmethod
    def revcomp(cls, seq):
        """Return the reverse complement of *seq* as a list of characters."""
        return [cls.complement[b] for b in reversed(seq)]
| StarcoderdataPython |
317285 | from time import sleep
import rnc.corpora as rnc
from tests.corpora.template import TemplateCorpusTest
class TestAccentologicalCorpus(TemplateCorpusTest):
    """Integration tests for the RNC accentological corpus.

    NOTE: the class body performs live network requests (with rate-limit
    sleeps) at import time to build the shared fixtures the template tests
    operate on.
    """
    corp_type = rnc.AccentologicalCorpus
    corp_normal_obj = corp_type('ты', 1, dpp=5, spd=1)
    corp_kwic_obj = corp_type('ты', 1, dpp=5, spd=1, out='kwic')
    corp_normal_obj.request_examples()
    sleep(5)
    corp_kwic_obj.request_examples()
    sleep(5)
    def test_mycorp(self):
        """Searching inside a custom sub-corpus returns at least one example."""
        corp = self.corp_type(
            'ты', 1,
            # '<KEY>' is a redacted credential placeholder. The original
            # literal was left unterminated by the redaction (missing closing
            # quote), which made this module a SyntaxError; closing it
            # restores valid adjacent-string concatenation.
            mycorp='<KEY>'
            'Rg9GI0LrQuNC9Il0sICJkb2NfaV9sZV9lbmRfeWV'
            'hciI6IFsiMTgzMCJdfQ%3D%3D'
        )
        corp.request_examples()
        assert len(corp) >= 1
        sleep(5)
| StarcoderdataPython |
3596061 | <reponame>loleg/kandidaten<filename>api/api.py
from flask_peewee.rest import RestAPI, RestResource, UserAuthentication, AdminAuthentication, RestrictOwnerResource
from app import app
from auth import auth
from models import Councillor, Promise, Decision, Comment
# Mount the REST API on the Flask app; admin-only auth is used below to
# protect the user resource.
api = RestAPI(app)
admin_auth = AdminAuthentication(auth)
class CantonResource(RestResource):
    """Expose Canton rows with no fields excluded."""
    exclude = ()
class CouncilResource(RestResource):
    """Expose Council rows with no fields excluded."""
    exclude = ()
class PartyResource(RestResource):
    """Expose Party rows with no fields excluded."""
    exclude = ()
class CouncillorResource(RestResource):
    """Councillor rows with canton, council and party serialized inline."""
    include_resources = {
        'canton': CantonResource,
        'council': CouncilResource,
        'party': PartyResource,
    }
class PromiseResource(RestResource):
    """Promise rows with their councillor serialized inline."""
    include_resources = {
        'councillor': CouncillorResource
    }
class DecisionResource(RestResource):
    """Decision rows with the councillor field excluded from serialization.

    NOTE: the previous value ('councillor') — without a trailing comma — was
    a plain string, not a one-element tuple; iterating it yields single
    characters, so the field was never actually excluded.
    """
    exclude = ('councillor',)
class CommentResource(RestResource):
    """Comment rows with their promise and decision serialized inline."""
    include_resources = {
        'promise': PromiseResource,
        'decision': DecisionResource
    }
    # NOTE(review): presumably disables pagination for this resource —
    # confirm against flask-peewee's RestResource.paginate_by handling.
    paginate_by = None
class UserResource(RestResource):
    """User rows; password and email are never serialized."""
    exclude = ('password', 'email',)
# register our models so they are exposed via /api/<model>/
# Only the user resource requires admin authentication.
api.register(Councillor, CouncillorResource)
api.register(Promise, PromiseResource)
api.register(Decision, DecisionResource)
api.register(Comment, CommentResource)
api.register(auth.User, UserResource, auth=admin_auth)
| StarcoderdataPython |
3511927 | <gh_stars>1-10
#!/usr/bin/env python
'''
Features for prepare source code.
- prepare :: generic
- autoconf :: run "configure" script found in source directory
- cmake :: run cmake
These features all rely on the "unpack" step to have run. It produces a "prepare" step.
'''
from waflib.TaskGen import feature
import waflib.Logs as msg
from orch.wafutil import exec_command
import orch.features
# Default worch parameters for the generic "prepare" feature.
orch.features.register_defaults(
    'prepare',
    source_unpacked_path = '{source_dir}/{source_unpacked}',
    prepare_cmd = None, # must provide
    prepare_cmd_std_opts = '',
    prepare_cmd_options = '',
    prepare_target = None, # must provide
    prepare_target_path = '{build_dir}/{prepare_target}',
)
@feature('prepare')
def feature_prepare(tgen):
    """Generic prepare step: run the configured prepare_cmd after "unpack"."""
    cmdstr = tgen.worch.format('{prepare_cmd} {prepare_cmd_std_opts} {prepare_cmd_options}')
    tgen.step('prepare',
              rule = cmdstr,
              source = tgen.control_node('unpack'),
              target = tgen.worch.prepare_target_path)
# Default worch parameters for the autoconf-style "configure" feature.
orch.features.register_defaults(
    'autoconf',
    source_unpacked_path = '{source_dir}/{source_unpacked}',
    prepare_cmd = '{source_unpacked_path}/configure',
    prepare_cmd_std_opts = '--prefix={install_dir}',
    prepare_cmd_options = '',
    prepare_target = 'config.status',
    prepare_target_path = '{build_dir}/{prepare_target}',
)
@feature('autoconf')
def feature_autoconf(tgen):
    """Prepare step that runs the package's ./configure script."""
    # Resolve the configure script to an absolute path before appending opts.
    cmdstr = tgen.make_node(tgen.worch.prepare_cmd).abspath()
    cmdstr += tgen.worch.format(' {prepare_cmd_std_opts} {prepare_cmd_options}')
    tgen.step('prepare',
              rule = cmdstr,
              #after = tgen.worch.package + '_unpack',
              source = tgen.control_node('unpack'),
              target = tgen.worch.prepare_target_path)
# Default worch parameters for the cmake feature.
orch.features.register_defaults(
    'cmake',
    source_unpacked_path = '{source_dir}/{source_unpacked}',
    prepare_cmd = 'cmake',
    prepare_cmd_std_opts = '-DCMAKE_INSTALL_PREFIX={install_dir}',
    prepare_cmd_options = '',
    prepare_target = 'CMakeCache.txt',
    prepare_target_path = '{build_dir}/{prepare_target}',
)
@feature('cmake')
def feature_cmake(tgen):
    """Prepare step that runs cmake against the unpacked source tree."""
    cmkfile = tgen.make_node(tgen.worch.source_unpacked_path + '/CMakeLists.txt')
    def prepare(task):
        # cmake is pointed at the directory containing CMakeLists.txt.
        cmdstr = '{prepare_cmd} {srcdir} {prepare_cmd_std_opts} {prepare_cmd_options}'
        cmd = tgen.worch.format(cmdstr, srcdir=cmkfile.parent.abspath())
        return exec_command(task, cmd)
    #msg.debug('orch: cmkfile: %s' % cmkfile.abspath())
    tgen.step('prepare',
              rule = prepare,
              source = [tgen.control_node('unpack')],
              target = tgen.worch.prepare_target_path)
| StarcoderdataPython |
1787938 | from changer import AmbientBackgrounds
class Main:
    """Thin entry point that wires up and starts the background changer."""
    def run(self):
        # Create the changer and hand over control; begin() drives it.
        self.ambient_bg = AmbientBackgrounds()
        self.ambient_bg.begin()
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    Main().run()
| StarcoderdataPython |
9758427 | # -*- coding: utf-8 -*-
from flask import Blueprint, render_template
from duffy.models import Host
# Blueprint serving SeaMicro-specific views under the /seamicro URL prefix.
blueprint = Blueprint('seamicro', __name__, url_prefix='/seamicro',
                      template_folder='templates')
@blueprint.route('/kickstarts/<hostname>')
def kickstart(hostname):
    """Render the CentOS 7 kickstart for *hostname* as plain text.

    Responds 404 when no Host row matches the requested hostname.
    """
    h = Host.query.filter(Host.hostname == hostname).first_or_404()
    return render_template('seamicro-centos-7-ks.j2', host=h),\
        {'Content-Type': 'text/plain; charset=utf-8'}
| StarcoderdataPython |
5053761 | import os
from collections import namedtuple
from hislicing import env_const
import logging
logger = logging.getLogger(__name__)
# Full CSlicer configuration as read from a .properties file.
Cfg = namedtuple("Config",
                 ["repoPath", "execPath", "sourceRoot", "classRoot", "startCommit", "endCommit", "buildScriptPath",
                  "testScope", "touchSetPath"])
# Subset of config values extracted by extractInfoFromCSlicerConfigs below.
ExtractedCfg = namedtuple("ExtractedCfg", "start, end, repo_name, test_suite, repo_path, lines, config_file")
def extractInfoFromCSlicerConfigs(example: str) -> ExtractedCfg:
    """Read start commit, end commit, repo name and test suite for *example*.

    Locates '<example>.properties' under env_const.NEW_CONFIGS_DIR and parses
    the relevant keys; exits the process when no config file is found.
    """
    # find the config file
    config_file = search_file(env_const.NEW_CONFIGS_DIR, example + '.properties')
    if config_file is None:
        logger.error(f'Cannot find config file for "{example}"')
        exit(0)
    # "with" guarantees the handle is closed even if parsing below raises.
    with open(config_file, 'r') as fr:
        lines = fr.readlines()
    for line in lines:
        if line.startswith('startCommit'):
            start = line.strip().split()[-1]
        elif line.startswith('endCommit'):
            end = line.strip().split()[-1]
        elif line.startswith('repoPath'):
            # The repo name is the second-to-last path component of repoPath.
            repo_name = line.split('/')[-2]
        elif line.startswith('testScope'):
            test_suite = line.strip().split()[-1]
    repo_path = env_const.DOWNLOADS_DIR + '/' + repo_name
    cfg = ExtractedCfg(start, end, repo_name, test_suite, repo_path, lines, config_file)
    logger.debug(cfg)
    return cfg
def search_file(dir_root, file_name):
    """Recursively search *dir_root* for a file named exactly *file_name*.

    Returns the full path of the first match in os.walk order, or None when
    no such file exists. Uses os.path.join for platform-correct separators
    instead of hard-coded '/' concatenation.
    """
    for dir_path, _subdirs, files in os.walk(dir_root):
        if file_name in files:
            return os.path.join(dir_path, file_name)
    return None
| StarcoderdataPython |
9670413 | <filename>remme/token/token_cli.py
# Copyright 2018 REMME
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
from remme.shared.basic_cli import BasicCli
from remme.token.token_client import TokenClient
# TODO create decorator to remove manual changes to "commands"
from remme.shared.exceptions import CliException, KeyNotFound
# argparse sub-command names shared by the parser_* / do_* methods below.
METHOD_TRANSFER = 'transfer'
METHOD_BALANCE = 'balance'
METHOD_ADDRESS = 'address'
class TokenCli(BasicCli):
    """CLI for REMME token operations: transfer, balance and address lookup."""

    def __init__(self):
        self.client = TokenClient()

    def parser_transfer(self, subparsers, parent_parser):
        """Register the 'transfer' sub-command and its arguments."""
        message = 'Send REMME token transfer transaction.'
        parser = subparsers.add_parser(
            METHOD_TRANSFER,
            parents=[parent_parser],
            description=message,
            help='Transfers <amount> of tokens to <address>.')
        parser.add_argument(
            'address_to',
            type=str,
            help='REMME account address.')
        parser.add_argument(
            'value',
            type=int,
            help='Amount of REMME tokens to transfer with 4 decimals.')

    def parser_balance(self, subparsers, parent_parser):
        """Register the 'balance' sub-command and its arguments."""
        message = 'Show address balance.'
        parser = subparsers.add_parser(
            METHOD_BALANCE,
            parents=[parent_parser],
            description=message,
            help='Balance of <address>.')
        parser.add_argument(
            'address',
            type=str,
            help='Check address. Specify "me" to use your address.')

    def parser_address(self, subparsers, parent_parser):
        """Register the 'address' sub-command and its arguments."""
        message = 'Show current address or make one from public key.'
        parser = subparsers.add_parser(
            METHOD_ADDRESS,
            parents=[parent_parser],
            description=message,
            help='You may specify "me" instead of a public key.')
        parser.add_argument(
            'pub_key',
            type=str,
            help='Type "me" or public address from which to show address in REMME network')

    def do_address(self, args):
        """Print the REMME address derived from a public key (or 'me')."""
        public_key = args.pub_key
        if public_key == 'me':
            public_key = self.client._signer.get_public_key().as_hex()
        # Validate: must parse as a non-zero hex number of exactly 66 digits.
        # The previous code let int() raise an uncaught ValueError on
        # non-hex input instead of raising the intended CliException.
        try:
            valid = bool(int(public_key, 16)) and len(public_key) == 66
        except ValueError:
            valid = False
        if not valid:
            raise CliException('Please, make sure public key is a 66 digit hex number: {}'.format(public_key))
        print(self.client.make_address_from_data(public_key))

    def do_transfer(self, args):
        """Submit a token transfer and print the status-check link."""
        status = self.client.transfer(address_to=args.address_to, value=args.value)
        print('Transfer status check: {}'.format(status['link']))

    def do_balance(self, args):
        """Print the balance of an address ('me' resolves to our own key)."""
        if args.address == 'me':
            args.address = self.client.make_address_from_data(self.client._signer.get_public_key().as_hex())
        try:
            account = self.client.get_account(address=args.address)
            print("Balance: {}\n".format(account.balance))
        except KeyNotFound:
            # Unknown address simply means nothing has been credited yet.
            print('Balance: 0 REM')
        except Exception as e:
            # CLI boundary: surface any other failure without a traceback.
            print(e)

    def init(self):
        """Wire sub-commands to their handlers and enter the CLI loop."""
        commands = [
            {
                'name': METHOD_TRANSFER,
                'parser': self.parser_transfer,
                'action': self.do_transfer
            },
            {
                'name': METHOD_BALANCE,
                'parser': self.parser_balance,
                'action': self.do_balance
            },
            {
                'name': METHOD_ADDRESS,
                'parser': self.parser_address,
                'action': self.do_address
            }
        ]
        self.main_wrapper(commands)
def main():
TokenCli().init() | StarcoderdataPython |
1969864 | <reponame>mtianyan/TensorFlowPlayDemo
# -*- coding: UTF-8 -*-
"""
RNN-LSTM 循环神经网络
"""
import tensorflow as tf
import keras
# The neural-network model (stacked RNN-LSTM).
def network_model(inputs, num_pitch, weights_file=None):
    """Build the stacked-LSTM network; load saved weights when generating.

    inputs       -- training tensor; only inputs.shape[1:] is used here
    num_pitch    -- number of distinct pitches (size of the output layer)
    weights_file -- optional HDF5 file of pre-trained layer weights
    """
    model = keras.models.Sequential()
    model.add(keras.layers.LSTM(
        512,  # dimensionality of the output
        input_shape=(inputs.shape[1], inputs.shape[2]),  # shape of the input
        return_sequences=True  # return the full output sequence
    ))
    model.add(keras.layers.Dropout(0.3))  # drop 30% of units
    model.add(keras.layers.LSTM(512, return_sequences=True))
    model.add(keras.layers.Dropout(0.3))
    model.add(keras.layers.LSTM(512))
    model.add(keras.layers.Dense(256))  # fully connected layer of 256 neurons
    model.add(keras.layers.Dropout(0.3))
    model.add(keras.layers.Dense(num_pitch))  # number of distinct pitches
    model.add(keras.layers.Activation('softmax'))  # softmax -> probabilities
    # Cross-entropy loss, optimized with RMSProp.
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
    if weights_file is not None:  # i.e. when generating music
        # Load all layer parameters (weights) from the HDF5 file.
        model.load_weights(weights_file)
    return model
| StarcoderdataPython |
9738898 | <filename>setup.py
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(f_name):
    """Return the contents of *f_name*, resolved relative to this file.

    Uses a context manager so the file handle is closed deterministically
    (the previous one-liner leaked the handle until garbage collection).
    """
    with open(os.path.join(os.path.dirname(__file__), f_name)) as fh:
        return fh.read()
# Package metadata for tx-manager; install_requires lists runtime deps and
# test_suite points `setup.py test` at the tests package.
setup(
    name="tx-manager",
    packages=['tx_manager'],
    version="0.1.30",
    author="unfoldingWord",
    author_email="<EMAIL>",
    description="Classes for executing tX Manager",
    license="MIT",
    keywords="tX manager",
    url="https://github.org/unfoldingWord-dev/tx-manager",
    long_description=read('README.rst'),
    classifiers=[],
    install_requires=[
        'requests',
        'tx-shared-tools'
    ],
    test_suite="tests"
)
| StarcoderdataPython |
1839851 | <reponame>asantinc/python-architecture-patters<filename>tests/test_model.py<gh_stars>0
from datetime import datetime, date, timedelta
import pytest
from model import OrderLine, Batch, allocate, OutOfStockError
def make_batch_and_line(sku, batch_qty, line_qty, line_sku=None):
    """Create a (Batch, OrderLine) pair sharing *sku* unless *line_sku* overrides it."""
    line_sku = line_sku or sku
    batch = Batch("batch-001", sku, batch_qty)
    line = OrderLine("order-001", line_sku, line_qty)
    return batch, line
def test_allocating_to_batch_reduces_the_availability_quantity():
    batch = Batch("batch-001", "SMALL_TABLE", qty=20)
    line = OrderLine(reference="order-red", sku="SMALL_TABLE", qty=2)
    batch.allocate(line)
    assert batch.available_quantity == 18
def test_can_allocate_if_available_greater_than_required():
    batch, line = make_batch_and_line("CHAIR", 10, 5)
    batch.allocate(line)
    assert batch.available_quantity == 5
def test_cannot_allocate_if_available_smaller_than_required():
    # Over-allocation is refused silently: quantity stays untouched.
    batch, line = make_batch_and_line("CHAIR", 5, 10)
    batch.allocate(line)
    assert batch.available_quantity == 5
def test_can_allocate_if_available_equal_to_required():
    batch, line = make_batch_and_line("CHAIR", 5, 5)
    batch.allocate(line)
    assert batch.available_quantity == 0
def test_cannot_allocate_if_skus_do_not_match():
    batch, line = make_batch_and_line("CHAIR", 5, 5, line_sku="TABLE")
    batch.allocate(line)
    assert batch.available_quantity == 5
def test_cannot_deallocate_unallocated_lines():
    # Deallocating a line that was never allocated is a no-op.
    batch, unallocated_line = make_batch_and_line("CHAIR", 5, 5)
    batch.deallocate(unallocated_line)
    assert batch.available_quantity == 5
def test_can_deallocate_allocated_lines():
    batch, line = make_batch_and_line("CHAIR", 5, 5)
    batch.allocate(line)
    assert batch.available_quantity == 0
    batch.deallocate(line)
    assert batch.available_quantity == 5
def test_cannot_allocate_repeatedly():
    # Allocating the same line twice must only deduct once (idempotent).
    batch, line = make_batch_and_line("CHAIR", 10, 5)
    batch.allocate(line)
    assert batch.available_quantity == 5
    batch.allocate(line)
    assert batch.available_quantity == 5
def test_prefers_warehouse_batches_to_shipments():
    # eta=None marks in-stock inventory, which allocate() must prefer.
    in_stock_batch = Batch(ref="ref", qty=10, sku="sku", eta=None)
    shipment_batch = Batch(ref="ref", qty=10, sku="sku", eta=date.today())
    line = OrderLine(reference="xyz", sku="sku", qty=10)
    allocate(line, [in_stock_batch, shipment_batch])
    assert in_stock_batch.available_quantity == 0
    assert shipment_batch.available_quantity == 10
def test_prefers_earlier_batches():
    early_batch = Batch(ref="ref", qty=10, sku="sku", eta=date.today())
    later_batch = Batch(
        ref="ref", qty=10, sku="sku", eta=date.today() + timedelta(days=1)
    )
    line = OrderLine(reference="xyz", sku="sku", qty=10)
    allocate(line, [early_batch, later_batch])
    assert early_batch.available_quantity == 0
    assert later_batch.available_quantity == 10
def test_allocate_raises_out_of_stock_error():
    # Requesting more than the combined stock must raise, not partially fill.
    early_batch = Batch(ref="ref", qty=10, sku="sku", eta=date.today())
    later_batch = Batch(
        ref="ref", qty=10, sku="sku", eta=date.today() + timedelta(days=1)
    )
    line = OrderLine(reference="xyz", sku="sku", qty=20)
    with pytest.raises(OutOfStockError):
        allocate(line, [early_batch, later_batch])
| StarcoderdataPython |
4854042 | import dataclasses
from typing import Optional
from dis_snek.models import Guild, Member
from ElevatorBot.backendNetworking.http import BaseBackendConnection
from ElevatorBot.backendNetworking.routes import (
destiny_weapons_get_all_route,
destiny_weapons_get_top_route,
destiny_weapons_get_weapon_route,
)
from Shared.networkingSchemas.destiny import (
DestinyTopWeaponsInputModel,
DestinyTopWeaponsModel,
DestinyWeaponsModel,
DestinyWeaponStatsInputModel,
DestinyWeaponStatsModel,
)
@dataclasses.dataclass
class DestinyWeapons(BaseBackendConnection):
    """Backend client for the Destiny weapon-statistics routes."""
    discord_guild: Optional[Guild]
    discord_member: Optional[Member]
    async def get_all(self) -> DestinyWeaponsModel:
        """Get all weapons"""
        result = await self._backend_request(
            method="GET",
            route=destiny_weapons_get_all_route,
        )
        # convert to correct pydantic model
        return DestinyWeaponsModel.parse_obj(result.result)
    async def get_top(
        self, input_data: DestinyTopWeaponsInputModel, discord_id: Optional[int] = None
    ) -> DestinyTopWeaponsModel:
        """Get top weapons"""
        # Either the dataclass member or an explicit discord_id must
        # identify the target user.
        assert self.discord_member or discord_id
        result = await self._backend_request(
            method="POST",
            route=destiny_weapons_get_top_route.format(
                guild_id=self.discord_guild.id, discord_id=self.discord_member.id if self.discord_member else discord_id
            ),
            data=input_data,
        )
        # convert to correct pydantic model
        return DestinyTopWeaponsModel.parse_obj(result.result)
    async def get_weapon(self, input_data: DestinyWeaponStatsInputModel) -> DestinyWeaponStatsModel:
        """Get the specified weapon stat"""
        result = await self._backend_request(
            method="POST",
            route=destiny_weapons_get_weapon_route.format(
                guild_id=self.discord_guild.id, discord_id=self.discord_member.id
            ),
            data=input_data,
        )
        # convert to correct pydantic model
        return DestinyWeaponStatsModel.parse_obj(result.result)
| StarcoderdataPython |
11341897 | from pettingzoo.utils.deprecated_module import DeprecatedModule
# Aliases mapping retired environment versions to the current one; importing
# any of these routes through DeprecatedModule("name", old_version, new_version).
adversarial_pursuit_v0 = DeprecatedModule("adversarial_pursuit", "v0", "v3")
adversarial_pursuit_v1 = DeprecatedModule("adversarial_pursuit", "v1", "v3")
adversarial_pursuit_v2 = DeprecatedModule("adversarial_pursuit", "v2", "v3")
battle_v0 = DeprecatedModule("battle", "v0", "v3")
battle_v1 = DeprecatedModule("battle", "v1", "v3")
battle_v2 = DeprecatedModule("battle", "v2", "v3")
battlefield_v0 = DeprecatedModule("battlefield", "v0", "v3")
battlefield_v1 = DeprecatedModule("battlefield", "v1", "v3")
battlefield_v2 = DeprecatedModule("battlefield", "v2", "v3")
combined_arms_v0 = DeprecatedModule("combined_arms", "v0", "v5")
combined_arms_v1 = DeprecatedModule("combined_arms", "v1", "v5")
combined_arms_v2 = DeprecatedModule("combined_arms", "v2", "v5")
combined_arms_v3 = DeprecatedModule("combined_arms", "v3", "v5")
combined_arms_v4 = DeprecatedModule("combined_arms", "v4", "v5")
gather_v0 = DeprecatedModule("gather", "v0", "v3")
gather_v1 = DeprecatedModule("gather", "v1", "v3")
gather_v2 = DeprecatedModule("gather", "v2", "v3")
tiger_deer_v0 = DeprecatedModule("tiger_deer", "v0", "v3")
tiger_deer_v1 = DeprecatedModule("tiger_deer", "v1", "v3")
tiger_deer_v2 = DeprecatedModule("tiger_deer", "v2", "v3")
4947203 | <gh_stars>1-10
import copy
import numpy as np
from .utils import NAOParsing
from nasws.cnn.search_space.darts.operations import WSBNOPS
from nasws.cnn.search_space.darts.genotype import PRIMITIVES, Genotype
from nasws.cnn.search_space.darts.darts_search_space import DartsModelSpec
ALLOWED_OPS = PRIMITIVES
# Flat arch-encoding length per node count: 2 cells x 4 ints per node.
DARTS_Node2ArchLength = {
    k: k*2*4 for k in range(2, 5)
}
class NAOParsingDarts(NAOParsing):
    """Converters between DARTS model specs, flat arch lists and NAO sequences.

    An "arch" is a flat int list [node, op, node, op, ...], normal cell
    first, reduce cell second; a "seq" is the token encoding consumed by
    the NAO encoder/decoder (see parse_arch_to_seq).
    """
    def __init__(self, dataset, args) -> None:
        self.dataset = dataset
        self.args = args
        self.num_nodes = args.num_intermediate_nodes
        self.num_ops = len(PRIMITIVES)
    @staticmethod
    def augmentation(arch):
        """Swap the two (node, op) input branches of one random step per cell.

        This is a symmetry of the encoding, so the described architecture
        is unchanged while the flat representation differs.
        """
        split = len(arch) // 2
        num_nodes = len(arch) // 2 // 4
        new_arch = copy.deepcopy(arch)
        for i in range(2):
            rand = np.random.randint(0, num_nodes)
            start = i * split + rand* 4
            end = start + 4
            new_arch[start:end] = new_arch[start + 2: end] + new_arch[start: start+2]
        return new_arch
    def generate_arch(self, n, num_nodes, num_ops=8):
        """Sample *n* distinct random topologies from the dataset as archs.

        Here we know the architecture num_nodes = num_inter + 2, so we add
        another 1.
        """
        # def _get_arch():
        #     arch = []
        #     for i in range(2, num_nodes):
        #         p1 = np.random.randint(0, i)
        #         op1 = np.random.randint(0, num_ops)
        #         p2 = np.random.randint(0, i)
        #         op2 = np.random.randint(0 ,num_ops)
        #         arch.extend([p1, op1, p2, op2])
        #     return arch
        # archs = [_get_arch() + _get_arch() for i in range(n)] #[[[conv],[reduc]]]
        num_nodes = num_nodes or self.num_nodes
        archs = []
        ids = set()
        for _ in range(n):
            # Re-draw until an unseen model id comes back (no duplicates).
            while True:
                mid, model_spec = self.dataset.random_topology()
                if mid not in ids:
                    break
            archs.append(self.parse_model_spec_to_arch(model_spec))
            ids.add(mid)
        return archs
    @staticmethod
    def parse_arch_to_model_spec(arch, branch_length=None, B=None):
        """Decode a flat arch into a DartsModelSpec (B inferred from length)."""
        # we have two cell
        length = len(arch)
        conv_dag = arch[:length//2]
        reduc_dag = arch[length//2:]
        B = len(conv_dag) // 4
        def _parse_cell(cell):
            # cell[i] == node, cell[i+1] == op_id, reverse in the genotype.
            return [(PRIMITIVES[cell[i+1]], cell[i]) for i in range(0, len(cell), 2)]
        g = Genotype(
            normal=_parse_cell(conv_dag), normal_concat=list(range(2, 2+B)),
            reduce=_parse_cell(reduc_dag), reduce_concat=list(range(2, 2+B))
        )
        return DartsModelSpec.from_darts_genotype(g)
    @staticmethod
    def parse_model_spec_to_arch(model_spec):
        """
        Note that, the arch / seq in NAO training, we have , but in genotypes, we have the opposite.
        arch: [node, op ...]
        Geno: [(Op, node), ...]
        """
        arch = []
        g = model_spec.to_darts_genotype()
        for cell in [g.normal, g.reduce]:
            for c in cell:
                arch.extend([c[1], PRIMITIVES.index(c[0])])
        return arch
    # @staticmethod
    # def deserialize_arch(arch):
    #     if arch is None:
    #         return None, None
    #     # assume arch is the format [idex, op ...] where index is in [0, 5] and op in [0, 10]
    #     arch = list(map(int, arch.strip().split()))
    #     return conv_dag, reduc_dag
    # @staticmethod
    # def serialize_arch(arch):
    #     return ' '.join(map(str, arch[0])) + ' '.join(map(str, arch[1]))
    @staticmethod
    def parse_arch_to_seq(arch, branch_length=2, B=4):
        """Encode an arch as NAO tokens (nodes shifted by +1, ops offset).

        :param arch: when branch_length = 2, arch length = seq length.
        :param branch_length: 2 emits one token per op; 3 emits two.
        :return: sequence in a very WEIRD way.
        """
        assert branch_length in [2, 3]
        seq = []
        def _parse_op(op):
            # Two-token encoding for ops 0-4 only; other op ids fall
            # through and return None — TODO(review): confirm num_ops == 5
            # whenever branch_length == 3.
            if op == 0:
                return 7, 12
            if op == 1:
                return 8, 11
            if op == 2:
                return 8, 12
            if op == 3:
                return 9, 11
            if op == 4:
                return 10, 11
        for i in range(B*2): # two cell in one arch
            prev_node1 = arch[4*i]+1
            prev_node2 = arch[4*i+2]+1
            if branch_length == 2:
                op1 = arch[4*i+1] + 2 + B
                op2 = arch[4*i+3] + 2 + B
                seq.extend([prev_node1, op1, prev_node2, op2])
            else:
                op11, op12 = _parse_op(arch[4*i+1])
                op21, op22 = _parse_op(arch[4*i+3])
                seq.extend([prev_node1, op11, op12, prev_node2, op21, op22]) #nopknopk
        return seq
    @staticmethod
    def parse_seq_to_arch(seq, branch_length=2, B=4):
        """Inverse of parse_arch_to_seq: decode NAO tokens back to an arch.

        :param seq: token sequence, normal-cell half followed by reduce-cell half.
        :param branch_length: must match the value used when encoding.
        :return: flat [node, op, ...] arch list.
        """
        n = len(seq)
        assert branch_length in [2, 3]
        assert n // 2 // (B) // 2 == branch_length
        def _parse_arch(arch_seq):
            arch_arch = []
            def _recover_op(op1, op2):
                # Inverse of _parse_op above (ops 0-4 only).
                if op1 == 7:
                    return 0
                if op1 == 8:
                    if op2 == 11:
                        return 1
                    if op2 == 12:
                        return 2
                if op1 == 9:
                    return 3
                if op1 == 10:
                    return 4
            if branch_length == 2:
                for i in range(B):
                    p1 = arch_seq[4*i] - 1
                    op1 = arch_seq[4*i+1] - (2 + B)
                    p2 = arch_seq[4*i+2] - 1
                    op2 = arch_seq[4*i+3] - (2 + B)
                    arch_arch.extend([p1, op1, p2, op2])
                return arch_arch
            else:
                for i in range(B):
                    p1 = arch_seq[6*i] - 1
                    op11 = arch_seq[6*i+1]
                    op12 = arch_seq[6*i+2]
                    op1 = _recover_op(op11, op12)
                    p2 = arch_seq[6*i+3] - 1
                    op21 = arch_seq[6*i+4]
                    op22 = arch_seq[6*i+5]
                    op2 = _recover_op(op21, op22)
                    arch_arch.extend([p1, op1, p2, op2])
                return arch_arch
        conv_seq = seq[:n//2]
        reduc_seq = seq[n//2:]
        conv_arch = _parse_arch(conv_seq)
        reduc_arch = _parse_arch(reduc_seq)
        arch = conv_arch + reduc_arch
        return arch
| StarcoderdataPython |
3521037 | <reponame>mosesbaraza/docx
from . import docxfile
from . import docxmodify
| StarcoderdataPython |
279437 | ##--<NAME>
##--v2.0.1 [2013-10-21]
# See install notes for directions
# This script must be run with root permissions
# sudo python setup.py ( /-client/-server) (-link)
import sys , os , time
##--Bash install name--##
##--Ex: fl , filel , flocket, f-l , etc--##
# Names of the launcher symlinks created under /usr/bin by the functions below.
bashClientName = 'fl'
bashServerName = 'fl-server'
def makeClientLink(curdir = False):
    """Create (or replace) the /usr/bin launcher symlink for the client.

    curdir -- when True, link to the copy in the current working directory
    instead of the installed copy under /usr/local/bin/filelocket.
    """
    link = '/usr/bin/' + bashClientName
    # os.remove/os.symlink instead of shelling out to rm/ln: same effect,
    # no subshell, and failures surface as OSError instead of being ignored.
    if os.path.lexists(link): os.remove(link)
    if curdir: os.symlink(os.getcwd() + '/client/client.py', link)
    else: os.symlink('/usr/local/bin/filelocket/client/client.py', link)
def makeServerLink(curDir = False):
    """Create (or replace) the /usr/bin launcher symlink for the server.

    curDir -- when True, link to the copy in the current working directory
    instead of the installed copy under /usr/local/bin/filelocket.
    """
    link = '/usr/bin/' + bashServerName
    # os.remove/os.symlink instead of shelling out to rm/ln: same effect,
    # no subshell, and failures surface as OSError instead of being ignored.
    if os.path.lexists(link): os.remove(link)
    if curDir: os.symlink(os.getcwd() + '/server/server.py', link)
    else: os.symlink('/usr/local/bin/filelocket/server/server.py', link)
def makeClient():
    # Copy the client sources to /usr/local/bin/filelocket/client, make the
    # entry point executable and (re)create its /usr/bin symlink.
    if not os.path.isdir('/usr/local/bin/filelocket'): os.system('mkdir /usr/local/bin/filelocket')
    if not os.path.isdir('/usr/local/bin/filelocket/client'): os.system('mkdir /usr/local/bin/filelocket/client')
    os.system('cp client/client.py /usr/local/bin/filelocket/client')
    os.system('cp client/clientCommands.py /usr/local/bin/filelocket/client')
    os.system('chmod a+x /usr/local/bin/filelocket/client/client.py')
    makeClientLink()
def makeServer():
    # Same as makeClient, but for the server component.
    if not os.path.isdir('/usr/local/bin/filelocket'): os.system('mkdir /usr/local/bin/filelocket')
    if not os.path.isdir('/usr/local/bin/filelocket/server'): os.system('mkdir /usr/local/bin/filelocket/server')
    os.system('cp server/server.py /usr/local/bin/filelocket/server')
    os.system('cp server/serverCommands.py /usr/local/bin/filelocket/server')
    os.system('chmod a+x /usr/local/bin/filelocket/server/server.py')
    makeServerLink()
def main():
    # Python 2 script (print statements). Dispatch on argv:
    #   no args          -> install both client and server
    #   -client/-server  -> install just that component
    #   -link            -> only (re)create the /usr/bin symlinks to the cwd copy
    if len(sys.argv) == 1:
        makeClient()
        makeServer()
    elif len(sys.argv) == 2:
        if sys.argv[1] == '-client': makeClient()
        elif sys.argv[1] == '-server': makeServer()
        elif sys.argv[1] == '-link':
            makeClientLink(True)
            makeServerLink(True)
        else: print 'Usage: sudo python setup.py ( /-client/-server) (-link)'
    elif len(sys.argv) == 3:
        if sys.argv[1] == '-client' and sys.argv[2] == '-link': makeClientLink(True)
        elif sys.argv[1] == '-server' and sys.argv[2] == '-link': makeServerLink(True)
        else: print 'Usage: sudo python setup.py ( /-client/-server]) (-link)'
    else: print 'Usage: sudo python setup.py ( /-client/-server) (-link)'
main()
3411092 | <filename>esi_bot/request.py
"""Make GET requests to ESI."""
import re
import json
import time
import html
import http
from esi_bot import ESI
from esi_bot import ESI_CHINA
from esi_bot import SNIPPET
from esi_bot import command
from esi_bot import do_request
from esi_bot import multi_request
from esi_bot.utils import esi_base_url
def _initial_specs():
"""Return an initial empty specs dictionary."""
return {
x: {"timestamp": 0, "spec": {}} for x in ("latest", "legacy", "dev")
}
# Per-cluster cache of swagger specs, keyed by base URL then spec version.
ESI_SPECS = {
    ESI: _initial_specs(),
    ESI_CHINA: _initial_specs(),
}
@command(trigger=re.compile(
    r"^<?(?P<esi>https://esi\.(evetech\.net|evepc\.163\.com))?"
    r"/(?P<esi_path>.+?)>?$"
))
def request(match, msg):
    """Make an ESI GET request, if the path is known.
    Options:
    --headers nest the response and add the headers
    """
    match_group = match.groupdict()
    # An explicit Serenity host in the message overrides the default cluster.
    if "evepc.163.com" in (match_group["esi"] or ""):
        base_url = ESI_CHINA
    else:
        base_url = esi_base_url(msg)
    version, *req_sections = match_group["esi_path"].split("/")
    # If the first path segment isn't a known spec version, treat it as part
    # of the route and default to "latest".
    if version not in ESI_SPECS[base_url]:
        req_sections.insert(0, version)
        version = "latest"
    params = ""
    if "?" in req_sections[-1]:
        if req_sections[-1].startswith("?"):
            # Query string after a trailing slash: the last segment is
            # nothing but parameters.
            params = req_sections.pop()
            params = params[1:]
        else:
            # qsparams passed w/out trailing slash
            final_path, params = req_sections.pop().split("?")
            req_sections.append(final_path)
        # Undo Slack's HTML escaping (e.g. &amp;) before sending.
        params = html.unescape(params)
    path = "/{}/".format("/".join(x for x in req_sections if x))
    if _valid_path(base_url, path, version):
        url = "{}/{}{}{}{}".format(
            base_url,
            version,
            path,
            "?" * int(params != ""),
            params,
        )
        start = time.time()
        res = do_request(url, return_response=True)
        # Fall back to raw text when the body isn't valid JSON.
        try:
            content = res.json()
        except ValueError:
            content = res.text
        # Pretty status like "200 OK"; unknown codes render as the bare number.
        try:
            status = http.HTTPStatus(res.status_code)  # pylint: disable=E1120
        except ValueError:
            status = str(res.status_code)
        else:
            status = "{} {}".format(status.value, status.name)  # pylint: disable=E1101
        if "--headers" in msg.args:
            res = {"response": content, "headers": dict(res.headers)}
        else:
            res = content
        return SNIPPET(
            content=json.dumps(res, sort_keys=True, indent=4),
            filename="response.json",
            filetype="json",
            comment="{} ({:,.0f}ms)".format(
                status,
                (time.time() - start) * 1000,
            ),
            title=url,
        )
    return "failed to find GET {} in the {} ESI{} spec".format(
        path,
        version,
        " China" * int(base_url == ESI_CHINA),
    )
@command(trigger="refresh")
def refresh(msg):
"""Refresh internal specs."""
base_url = esi_base_url(msg)
refreshed = do_refresh(base_url)
if refreshed:
return "I refreshed my internal copy of the {}{}{} spec{}{}".format(
", ".join(refreshed[:-1]),
" and " * int(len(refreshed) > 1),
refreshed[-1],
"s" * int(len(refreshed) != 1),
" for ESI China" * int(base_url == ESI_CHINA),
)
return "my internal specs are up to date (try again later)"
def do_refresh(base_url):
    """DRY helper to refresh all stale ESI specs.

    Discovers any new spec versions from /versions/, then re-fetches
    every spec that is missing or older than five minutes.

    Returns:
        list of updated ESI spec versions
    """
    status, versions = do_request("{}/versions/".format(base_url))
    if status == 200:
        for version in versions:
            if version not in ESI_SPECS[base_url]:
                ESI_SPECS[base_url][version] = {"timestamp": 0, "spec": {}}
    spec_urls = {}  # url: version
    for version, details in ESI_SPECS[base_url].items():
        # Refresh a spec when we have never fetched it, or when the
        # cached copy is older than five minutes. The original compared
        # against time.time() + 300, which is true for every cached
        # timestamp (they are always in the past), so the cache never
        # took effect and every call re-fetched everything.
        if not details["spec"] or details["timestamp"] < time.time() - 300:
            url = "{}/{}/swagger.json".format(base_url, version)
            spec_urls[url] = version
    updates = {}
    for url, result in multi_request(spec_urls.keys()).items():
        status, spec = result
        if status == 200:
            updates[spec_urls[url]] = {"timestamp": time.time(), "spec": spec}
    ESI_SPECS[base_url].update(updates)
    return list(updates)
def _valid_path(base_url, path, version):
    """Check if the path is known.

    Returns True only when `path` exactly matches a GET route in the
    cached spec for `version`, with each {parameter} placeholder
    matching a single path segment.
    """
    try:
        spec = ESI_SPECS[base_url][version]["spec"]
    except KeyError:
        return False
    for spec_path, operations in spec["paths"].items():
        # we could pre-validate arguments.... *effort* though
        # Non-greedy placeholder replacement: the original r"{.*}" was
        # greedy and swallowed everything between the first '{' and the
        # last '}' on paths with multiple parameters. fullmatch anchors
        # the pattern so a spec path can no longer match a mere prefix
        # of the requested path (re.match only anchors the start).
        pattern = re.sub(r"{[^}]*}", "[^/]+", spec_path)
        if re.fullmatch(pattern, path):
            # we only make get requests
            return "get" in operations
    return False
| StarcoderdataPython |
5013271 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 13 22:06:06 2020
@author: zuoxichen
"""
def String_to_list (Strings):
    """Split *Strings* on single spaces and return the resulting list.

    str.split already returns a list, so the original's extra list()
    wrapper was redundant and has been removed.
    """
    return Strings.split(" ")
def main_approach (num1,num2):
    """Return how many rounds until num1 overtakes num2.

    Each round num1 is tripled and num2 is doubled; counting stops as
    soon as the first value is strictly greater than the second.

    The original used a while/else with no break, where the else branch
    always runs and is equivalent to a plain return.
    """
    steps = 0
    a = num1
    b = num2
    while a <= b:
        steps += 1
        a = a * 3
        b = b * 2
    return steps
# Read one line of two space-separated integers from stdin and print
# the number of rounds computed by main_approach.
input1=input()
list2=String_to_list(input1)
num1=int(list2[0])
num2=int(list2[1])
print(main_approach(num1, num2))
| StarcoderdataPython |
6494868 | """Author: <NAME>, Copyright 2019"""
import tensorflow as tf
from mineral.algorithms.critics.critic import Critic
class TwinCritic(Critic):
    """Combines two critics by taking the elementwise minimum of their
    outputs (the clipped double-estimate trick), and keeps both critics
    trained and soft-updated in lockstep."""

    def __init__(
        self,
        critic1,
        critic2,
        **kwargs
    ):
        """Store the two underlying critics.

        Args:
            critic1: first critic instance.
            critic2: second critic instance.
            **kwargs: forwarded to the Critic base class.
        """
        Critic.__init__(self, **kwargs)
        self.critic1 = critic1
        self.critic2 = critic2

    def _minimum_over_critics(
        self,
        method_name,
        observations,
        actions,
        rewards,
        terminals
    ):
        # Evaluate the same method on both critics (critic1 first, then
        # critic2, matching the original call order) and keep the
        # elementwise smaller result.
        outputs = [
            getattr(critic, method_name)(
                observations,
                actions,
                rewards,
                terminals)
            for critic in (self.critic1, self.critic2)
        ]
        return tf.minimum(outputs[0], outputs[1])

    def bellman_target_values(
        self,
        observations,
        actions,
        rewards,
        terminals
    ):
        """Elementwise minimum of both critics' bellman targets."""
        return self._minimum_over_critics(
            'bellman_target_values',
            observations,
            actions,
            rewards,
            terminals)

    def discount_target_values(
        self,
        observations,
        actions,
        rewards,
        terminals
    ):
        """Elementwise minimum of both critics' discount targets."""
        return self._minimum_over_critics(
            'discount_target_values',
            observations,
            actions,
            rewards,
            terminals)

    def update_critic(
        self,
        observations,
        actions,
        rewards,
        terminals,
        bellman_target_values,
        discount_target_values
    ):
        """Train both critics against the same shared targets."""
        for critic in (self.critic1, self.critic2):
            critic.update_critic(
                observations,
                actions,
                rewards,
                terminals,
                bellman_target_values,
                discount_target_values)

    def soft_update(
        self
    ):
        """Soft-update the target networks of both critics."""
        for critic in (self.critic1, self.critic2):
            critic.soft_update()

    def get_advantages(
        self,
        observations,
        actions,
        rewards,
        terminals
    ):
        """Elementwise minimum of both critics' advantage estimates."""
        return self._minimum_over_critics(
            'get_advantages',
            observations,
            actions,
            rewards,
            terminals)
| StarcoderdataPython |
9722432 |
# Print the multiples of ten from 0 through 110 (the last value below 119).
for value in range(0, 119, 10):
    print(value)
282890 | """
analytics.py
Author: <NAME>
Description:
This module implements the Analytics class which provides handy statistics from
data obtained while running the synthesizer. The .dat files produced from calling
the save_data method of the plotter class can analyzed and the mean, std deviation
and the like can be returned.
"""
import os
from src.Evaluation.EvaluationConfig.evaluation_config_cheby import EvaluationConfigCheby
from bayes_opt import BayesianOptimization
from src.SA.plotter import *
from math import ceil
from src.dsl import *
from src.Evaluation.EvaluationConfig.evaluation_config import *
from src.Evaluation.evaluation import *
from statistics import *
from src.SA.start_search import *
os.environ['SDL_VIDEODRIVER'] = 'dummy'
class Analytics:
    """Handy statistics from data obtained while running the synthesizer.

    Can analyse the .dat files produced by the plotter's save_data
    method, search for the minimum number of games per evaluation, and
    inspect per-batch score statistics for a fixed strategy program.
    """

    def analyse_dat_file(self, filepath, name, var_num):
        """Print mean/median/variance/stdev for one column of a .dat file.

        Args:
            filepath: path to a whitespace-separated .dat file where
                lines starting with '#' are comments.
            name: human-readable column label used in the output.
            var_num: index of the column to analyse.
        """
        assert os.path.exists(filepath)
        print('Stats for ', filepath)
        # Collect the whole column as floats first. The original called
        # mean()/variance()/stdev() once per line on a single *string*
        # value, which raises a TypeError inside the statistics module.
        values = []
        with open(filepath, 'r') as data_file:
            for line in data_file:
                if line.startswith('#') or not line.strip():
                    continue
                values.append(float(line.split()[int(var_num)]))
        print(f'\tmean of {name}: ', mean(values))
        print(f'\tmedian of {name}:', median(values))
        print(f'\tvariance of {name}:', variance(values))
        print(f'\tstd. deviation of {name}:', stdev(values))

    def launch_search(self, **kwargs):
        """Objective function for the Bayesian optimizer.

        Runs one simulated-annealing search with the given (possibly
        fractional) 'total_games_played', then returns the negative of
        the mean score variance so that the optimizer's maximization
        minimizes variance. Requires self.counter to have been
        initialized (done by find_min_games).
        """
        arg = kwargs['total_games_played']
        total_games_played = ceil(arg)
        print('total_games_played', total_games_played)
        # init SA variables: scale the time budget with the game count
        time_limit = 300 * (total_games_played / 5)
        log_file = 'log_find_min_games' + str(self.counter)
        self.counter += 1
        # Turn on optimizer without triage
        run_optimizer = {
            'run_optimizer': True,
            'iterations': 10,
            'kappa': 2.5,
            'triage': False,
            'parallel': False
        }
        game = 'Catcher'
        sa_option = 2
        verbose = False
        generate_plot = False
        save_data = True
        plot_filename = 'find_min_games_graph'
        ibr = False
        print(f'Calling search - {self.counter}')
        # call search
        start_sa(
            time_limit,
            log_file,
            run_optimizer,
            game,
            sa_option,
            verbose,
            generate_plot,
            save_data,
            plot_filename,
            ibr,
            total_games_played
        )
        # extract variances and find mean
        plotter = Plotter()
        print('Getting variances')
        path = os.path.join('data/' + 'score_variances_find_min_games_data.dat')
        # Renamed from 'time' to avoid shadowing the time module.
        timestamps, variances = plotter.parse_dat_file(path)
        # return negative of mean variance
        return -1 * mean(variances)

    def find_min_games(self):
        """Bayesian-optimize 'total_games_played' in [5, 50].

        Returns:
            (best objective value, best parameter dict) as reported by
            the optimizer.
        """
        self.counter = 0
        print('starting optimizer')
        optimizer = BayesianOptimization(
            f=self.launch_search,
            pbounds={'total_games_played': (5, 50)},
            verbose=0
        )
        optimizer.maximize(
            init_points=2,
            n_iter=5
        )
        return optimizer.max['target'], optimizer.max['params']

    def calc_batch_size(self):
        """Evaluate a fixed Pong strategy and print per-batch statistics.

        Scores are grouped into batches of 5 games; for each batch the
        raw scores, stdev, mean and max are printed, followed by
        aggregate statistics over all scores and over the batch maxima.
        (Commented-out Catcher and FlappyBird programs from the original
        were removed as dead code; retrieve them from version control if
        needed.)
        """
        pong_p = NestedITEDepth1.new(
            NonPlayerObjectApproaching(),
            Strategy.new(
                IT.new(
                    GreaterThan.new( Minus.new( PlayerPosition(), Times.new( VarScalar.new('paddle_width'), Constant.new(0.85) ) ), NonPlayerObjectPosition() ),
                    ReturnAction.new( VarFromArray.new('actions', Constant.new(0)) )
                ),
                Strategy.new(
                    IT.new(
                        LessThan.new( Plus.new( PlayerPosition(), Times.new( VarScalar.new('paddle_width'), Constant.new(0.85) ) ), NonPlayerObjectPosition() ),
                        ReturnAction.new( VarFromArray.new('actions', Constant.new(1)) )
                    ),
                    ReturnAction.new( VarFromArray.new('actions', Constant.new(2)) )
                )
            ),
            ReturnAction.new( VarFromArray.new('actions', Constant.new(2)) )
        )
        eval_config_attr = form_basic_attr_dict(
            True,
            1,
            0.95,
            50,
            2100,
            Evaluation.MIN_SCORE,
            5
        )
        eval_config_attr[EvaluationConfigCheby.k_eval_name] = 10
        eval_config_attr[EvaluationConfigCheby.by_win_rate_name] = True
        eval_config_factory = EvaluationConfigFactory()
        eval_config = eval_config_factory.get_config('CHEBY', eval_config_attr)
        eval_config.set_best_eval_variance(2.01)
        factory = EvaluationFactory(0, eval_config)
        eval_fun = factory.get_eval_fun('Pong')
        start = time.time()
        scores, avg_score = eval_fun.evaluate(pong_p, verbose=True)
        end = time.time()
        print(f'Running time: {end - start} seconds\n')
        counter = 0
        batch_count = 1
        batch = []
        max_scores = []
        # NOTE(review): a trailing partial batch (len(scores) not a
        # multiple of 5) is silently dropped — confirm that is intended.
        while counter < len(scores):
            batch.append(scores[counter])
            counter += 1
            if counter % 5 == 0:
                print(f'batch {batch_count}: {batch}, stdev {stdev(batch)}, mean: {mean(batch)}, max: {max(batch)}')
                max_scores.append(max(batch))
                batch = []
                batch_count += 1
        print(f'stdev scores {stdev(scores)}')
        print(f'mean scores {round(mean(scores), 2)}')
        print(f'stdev of max scores {stdev(max_scores)}')
        print(f'mean of max scores {round(mean(max_scores), 2)}')
        print(f'returned avg score {avg_score}')
# Script entry point: only the batch-size analysis is currently enabled;
# the remaining calls are kept commented out for manual experimentation.
if __name__ == '__main__':
    analytics = Analytics()
    # print('min sample size required: ', analytics.find_min_sample_size(p1, p2, 'Catcher'))
    # min_mean_variance, min_sample = analytics.find_min_games()
    # print(min_mean_variance, min_sample)
    analytics.calc_batch_size()
9777245 | <filename>spacq/devices/tektronix/tests/server/test_awg5014b.py
import logging
log = logging.getLogger(__name__)
from nose.tools import eq_
from numpy import linspace
from numpy.testing import assert_array_almost_equal
from unittest import main
from spacq.interface.units import Quantity
from spacq.tests.tool.box import AssertHandler, DeviceServerTestCase
from ... import awg5014b
class AWG5014BTest(DeviceServerTestCase):
    """Integration tests for the Tektronix AWG5014B arbitrary waveform
    generator, run against a real device obtained from the test server.
    """

    def obtain_device(self):
        """Acquire the AWG5014B device under test from the server."""
        return DeviceServerTestCase.obtain_device(self, impl=awg5014b.AWG5014B,
            manufacturer='Tektronix', model='AWG5014B')

    def testMarkerValues(self):
        """
        Set the various marker values.

        Only a few markers are assigned; the assertions also verify that
        the untouched markers keep their defaults (delay 0, high 1 V,
        low 0 V).
        """
        awg = self.obtain_device()
        awg.reset()
        awg.channels[1].markers[1].delay = Quantity(1, 'ns')
        awg.channels[1].markers[1].high = Quantity(0.5, 'V')
        awg.channels[1].markers[2].delay = Quantity(0.1, 'ns')
        awg.channels[2].markers[1].low = Quantity(-100, 'mV')
        eq_(awg.channels[1].markers[1].delay.value, 1e-9)
        eq_(awg.channels[1].markers[2].delay.value, 0.1e-9)
        eq_(awg.channels[2].markers[1].delay.value, 0)
        eq_(awg.channels[2].markers[2].delay.value, 0)
        eq_(awg.channels[1].markers[1].high.value, 0.5)
        eq_(awg.channels[1].markers[2].high.value, 1)
        eq_(awg.channels[2].markers[1].high.value, 1)
        eq_(awg.channels[2].markers[2].high.value, 1)
        eq_(awg.channels[1].markers[1].low.value, 0)
        eq_(awg.channels[1].markers[2].low.value, 0)
        eq_(awg.channels[2].markers[1].low.value, -0.1)
        eq_(awg.channels[2].markers[2].low.value, 0)

    def testScenario(self):
        """
        Run through a simple scenario.

        Note: Verification should also be done manually based on the AWG output.
        """
        # Local 'log' deliberately shadows the module-level logger: this
        # handler captures log records so they can be asserted on.
        log = AssertHandler()

        awg = self.obtain_device()
        awg.reset()

        assert not awg.enabled

        # Setup
        existing_waveforms = awg.waveform_names

        data1 = linspace(-1.0, 1.0, 21)
        data2 = linspace(1.0, -1.0, 21)

        log.flush()
        # Marker 3 does not exist on this device; the set_waveform call
        # is expected to warn and ignore it (asserted just below).
        awg.channels[1].set_waveform(data1, {
            1: ([1, 1, 1, 0, 0] * len(data1))[:len(data1)],
            2: ([0, 0, 0, 1, 1] * len(data1))[:len(data1)],
            3: [1, 2, 3, 4],
        })
        log.assert_logged('warning', 'marker 3 ignored: \[1, 2, 3, 4\]')

        awg.channels[2].set_waveform(data2, name='Test 2')

        awg.sampling_rate = Quantity(200, 'MHz')

        awg.channels[1].enabled = True
        awg.channels[1].amplitude = Quantity(0.8, 'V')
        awg.channels[2].enabled = True
        awg.channels[2].amplitude = Quantity(0.4, 'V')

        awg.channels[3].waveform_name = 'Test 2'
        awg.channels[3].enabled = True
        awg.channels[4].waveform_name = 'Channel 1'

        # Channel 3's waveform is cleared again, so it must report an
        # empty name and stay disabled in the verification below.
        del awg.channels[3].waveform_name

        awg.run_mode = 'triggered'
        awg.enabled = True

        # Verify
        eq_(awg.sampling_rate.value, 2e8)
        eq_(awg.waveform_names, existing_waveforms + ['Channel 1', 'Test 2'])

        assert_array_almost_equal(awg.get_waveform('Channel 1'), data1, 4)
        eq_(awg.channels[1].amplitude.value, 0.8)
        assert_array_almost_equal(awg.get_waveform('Test 2'), data2, 4)
        eq_(awg.channels[2].amplitude.value, 0.4)

        for ch in [1, 2]:
            eq_(awg.channels[ch].enabled, True)
        for ch in [3, 4]:
            eq_(awg.channels[ch].enabled, False)

        for ch in [1, 4]:
            eq_(awg.channels[ch].waveform_name, 'Channel 1')
        eq_(awg.channels[2].waveform_name, 'Test 2')
        eq_(awg.channels[3].waveform_name, '')

        eq_(awg.run_mode, 'triggered')
        assert awg.waiting_for_trigger
        assert awg.enabled

        # A manual trigger in triggered mode leaves the device armed.
        awg.trigger()
        assert awg.waiting_for_trigger
        assert awg.enabled

        awg.run_mode = 'continuous'
        assert not awg.waiting_for_trigger
        assert awg.enabled
# Allow running this test module directly.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
367584 | # -*- coding: utf-8 -*-
"""
Created on Sun Aug 29 21:46:34 2021
@author: User
"""
#####################################################################
# Escribí otra leer_arboles(nombre_archivo) que lea el archivo indicado y
# devuelva una lista de diccionarios con la información de todos los árboles
# en el archivo. La función debe devolver una lista conteniendo un diccionario
# por cada árbol con todos los datos.
# Vamos a llamar arboleda a esta lista.
#####################################################################
# long,lat,id_arbol,altura_tot,diametro,inclinacio,id_especie,nombre_com,nombre_cie,tipo_folla,espacio_ve,ubicacion,nombre_fam,nombre_gen,origen,coord_x,coord_y
import csv
import os
import matplotlib.pyplot as plt
import numpy as np
def leer_arboles(nombre_archivo):
    """Read the tree-census CSV and return a list of dicts, one per tree.

    The first CSV row supplies the keys; every following row becomes a
    dict mapping those header names to the row's string values.

    The original also built two Jacarandá-specific demo lists (exercise
    4.16/4.17) whose results were discarded; that dead code crashed on
    any CSV lacking the 'nombre_com'/'altura_tot' columns and has been
    removed.

    Args:
        nombre_archivo: path to a UTF-8 encoded CSV file with a header row.

    Returns:
        list[dict[str, str]]: one dict per data row.
    """
    with open(nombre_archivo, encoding="utf8") as f:
        rows = csv.reader(f)
        headers = next(rows)
        arboleda = [dict(zip(headers, row)) for row in rows]
    return arboleda
########################################################
# Ejercicio 4.18: Diccionario con medidas
# recibir un diccionario con tres entradas (una por especie),
#cada una con una lista asociada conteniendo 4112, 3150 y 3255 pares de números (altos y diámetros), respectivamente.
########################################################
def medidas_de_especies(especies, arboleda):
    """Map each species name to its list of (height, diameter) pairs.

    Heights are converted to int and diameters to float from the
    string-valued census rows. A species with no matching trees maps to
    an empty list.

    Args:
        especies: iterable of common species names to look up.
        arboleda: list of tree dicts as returned by leer_arboles.

    Returns:
        dict mapping each species name to a list of (int, float) pairs.
    """
    return {
        especie: [
            (int(arbol['altura_tot']), float(arbol['diametro']))
            for arbol in arboleda
            if arbol['nombre_com'] == especie
        ]
        for especie in especies
    }
# diccionario = { clave: valor for clave in claves }
def graficar_altura():
    """Plot a histogram of Jacarandá tree heights (Exercise 5.25).

    Reads the park-trees CSV from ../Data relative to the working
    directory, keeps rows whose common name is 'Jacarandá', and draws a
    25-bin histogram of their total heights on the current figure.
    """
    # Exercise 5.25: histogram of Jacarandá heights.
    fn = os.path.join('..', 'Data', 'arbolado-en-espacios-verdes.csv')
    arboleda = leer_arboles(fn)
    # Heights are stored as strings in the CSV; truncate to int metres.
    altos = [int(arbol['altura_tot']) for arbol in arboleda if arbol['nombre_com']=='Jacarandá']
    plt.hist(altos,bins=25)
    plt.xlabel("alto (m)")
    plt.ylabel("cantidad (un)")
    plt.title("Cantidad de Jacarandás y sus alturas")
    return
def graficar_alt_diam(pares):
    """Scatterplot of diameter vs. height (Exercise 5.26).

    Args:
        pares: sequence of (height, diameter) pairs — presumably the
            per-species lists from medidas_de_especies; TODO confirm.
    """
    # Exercise 5.26: scatterplot (diameter vs height) of Jacarandás.
    datos = np.array(pares)
    # NOTE(review): d1 holds column 0 (height) and h1 column 1
    # (diameter) — the names are swapped relative to their content, but
    # the plotted axes do match the labels (x=diameter, y=height).
    d1 = np.array(datos)[:,0]
    h1 = np.array(datos)[:,1]
    N = len(h1)
    # Random colours and marker areas are purely decorative.
    colors = np.random.rand(N)
    area = (10 * np.random.rand(N))**2
    plt.scatter(h1, d1, s = area, c = colors, alpha = 0.5)
    plt.xlim(0,150)
    plt.ylim(0,40)
    plt.xlabel("diametro (cm)")
    plt.ylabel("alto (m)")
    plt.title("Relación diámetro-alto")
    return
| StarcoderdataPython |
6562585 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Lewandowski-Kurowicka-Joe distribution on correlation matrices.
The sampler follows the "onion" method from
[1] <NAME>, <NAME>, and <NAME>,
"Generating random correlation matrices based on vines and extended
onion method," Journal of Multivariate Analysis 100 (2009), pp
1989-2001.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow as tf
from tensorflow_probability.python.distributions import beta
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import normal
from tensorflow_probability.python.distributions import seed_stream
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
__all__ = [
'LKJ',
]
def _uniform_unit_norm(dimension, shape, dtype, seed):
  """Returns a batch of points chosen uniformly from the unit hypersphere."""
  # Standard normal draws are spherically symmetric, so normalizing them
  # yields the uniform distribution on the unit sphere.
  # gaussians shape: shape + [dimension]
  gaussians = normal.Normal(
      loc=dtype.as_numpy_dtype(0.),
      scale=dtype.as_numpy_dtype(1.)).sample(
          tf.concat([shape, [dimension]], axis=0), seed=seed())
  return gaussians / tf.norm(gaussians, ord=2, axis=-1)[..., tf.newaxis]
def _replicate(n, tensor):
  """Replicate the input tensor n times along a new (major) dimension."""
  # TODO(axch) Does this already exist somewhere? Should it get contributed?
  expanded = tf.expand_dims(tensor, axis=0)
  tile_multiples = tf.concat([[n], tf.ones_like(tensor.shape)], axis=0)
  return tf.tile(expanded, tile_multiples)
class LKJ(distribution.Distribution):
"""The LKJ distribution on correlation matrices.
This is a one-parameter family of distributions on correlation matrices. The
probability density is proportional to the determinant raised to the power of
the parameter: `pdf(X; eta) = Z(eta) * det(X) ** (eta - 1)`, where `Z(eta)` is
a normalization constant. The uniform distribution on correlation matrices is
the special case `eta = 1`.
The distribution is named after Lewandowski, Kurowicka, and Joe, who gave a
sampler for the distribution in [(Lewandowski, Kurowicka, Joe, 2009)][1].
#### Examples
```python
# Initialize a single 3x3 LKJ with concentration parameter 1.5
dist = tfp.distributions.LKJ(dimension=3, concentration=1.5)
# Evaluate this at a batch of two observations, each in R^{3x3}.
x = ... # Shape is [2, 3, 3].
dist.prob(x) # Shape is [2].
# Draw 6 LKJ-distributed 3x3 correlation matrices
ans = dist.sample(sample_shape=[2, 3], seed=42)
# shape of ans is [2, 3, 3, 3]
```
"""
def __init__(self,
dimension,
concentration,
validate_args=False,
allow_nan_stats=True,
name='LKJ'):
"""Construct LKJ distributions.
Args:
dimension: Python `int`. The dimension of the correlation matrices
to sample.
concentration: `float` or `double` `Tensor`. The positive concentration
parameter of the LKJ distributions. The pdf of a sample matrix `X` is
proportional to `det(X) ** (concentration - 1)`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value `NaN` to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
ValueError: If `dimension` is negative.
"""
if dimension < 0:
raise ValueError(
'There are no negative-dimension correlation matrices.')
parameters = dict(locals())
with tf.name_scope(name, values=[dimension, concentration]):
concentration = tf.convert_to_tensor(
concentration,
name='concentration',
dtype=dtype_util.common_dtype([concentration],
preferred_dtype=tf.float32))
with tf.control_dependencies([
# concentration >= 1
# TODO(b/111451422) Generalize to concentration > 0.
tf.assert_non_negative(concentration - 1.),
] if validate_args else []):
self._dimension = dimension
self._concentration = tf.identity(concentration, name='concentration')
super(LKJ, self).__init__(
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
reparameterization_type=reparameterization.NOT_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration],
name=name)
@property
def dimension(self):
"""Dimension of returned correlation matrices."""
return self._dimension
@property
def concentration(self):
"""Concentration parameter."""
return self._concentration
def _batch_shape_tensor(self):
return tf.shape(self.concentration)
def _batch_shape(self):
return self.concentration.shape
def _event_shape_tensor(self):
return tf.constant([self.dimension, self.dimension], dtype=tf.int32)
def _event_shape(self):
return tf.TensorShape([self.dimension, self.dimension])
def _sample_n(self, num_samples, seed=None, name=None):
"""Returns a Tensor of samples from an LKJ distribution.
Args:
num_samples: Python `int`. The number of samples to draw.
seed: Python integer seed for RNG
name: Python `str` name prefixed to Ops created by this function.
Returns:
samples: A Tensor of correlation matrices with shape `[n, B, D, D]`,
where `B` is the shape of the `concentration` parameter, and `D`
is the `dimension`.
Raises:
ValueError: If `dimension` is negative.
"""
if self.dimension < 0:
raise ValueError(
'Cannot sample negative-dimension correlation matrices.')
# Notation below: B is the batch shape, i.e., tf.shape(concentration)
seed = seed_stream.SeedStream(seed, 'sample_lkj')
with tf.name_scope('sample_lkj', name, [self.concentration]):
if not self.concentration.dtype.is_floating:
raise TypeError('The concentration argument should have floating type,'
' not {}'.format(self.concentration.dtype.name))
concentration = _replicate(num_samples, self.concentration)
concentration_shape = tf.shape(concentration)
if self.dimension <= 1:
# For any dimension <= 1, there is only one possible correlation matrix.
shape = tf.concat([
concentration_shape, [self.dimension, self.dimension]], axis=0)
return tf.ones(shape=shape, dtype=self.concentration.dtype)
beta_conc = concentration + (self.dimension - 2.) / 2.
beta_dist = beta.Beta(concentration1=beta_conc, concentration0=beta_conc)
# Note that the sampler below deviates from [1], by doing the sampling in
# cholesky space. This does not change the fundamental logic of the
# sampler, but does speed up the sampling.
# This is the correlation coefficient between the first two dimensions.
# This is also `r` in reference [1].
corr12 = 2. * beta_dist.sample(seed=seed()) - 1.
# Below we construct the Cholesky of the initial 2x2 correlation matrix,
# which is of the form:
# [[1, 0], [r, sqrt(1 - r**2)]], where r is the correlation between the
# first two dimensions.
# This is the top-left corner of the cholesky of the final sample.
first_row = tf.concat([
tf.ones_like(corr12)[..., tf.newaxis],
tf.zeros_like(corr12)[..., tf.newaxis]], axis=-1)
second_row = tf.concat([
corr12[..., tf.newaxis],
tf.sqrt(1 - corr12**2)[..., tf.newaxis]], axis=-1)
chol_result = tf.concat([
first_row[..., tf.newaxis, :],
second_row[..., tf.newaxis, :]], axis=-2)
for n in range(2, self.dimension):
# Loop invariant: on entry, result has shape B + [n, n]
beta_conc -= 0.5
# norm is y in reference [1].
norm = beta.Beta(
concentration1=n/2.,
concentration0=beta_conc
).sample(seed=seed())
# distance shape: B + [1] for broadcast
distance = tf.sqrt(norm)[..., tf.newaxis]
# direction is u in reference [1].
# direction shape: B + [n]
direction = _uniform_unit_norm(
n, concentration_shape, self.concentration.dtype, seed)
# raw_correlation is w in reference [1].
raw_correlation = distance * direction # shape: B + [n]
# This is the next row in the cholesky of the result,
# which differs from the construction in reference [1].
# In the reference, the new row `z` = chol_result @ raw_correlation^T
# = C @ raw_correlation^T (where as short hand we use C = chol_result).
# We prove that the below equation is the right row to add to the
# cholesky, by showing equality with reference [1].
# Let S be the sample constructed so far, and let `z` be as in
# reference [1]. Then at this iteration, the new sample S' will be
# [[S z^T]
# [z 1]]
# In our case we have the cholesky decomposition factor C, so
# we want our new row x (same size as z) to satisfy:
# [[S z^T] [[C 0] [[C^T x^T] [[CC^T Cx^T]
# [z 1]] = [x k]] [0 k]] = [xC^t xx^T + k**2]]
# Since C @ raw_correlation^T = z = C @ x^T, and C is invertible,
# we have that x = raw_correlation. Also 1 = xx^T + k**2, so k
# = sqrt(1 - xx^T) = sqrt(1 - |raw_correlation|**2) = sqrt(1 -
# distance**2).
new_row = tf.concat(
[raw_correlation, tf.sqrt(1. - norm[..., tf.newaxis])], axis=-1)
# Finally add this new row, by growing the cholesky of the result.
chol_result = tf.concat([
chol_result,
tf.zeros_like(chol_result[..., 0][..., tf.newaxis])], axis=-1)
chol_result = tf.concat(
[chol_result, new_row[..., tf.newaxis, :]], axis=-2)
result = tf.matmul(chol_result, chol_result, transpose_b=True)
# The diagonal for a correlation matrix should always be ones. Due to
# numerical instability the matmul might not achieve that, so manually set
# these to ones.
result = tf.matrix_set_diag(result, tf.ones(
shape=tf.shape(result)[:-1], dtype=result.dtype.base_dtype))
# This sampling algorithm can produce near-PSD matrices on which standard
# algorithms such as `tf.cholesky` or `tf.linalg.self_adjoint_eigvals`
# fail. Specifically, as documented in b/116828694, around 2% of trials
# of 900,000 5x5 matrices (distributed according to 9 different
# concentration parameter values) contained at least one matrix on which
# the Cholesky decomposition failed.
return result
def _validate_dimension(self, x):
x = tf.convert_to_tensor(x, name='x')
if x.shape[-2:].is_fully_defined():
if x.shape.dims[-2] == x.shape.dims[-1] == self.dimension:
pass
else:
raise ValueError(
'Input dimension mismatch: expected [..., {}, {}], got {}'.format(
self.dimension, self.dimension, x.shape.dims))
elif self.validate_args:
msg = 'Input dimension mismatch: expected [..., {}, {}], got {}'.format(
self.dimension, self.dimension, tf.shape(x))
with tf.control_dependencies(
[tf.assert_equal(tf.shape(x)[-2], self.dimension, message=msg),
tf.assert_equal(tf.shape(x)[-1], self.dimension, message=msg)]):
x = tf.identity(x)
return x
def _validate_correlationness(self, x):
if not self.validate_args:
return x
checks = [
tf.assert_less_equal(
tf.cast(-1., dtype=x.dtype.base_dtype),
x,
message='Correlations must be >= -1.'),
tf.assert_less_equal(
x,
tf.cast(1., x.dtype.base_dtype),
message='Correlations must be <= 1.'),
tf.assert_near(
tf.matrix_diag_part(x),
tf.cast(1., x.dtype.base_dtype),
message='Self-correlations must be = 1.'),
tf.assert_near(
x, tf.matrix_transpose(x),
message='Correlation matrices must be symmetric')
]
with tf.control_dependencies(checks):
return tf.identity(x)
def _log_prob(self, x):
# Despite what one might infer from Eq 15 in [1], the formula
# given for the normalization constant should be read in the sense
# of division, not multiplication.
x = self._validate_dimension(x)
x = self._validate_correlationness(x)
normalizer = self._log_normalization()
return self._log_unnorm_prob(x) - normalizer
def _log_unnorm_prob(self, x, name=None):
"""Returns the unnormalized log density of an LKJ distribution.
Args:
x: `float` or `double` `Tensor` of correlation matrices. The shape of `x`
must be `B + [D, D]`, where `B` broadcasts with the shape of
`concentration`.
name: Python `str` name prefixed to Ops created by this function.
Returns:
log_p: A Tensor of the unnormalized log density of each matrix element of
`x`, with respect to an LKJ distribution with parameter the
corresponding element of `concentration`.
"""
with tf.name_scope('log_unnorm_prob_lkj', name, [self.concentration]):
x = tf.convert_to_tensor(x, name='x')
# The density is det(matrix) ** (concentration - 1).
# Computing the determinant with `logdet` is usually fine, since
# correlation matrices are Hermitian and PSD. But in some cases, for a
# PSD matrix whose eigenvalues are close to zero, `logdet` raises an error
# complaining that it is not PSD. The root cause is the computation of the
# cholesky decomposition in `logdet`. Hence, we use the less efficient but
# more robust `slogdet` which does not use `cholesky`.
#
# An alternative would have been to check allow_nan_stats and use
# eigenvalues = tf.linalg.self_adjoint_eigvals(x)
# psd_mask = tf.cast(
# tf.reduce_min(eigenvalues, axis=-1) >= 0, dtype=x.dtype)
# tf.where(psd_mask, answer, float('-inf'))
# to emit probability 0 for inputs that are not PSD, without ever raising
# an error. More care must be taken, as due to numerical stability issues,
# self_adjoint_eigvals can return slightly negative eigenvalues even for
# a PSD matrix.
_, logdet = tf.linalg.slogdet(x)
answer = (self.concentration - 1.) * logdet
return answer
def _log_normalization(self, name='log_normalization'):
    """Returns the log normalization of an LKJ distribution.
    Args:
      name: Python `str` name prefixed to Ops created by this function.
    Returns:
      log_z: A Tensor of the same shape and dtype as `concentration`, containing
        the corresponding log normalizers.
    """
    # The formula is from <NAME> al [1], p. 1999, from the
    # proof that eqs 16 and 17 are equivalent.
    with tf.name_scope('log_normalization_lkj', name, [self.concentration]):
        logpi = np.log(np.pi)
        ans = tf.zeros_like(self.concentration)
        # Accumulate one term per k in [1, dimension): each term contributes a
        # pi power, an lgamma term depending on k, and a constant lgamma
        # subtraction.
        for k in range(1, self.dimension):
            ans += logpi * (k / 2.)
            ans += tf.lgamma(self.concentration + (self.dimension - 1 - k) / 2.)
            ans -= tf.lgamma(self.concentration + (self.dimension - 1) / 2.)
        return ans
def _mean(self):
    """Returns the mean: identity matrices batched like `concentration`."""
    # The mean of the LKJ distribution (with any concentration parameter) is the
    # identity matrix. Proof: Imagine a correlation matrix on D variables, and
    # imagine reversing the sense of the kth of those variables. The
    # off-diagonal entries in row and column k change sign, but LKJ is symmetric
    # with respect to this operation (because the determinant doesn't change).
    # Ergo, the mean must be invariant under it (for any k), and hence all the
    # off-diagonal entries must be 0.
    return self._identity()
def _identity(self):
    """Return batched identity matrices with `concentration`'s dtype.

    The batch shape is taken from the (dynamic) shape of `concentration`.
    """
    batch = tf.shape(self.concentration)
    answer = tf.eye(
        num_rows=self.dimension, batch_shape=batch,
        dtype=self.concentration.dtype)
    # set_shape only necessary because tf.eye doesn't do it itself: b/111413915
    answer.set_shape(
        answer.shape[:-2].concatenate([self.dimension, self.dimension]))
    return answer
| StarcoderdataPython |
6686126 | <filename>jaqalpaq/parser/tree.py<gh_stars>1-10
# Copyright 2020 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains
# certain rights in this software.
"""Functions and data types creating and acting on parse trees."""
from abc import ABC, abstractmethod
from functools import wraps, lru_cache
import pathlib
from lark import Lark, Transformer, Tree, Token
from lark.exceptions import UnexpectedInput
from .identifier import Identifier
from jaqalpaq import JaqalError
def parse_with_lark(text, *args, **kwargs):
    """Parse the given text using Lark. Return the Lark parse tree.

    Raises:
        JaqalParseError: if the text does not parse; the underlying lark
            exception is attached as the cause.
    """
    parser = make_lark_parser(*args, **kwargs)
    try:
        return parser.parse(text)
    except UnexpectedInput as exc:
        # Not every UnexpectedInput subclass carries `expected`/`token`
        # (e.g. UnexpectedCharacters does not), so read them defensively
        # rather than raising an AttributeError inside the handler.
        expected = list(getattr(exc, "expected", []))
        found = getattr(exc, "token", "")
        # Chain the lark exception so the full parse context is preserved.
        raise JaqalParseError(
            f"Expected: {expected}, found: `{found}`",
            line=exc.line,
            column=exc.column,
        ) from exc
@lru_cache(maxsize=16)
def make_lark_parser(*args, **kwargs):
    """Create a lark parser with some default arguments.

    Results are cached, so the returned parser is shared between callers.
    """
    merged_kwargs = dict(start="start", parser="lalr")
    merged_kwargs.update(kwargs)
    with open(get_grammar_path(), "r") as grammar_file:
        return PreprocessingLarkParser(grammar_file, *args, **merged_kwargs)
class PreprocessingLarkParser(Lark):
    """Subclass of lark parsers that run preparsing steps. As this may be
    cached it should be considered immutable once created."""

    def parse(self, *args, **kwargs):
        # Parse normally, then expand QUALIFIED_IDENTIFIER tokens into
        # qualified_identifier subtrees for downstream consumers.
        raw_tree = super().parse(*args, **kwargs)
        return expand_qualified_identifiers(raw_tree)
def get_grammar_path(filename="jaqal_grammar.lark"):
    """Return the path to the lark grammar file."""
    grammar_dir = pathlib.Path(__file__).parent
    return grammar_dir / filename
def expand_qualified_identifiers(tree):
    """Expand qualified identifier tokens into trees. This step is a hack to
    disallow spaces between elements of a qualified identifier while still
    letting downstream elements see the identifier broken out by element."""
    return QualifiedIdentifierTransformer(visit_tokens=True).transform(tree)
class LarkTransformerBase(Transformer):
    """Base for transformers based on the Lark Transformer class."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The last token read. This is used to get an approximation of
        # the position of errors. We start with an invalid, zero-size
        # token at the beginning to avoid dereferencing an invalid
        # object before the first token is read. Subclass token handlers
        # decorated with `token_method` update this attribute.
        self.last_token = Token(
            "INVALID",
            "",
            pos_in_stream=0,
            line=0,
            column=0,
            end_line=0,
            end_column=0,
            end_pos=0,
        )
    ##
    # Position properties
    #
    # Note: These are approximate as they pick the last token
    # processed inside an expression and use that as its
    # position. This should be good enough for debugging purposes
    #
    @property
    def current_line(self):
        """Return a line associated with the current item being processed."""
        return self.last_token.line
    @property
    def current_column(self):
        """Return a column associated with the current item being
        processed."""
        return self.last_token.column
    @property
    def current_pos(self):
        """Return a position in the input character stream associated with the
        current item being processed."""
        return self.last_token.pos_in_stream
def token_method(method):
    """Decorator used in classes derived from LarkTransformerBase to
    indicate they are handling a token."""
    @wraps(method)
    def bookkeeping_wrapper(self, token):
        # Remember the token so the position properties can report an
        # approximate location in error messages.
        self.last_token = token
        return method(self, token)
    return bookkeeping_wrapper
class QualifiedIdentifierTransformer(LarkTransformerBase):
    """Transformer class to replace instances of QUALIFIED_IDENTIFIER tokens
    with qualified_identifier trees."""
    @token_method
    def QUALIFIED_IDENTIFIER(self, string):
        """Split a qualified identifier token into a tree of IDENTIFIER tokens,
        assigning each part an approximate position in the original text."""
        parts = Identifier.parse(string)
        children = []
        # Assign positions in the original text to portions of the
        # token. This doesn't have to be perfect as it's only useful
        # in error messages. Fix: track a running base offset of the
        # unconsumed remainder within the original token; previously each
        # part's offset was taken relative to the shrinking remainder, which
        # under-reported column/pos for every part after the first.
        remaining = str(string)
        base = 0  # index of `remaining` within the original token text
        for part in parts:
            offset = base + remaining.find(part)
            # Advance past this part before searching for the next one.
            remaining = remaining[(offset - base) + len(part):]
            base = offset + len(part)
            # Assume a token cannot cross lines, which I think is true
            # for Jaqal
            token = Token(
                "IDENTIFIER",
                part,
                pos_in_stream=string.pos_in_stream + offset,
                line=string.line,
                column=string.column + offset,
                end_line=string.end_line,
                end_column=string.column + offset + len(part),
                end_pos=string.pos_in_stream + offset + len(part),
            )
            children.append(token)
        return Tree("qualified_identifier", children=children)
class VisitTransformer(LarkTransformerBase):
    """A Lark transformer that traverses the tree and calls the appropriate methods in the ParseTreeVisitor class.
    If you're unsure of whether you should be using this class, you should not be using this class.
    """
    def __init__(self, visitor):
        # visit_tokens=True makes lark invoke the upper-case token methods below.
        super().__init__(visit_tokens=True)
        self._visitor = visitor
    def start(self, args):
        # Top-level rule: exactly two children, the header and body statement lists.
        header_statements, body_statements = args
        return self._visitor.visit_program(header_statements, body_statements)
    def register_statement(self, args):
        (array_declaration,) = args
        return self._visitor.visit_register_statement(array_declaration)
    def map_statement(self, args):
        target, source = args
        return self._visitor.visit_map_statement(target, source)
    def let_statement(self, args):
        identifier, number = args
        return self._visitor.visit_let_statement(identifier, number)
    def usepulses_statement(self, args):
        # Only the restricted `from <module> usepulses *` form is supported;
        # any other shape is rejected up front.
        if len(args) != 2:
            raise JaqalError("Only from foo usepulses * implemented")
        if args[0].data == "from_clause":
            if args[1].data != "all_module":
                raise JaqalError("Only from foo usepulses * implemented")
            identifier = args[0].children[0]
            # The builtin `all` is used as a sentinel meaning "import everything".
            objects = all
        else:
            raise JaqalError("Only from foo usepulses * implemented")
        return self._visitor.visit_usepulses_statement(identifier, objects)
    def body_statements(self, args):
        # Drop statements the visitor chose to elide (returned None).
        return [stmt for stmt in args if stmt is not None]
    def header_statements(self, args):
        return [stmt for stmt in args if stmt is not None]
    def gate_statement(self, args):
        gate_name = args[0]
        gate_args = args[1:]
        return self._visitor.visit_gate_statement(gate_name, gate_args)
    def macro_definition(self, args):
        # args[0] is the macro_header tree (name followed by parameters);
        # args[1] is the macro's gate block.
        identifiers = args[0].children
        gate_block = args[1]
        macro_name = identifiers[0]
        macro_args = identifiers[1:]
        return self._visitor.visit_macro_definition(macro_name, macro_args, gate_block)
    def macro_header(self, args):
        macro_name = args[0]
        macro_args = args[1:]
        ret = self._visitor.visit_macro_header(macro_name, macro_args)
        if ret is None:
            # This allows macro_header to be optional in the visitor
            return Tree("macro_header", args)
        else:
            return ret
    def macro_gate_block(self, args):
        block = args[0]
        ret = self._visitor.visit_macro_gate_block(block)
        if ret is None:
            # This allows macro_block to be optional in the visitor
            return Tree("macro_gate_block", args)
        else:
            return ret
    def loop_statement(self, args):
        repetition_count, block = args
        return self._visitor.visit_loop_statement(repetition_count, block)
    def sequential_gate_block(self, args):
        return self._visitor.visit_sequential_gate_block(args)
    def parallel_gate_block(self, args):
        return self._visitor.visit_parallel_gate_block(args)
    def array_declaration(self, args):
        identifier, size = args
        return self._visitor.visit_array_declaration(identifier, size)
    def array_element(self, args):
        identifier, index = args
        return self._visitor.visit_array_element(identifier, index)
    def array_element_qual(self, args):
        identifier, index = args
        return self._visitor.visit_array_element_qual(identifier, index)
    def array_slice(self, args):
        # args[1:] are the start/stop/step values produced by the
        # array_slice_* rules below (each possibly None).
        identifier = args[0]
        slice_args = args[1:]
        index_slice = slice(*slice_args)
        return self._visitor.visit_array_slice(identifier, index_slice)
    def array_slice_start(self, args):
        return self._array_slice_element(args)
    def array_slice_stop(self, args):
        return self._array_slice_element(args)
    def array_slice_step(self, args):
        return self._array_slice_element(args)
    def _array_slice_element(self, args):
        # Slice bounds are optional; an empty child list means "not given".
        if args:
            return args[0]
        else:
            return None
    def let_identifier(self, args):
        identifier = args[0]
        return self._visitor.visit_let_identifier(identifier)
    def let_or_map_identifier(self, args):
        identifier = args[0]
        return self._visitor.visit_let_or_map_identifier(identifier)
    def qualified_identifier(self, args):
        names = tuple(name for name in args)
        return self._visitor.visit_qualified_identifier(names)
    @token_method
    def IDENTIFIER(self, string):
        return self._visitor.visit_identifier(string)
    @token_method
    def SIGNED_NUMBER(self, string):
        return self._visitor.visit_signed_number(string)
    @token_method
    def NUMBER(self, string):
        return self._visitor.visit_number(string)
    @token_method
    def INTEGER(self, string):
        return self._visitor.visit_integer(string)
    @token_method
    def SIGNED_INTEGER(self, string):
        return self._visitor.visit_signed_integer(string)
class ParseTreeVisitor(ABC):
    """A visitor used to traverse a parse tree. Although it works directly on parse trees used by the underlying
    parser library, the user is not exposed to this detail.
    Methods in this visitor are designed to be overridden. Those without default implementations (mostly token-level
    methods) must be overridden to implement the visitor. The parse tree is visited from the bottom up. Therefore
    each method gets the results of lower visitations as its arguments, except for tokens, which get the raw string if
    they are overridden.
    """
    def visit(self, tree):
        """Visit this tree and return the result of successively calling the visit_* methods."""
        self.transformer = VisitTransformer(self)
        try:
            return self.transformer.transform(tree)
        except Exception as exc:
            # Wrap any error in a JaqalParseError carrying an approximate
            # source position; chain the original exception so its traceback
            # is preserved for debugging.
            raise JaqalParseError(
                str(exc), self.transformer.current_line, self.transformer.current_column
            ) from exc
    @property
    def current_line(self):
        """Return a line associated with the current item being processed."""
        if not hasattr(self, "transformer"):
            raise JaqalError("Cannot call current_line before visit")
        return self.transformer.current_line
    @property
    def current_column(self):
        """Return a column associated with the current item being
        processed."""
        if not hasattr(self, "transformer"):
            raise JaqalError("Cannot call current_column before visit")
        return self.transformer.current_column
    @property
    def current_pos(self):
        """Return a position in the input character stream associated with the
        current item being processed."""
        if not hasattr(self, "transformer"):
            raise JaqalError("Cannot call current_pos before visit")
        return self.transformer.current_pos
    ##
    # Token-level methods
    #
    def visit_identifier(self, identifier_string):
        """Convert an IDENTIFIER token to a plain string."""
        return str(identifier_string)
    def visit_signed_number(self, string):
        """Parse a SIGNED_NUMBER token: float if it has a decimal point or
        exponent, otherwise int."""
        if "." in string or "e" in string or "E" in string:
            return float(string)
        else:
            return int(string)
    def visit_number(self, string):
        """Parse a NUMBER token: float if it has a decimal point or exponent,
        otherwise int."""
        if "." in string or "e" in string or "E" in string:
            return float(string)
        else:
            return int(string)
    def visit_integer(self, string):
        """Parse an INTEGER token into an int."""
        return int(string)
    def visit_signed_integer(self, string):
        """Parse a SIGNED_INTEGER token into an int."""
        return int(string)
    ##
    # Mandatory overrides
    #
    @abstractmethod
    def visit_program(self, header_statements, body_statements):
        """Visit the 'start' rule in the grammar. Header statements and body statements are automatically gathered
        into a list after calling the appropriate header or body statement on each."""
        pass
    @abstractmethod
    def visit_register_statement(self, array_declaration):
        pass
    @abstractmethod
    def visit_map_statement(self, target, source):
        pass
    @abstractmethod
    def visit_let_statement(self, identifier, number):
        pass
    @abstractmethod
    def visit_usepulses_statement(self, identifier, objects):
        """Visit a usepulses statement. The identifier is the name of the
        module to import (possibly with namespaces). objects is either None, all,
        or a list of identifiers. None means the usepulses was imported with its
        namespace. all (the Python built-in function) means all objects in that
        namespace were imported into the global namespace. Finally, a list of
        identifiers means those identifiers are pulled into the global namespace."""
        pass
    @abstractmethod
    def visit_gate_statement(self, gate_name, gate_args):
        """Visit a gate. The args are gathered into a list or identifiers, numbers, and array elements."""
        pass
    @abstractmethod
    def visit_macro_definition(self, name, arguments, block):
        """Visit a macro definition. The arguments are gathered into a list, but the block is merely the result of
        the appropriate visit_*_block method."""
        pass
    def visit_macro_header(self, name, arguments):
        """Visit the head of a macro. This override is optional as the information will be passed to
        visit_macro_definition."""
        pass
    def visit_macro_gate_block(self, block):
        """Visit the block of a macro. This override is optional as the information will be passed to
        visit_macro_definition."""
        pass
    @abstractmethod
    def visit_loop_statement(self, repetition_count, block):
        """Visit a loop statement. The repetition count is either an integer or identifier."""
        pass
    @abstractmethod
    def visit_sequential_gate_block(self, statements):
        """Visit a gate block of sequential statements. Each statement is a gate statement, macro definition, or
        loop statement. Therefore it is important to be able to differentiate between the results of the appropriate
        visit_* methods."""
        pass
    @abstractmethod
    def visit_parallel_gate_block(self, statements):
        """Same as visit_sequential_gate_block, but intended for parallel execution."""
        pass
    @abstractmethod
    def visit_array_declaration(self, identifier, size):
        """Visit an array declaration, currently used in map and register statements. The identifier is the label
        the user wishes to use, and the size is either an identifier or integer."""
        pass
    @abstractmethod
    def visit_array_element(self, identifier, index):
        """Visit an array, dereferenced to a single element. The index is either an identifier or integer."""
        pass
    @abstractmethod
    def visit_array_element_qual(self, identifier, index):
        """Visit an array, dereferenced to a single element. The index is either an identifier or integer. The
        identifier in this case is a qualified identifier."""
        pass
    @abstractmethod
    def visit_array_slice(self, identifier, index_slice):
        """Visit an array dereferenced by slice, as used in the map statement. The identifier is the name of the
        existing array, and index_slice is a Python slice object. None represents the lack of a bound, an integer a
        definite bound, and a string is an identifier used as that bound."""
        pass
    @abstractmethod
    def visit_let_identifier(self, identifier):
        """Visit an identifier that can only exist if it was previously declared by a let statement."""
        pass
    @abstractmethod
    def visit_let_or_map_identifier(self, identifier):
        """Visit an identifier that must be declared in either a let or map statement."""
        pass
    @abstractmethod
    def visit_qualified_identifier(self, names):
        """Visit an identifier qualified with zero or more namespaces. The identifier's name is in the most-significant
        index."""
        pass
class TreeManipulators:
    """Static helpers for constructing, classifying, and deconstructing Jaqal
    parse trees, so callers do not depend on the underlying Lark Tree/Token
    representation directly."""
    ##
    # New methods to construct parts of the tree
    #
    @staticmethod
    def make_program(header_statements, body_statements):
        return Tree(
            "start",
            [
                Tree("header_statements", header_statements),
                Tree("body_statements", body_statements),
            ],
        )
    @staticmethod
    def make_register_statement(array_declaration):
        return Tree("register_statement", [array_declaration])
    @staticmethod
    def make_map_statement(target, source):
        return Tree("map_statement", [target, source])
    @staticmethod
    def make_let_statement(identifier, number):
        return Tree("let_statement", [identifier, number])
    @staticmethod
    def make_usepulses_statement(identifier, objects):
        # Only the `from <module> usepulses *` form (objects is the builtin
        # `all` sentinel) can currently be represented.
        if objects is not all:
            raise JaqalError("Only from foo usepulses * implemented")
        from_clause = Tree("from_clause", [identifier])
        all_module = Tree("all_module", [])
        return Tree("usepulses_statement", [from_clause, all_module])
    @staticmethod
    def make_gate_statement(gate_name, gate_args):
        return Tree("gate_statement", [gate_name] + gate_args)
    @classmethod
    def make_macro_definition(cls, name, arguments, block):
        macro_header = cls.make_macro_header(name, arguments)
        macro_gate_block = cls.make_macro_gate_block(block)
        return Tree("macro_definition", [macro_header, macro_gate_block])
    @staticmethod
    def make_macro_header(name, arguments):
        return Tree("macro_header", [name] + arguments)
    @classmethod
    def make_macro_gate_block(cls, block):
        if cls.is_macro_gate_block(block):
            # This allows use for much more transparent uses of this method and allows other methods to ignore
            # the exact form of the gate block they receive, which in turn makes them more flexible.
            return block
        return Tree("macro_gate_block", [block])
    @classmethod
    def make_loop_statement(cls, repetition_count, block):
        return Tree(
            "loop_statement", [cls.enforce_integer_if_numeric(repetition_count), block]
        )
    @staticmethod
    def make_sequential_gate_block(statements):
        return Tree("sequential_gate_block", statements)
    @staticmethod
    def make_parallel_gate_block(statements):
        return Tree("parallel_gate_block", statements)
    @classmethod
    def make_array_declaration(cls, identifier, size):
        return Tree(
            "array_declaration", [identifier, cls.enforce_integer_if_numeric(size)]
        )
    @classmethod
    def make_array_element(cls, identifier, index):
        return Tree(
            "array_element", [identifier, cls.enforce_signed_integer_if_numeric(index)]
        )
    @classmethod
    def make_array_element_qual(cls, identifier, index):
        return Tree(
            "array_element_qual",
            [identifier, cls.enforce_signed_integer_if_numeric(index)],
        )
    @classmethod
    def make_array_slice(cls, identifier, index_slice):
        # Absent slice bounds are represented as empty child lists on the
        # array_slice_* subtrees.
        index_start_children = (
            [cls.enforce_signed_integer_if_numeric(index_slice.start)]
            if index_slice.start is not None
            else []
        )
        index_stop_children = (
            [cls.enforce_signed_integer_if_numeric(index_slice.stop)]
            if index_slice.stop is not None
            else []
        )
        index_step_children = (
            [cls.enforce_signed_integer_if_numeric(index_slice.step)]
            if index_slice.step is not None
            else []
        )
        index_start = Tree("array_slice_start", index_start_children)
        index_stop = Tree("array_slice_stop", index_stop_children)
        index_step = Tree("array_slice_step", index_step_children)
        # NOTE(review): index_start/stop/step are always Tree instances here,
        # so this None-filter never removes anything; it is dead defensiveness.
        indices = [
            index
            for index in [index_start, index_stop, index_step]
            if index is not None
        ]
        return Tree("array_slice", [identifier] + indices)
    @staticmethod
    def make_let_identifier(identifier):
        return Tree("let_identifier", [identifier])
    @staticmethod
    def make_let_or_map_identifier(identifier):
        return Tree("let_or_map_identifier", [identifier])
    @staticmethod
    def make_let_or_integer(identifier):
        return Tree("let_or_integer", [identifier])
    @classmethod
    def make_qualified_identifier(cls, names):
        # Accepts a mix of IDENTIFIER tokens and plain strings; strings are
        # wrapped into tokens.
        children = []
        for name in names:
            if cls.is_identifier(name):
                children.append(name)
            else:
                children.append(cls.make_identifier(name))
        return Tree("qualified_identifier", children)
    @staticmethod
    def make_identifier(identifier_string):
        return Token("IDENTIFIER", identifier_string)
    @staticmethod
    def make_signed_number(number):
        if not isinstance(number, float) and not isinstance(number, int):
            raise JaqalError(f"Expected number, found {number}")
        return Token("SIGNED_NUMBER", str(number))
    @staticmethod
    def make_number(number):
        if (
            not isinstance(number, float) and not isinstance(number, int)
        ) or number < 0:
            raise JaqalError(f"Expected non-negative number, found {number}")
        return Token("NUMBER", str(number))
    @staticmethod
    def make_integer(number):
        if not isinstance(number, int) or number < 0:
            raise JaqalError(f"Expected non-negative integer, found {number}")
        return Token("INTEGER", str(number))
    @staticmethod
    def make_signed_integer(number):
        if not isinstance(number, int):
            raise JaqalError(f"Expected integer, found {number}")
        return Token("SIGNED_INTEGER", str(number))
    @classmethod
    def enforce_integer_if_numeric(cls, number):
        """Coerce a numeric token to INTEGER, raising if it has a fractional
        part or is negative; non-numeric tokens (identifiers) pass through."""
        if cls.is_integer(number):
            return number
        elif (
            cls.is_signed_integer(number)
            or cls.is_number(number)
            or cls.is_signed_number(number)
        ):
            # A signed number token can be converted to a float but not an int, so we have a workaround here.
            if float(number) < 0 or float(number) != int(float(number)):
                raise JaqalError(f"Expected integer, found {number}")
            return cls.make_integer(int(float(number)))
        else:
            # Likely an identifier
            return number
    @classmethod
    def enforce_signed_integer_if_numeric(cls, number):
        """Coerce a numeric token to SIGNED_INTEGER, raising if it has a
        fractional part; non-numeric tokens (identifiers) pass through."""
        if cls.is_signed_integer(number):
            return number
        elif cls.is_integer(number):
            return cls.make_signed_integer(int(number))
        elif cls.is_number(number) or cls.is_signed_number(number):
            # A signed number token can be converted to a float but not an int, so we have a workaround here.
            if float(number) != int(float(number)):
                raise JaqalError(f"Expected signed integer, found {number}")
            return cls.make_signed_integer(int(float(number)))
        else:
            return number
    ##
    # New methods to check if a portion of a tree or token is of a given type
    #
    @classmethod
    def is_program(cls, tree):
        return cls._is_tree(tree, "start")
    @classmethod
    def is_register_statement(cls, tree):
        return cls._is_tree(tree, "register_statement")
    @classmethod
    def is_map_statement(cls, tree):
        return cls._is_tree(tree, "map_statement")
    @classmethod
    def is_let_statement(cls, tree):
        return cls._is_tree(tree, "let_statement")
    @classmethod
    def is_body_statements(cls, tree):
        # Note: The visitor would not visit this directly but as part of visiting the whole program
        return cls._is_tree(tree, "body_statements")
    @classmethod
    def is_header_statements(cls, tree):
        return cls._is_tree(tree, "header_statements")
    @classmethod
    def is_gate_statement(cls, tree):
        return cls._is_tree(tree, "gate_statement")
    @classmethod
    def is_macro_definition(cls, tree):
        return cls._is_tree(tree, "macro_definition")
    @classmethod
    def is_macro_header(cls, tree):
        return cls._is_tree(tree, "macro_header")
    @classmethod
    def is_macro_gate_block(cls, tree):
        return cls._is_tree(tree, "macro_gate_block")
    @classmethod
    def is_loop_statement(cls, tree):
        return cls._is_tree(tree, "loop_statement")
    @classmethod
    def is_sequential_gate_block(cls, tree):
        return cls._is_tree(tree, "sequential_gate_block")
    @classmethod
    def is_parallel_gate_block(cls, tree):
        return cls._is_tree(tree, "parallel_gate_block")
    @classmethod
    def is_array_declaration(cls, tree):
        return cls._is_tree(tree, "array_declaration")
    @classmethod
    def is_array_element(cls, tree):
        return cls._is_tree(tree, "array_element")
    @classmethod
    def is_array_slice(cls, tree):
        return cls._is_tree(tree, "array_slice")
    @classmethod
    def is_let_identifier(cls, tree):
        return cls._is_tree(tree, "let_identifier")
    @classmethod
    def is_let_or_map_identifier(cls, tree):
        return cls._is_tree(tree, "let_or_map_identifier")
    @classmethod
    def is_identifier(cls, token):
        return cls._is_token(token, "IDENTIFIER")
    @classmethod
    def is_qualified_identifier(cls, tree):
        return cls._is_tree(tree, "qualified_identifier")
    @classmethod
    def is_signed_number(cls, token):
        return cls._is_token(token, "SIGNED_NUMBER")
    @classmethod
    def is_number(cls, token):
        return cls._is_token(token, "NUMBER")
    @classmethod
    def is_integer(cls, token):
        return cls._is_token(token, "INTEGER")
    @classmethod
    def is_signed_integer(cls, token):
        return cls._is_token(token, "SIGNED_INTEGER")
    @classmethod
    def _is_tree(cls, tree, data):
        return cls.is_tree(tree) and tree.data == data
    @classmethod
    def _is_token(cls, token, data):
        return cls.is_token(token) and token.type == data
    @staticmethod
    def is_tree(tree):
        return isinstance(tree, Tree)
    @staticmethod
    def is_token(token):
        return isinstance(token, Token)
    ##
    # Deconstruct trees and tokens into their parts, used to go top down instead of (actually in addition to) bottom-up
    #
    @staticmethod
    def deconstruct_sequential_gate_block(tree):
        return tree.children
    @staticmethod
    def deconstruct_parallel_gate_block(tree):
        return tree.children
    @staticmethod
    def deconstruct_macro_gate_block(tree):
        """Return the sequential or parallel gate block inside a macro gate block."""
        return tree.children[0]
    @staticmethod
    def deconstruct_array_declaration(tree):
        """Return the portion of the tree that is the identifier and the size."""
        identifier, size = tree.children
        return identifier, size
    @staticmethod
    def deconstruct_array_slice(tree):
        """Return the portion of the tree that is the identifier and a 3-tuple with tokens representing the slice."""
        identifier, slice_start, slice_stop, slice_step = tree.children
        slice_start = slice_start.children[0] if slice_start.children else None
        slice_stop = slice_stop.children[0] if slice_stop.children else None
        slice_step = slice_step.children[0] if slice_step.children else None
        return identifier, (slice_start, slice_stop, slice_step)
    @staticmethod
    def deconstruct_array_element(tree):
        """Return the portion of the tree that is the identifier and the index."""
        identifier, index = tree.children
        return identifier, index
    @classmethod
    def deconstruct_let_or_map_identifier(cls, tree):
        """Return a qualified identifier from a let-or-map identifier."""
        assert len(tree.children) == 1
        return cls.extract_qualified_identifier(tree.children[0])
    @classmethod
    def deconstruct_let_identifier(cls, tree):
        """Return a qualified identifier from a let identifier."""
        assert len(tree.children) == 1
        return cls.extract_qualified_identifier(tree.children[0])
    @staticmethod
    def extract_qualified_identifier(tree):
        """Return a qualified identifier as a tuple of strings."""
        return Identifier(str(child) for child in tree.children)
    @staticmethod
    def extract_identifier(token):
        """Return an identifier as an Identifier object."""
        return Identifier.parse(token)
    @staticmethod
    def extract_integer(token):
        return int(token)
    @staticmethod
    def extract_signed_integer(token):
        return int(token)
    @staticmethod
    def extract_number(token):
        return float(token)
    @staticmethod
    def extract_signed_number(token):
        return float(token)
    @classmethod
    def extract_token(cls, token):
        """Figure out what the token is and call the appropriate extract method."""
        if cls.is_identifier(token):
            return cls.extract_identifier(token)
        elif cls.is_integer(token):
            return cls.extract_integer(token)
        elif cls.is_signed_integer(token):
            return cls.extract_signed_integer(token)
        elif cls.is_number(token):
            return cls.extract_number(token)
        elif cls.is_signed_number(token):
            return cls.extract_signed_number(token)
        else:
            raise JaqalError(f"Unknown token: {token}")
class TreeRewriteVisitor(ParseTreeVisitor, TreeManipulators):
    """A base class that serves to mostly rewrite a parse tree without knowing the exact implementation of the tree.
    Each method by default returns or reconstructs its portion of the tree."""
    ##
    # Overrides of visit methods
    #
    # Tokens pass through unchanged; subclasses override to rewrite them.
    def visit_identifier(self, token):
        return token
    def visit_signed_number(self, token):
        return token
    def visit_number(self, token):
        return token
    def visit_integer(self, token):
        return token
    def visit_signed_integer(self, token):
        return token
    # Each rule is rebuilt from its (possibly rewritten) children using the
    # make_* constructors inherited from TreeManipulators.
    def visit_program(self, header_statements, body_statements):
        return self.make_program(header_statements, body_statements)
    def visit_register_statement(self, array_declaration):
        return self.make_register_statement(array_declaration)
    def visit_map_statement(self, target, source):
        return self.make_map_statement(target, source)
    def visit_let_statement(self, identifier, number):
        return self.make_let_statement(identifier, number)
    def visit_usepulses_statement(self, identifier, objects):
        return self.make_usepulses_statement(identifier, objects)
    def visit_gate_statement(self, gate_name, gate_args):
        return self.make_gate_statement(gate_name, gate_args)
    def visit_macro_definition(self, name, arguments, block):
        return self.make_macro_definition(name, arguments, block)
    def visit_loop_statement(self, repetition_count, block):
        return self.make_loop_statement(repetition_count, block)
    def visit_sequential_gate_block(self, statements):
        return self.make_sequential_gate_block(statements)
    def visit_parallel_gate_block(self, statements):
        return self.make_parallel_gate_block(statements)
    def visit_array_declaration(self, identifier, size):
        return self.make_array_declaration(identifier, size)
    def visit_array_element(self, identifier, index):
        return self.make_array_element(identifier, index)
    def visit_array_element_qual(self, identifier, index):
        return self.make_array_element_qual(identifier, index)
    def visit_array_slice(self, identifier, index_slice):
        return self.make_array_slice(identifier, index_slice)
    def visit_let_identifier(self, identifier):
        return self.make_let_identifier(identifier)
    def visit_let_or_map_identifier(self, identifier):
        return self.make_let_or_map_identifier(identifier)
    def visit_qualified_identifier(self, names):
        return self.make_qualified_identifier(names)
class JaqalParseError(JaqalError):
    """
    Bases: :exc:`jaqalpaq.JaqalError`
    Represents parse errors, with :attr:`line` and :attr:`column` properties denoting
    where in the input the error occurred.
    """

    def __init__(self, message, line, column):
        # Record the failure description and where in the input it happened.
        self.line = line
        self.column = column
        self.message = message

    def __str__(self):
        return "{}: line {} column {}".format(self.message, self.line, self.column)
| StarcoderdataPython |
360553 | from fastapi import FastAPI, Request, UploadFile, File
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
import uvicorn
from src import const, preprocess
import os
import shutil
from pathlib import Path
import json
# Jinja2 templates are loaded from the local ./templates directory
# (relative to the process working directory).
templates = Jinja2Templates(directory="./templates")
app = FastAPI()
# Serve static assets from the project-level "static" directory, one level
# above this source file.
app.mount(
    "/static",
    StaticFiles(directory=Path(__file__).parent.parent.absolute() / "static"),
    name="static",
)
@app.get('/')
async def home(request: Request):
    """Render the main landing page."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
@app.post('/predict')
async def predict(image: UploadFile = File(...)):
    """Classify an uploaded image and return the matching diagnosis JSON.

    The upload is staged to a scratch file for the model, then removed so
    repeated requests do not leave data behind on disk.
    """
    temp_file = save_to_disk(image, path="temp", save_as='temp')
    try:
        result = preprocess.predict(temp_file)
    finally:
        # Clean up the scratch file even if prediction fails.
        os.remove(temp_file)
    # const.diagnosis_dir is concatenated directly, so it is expected to end
    # with a path separator.
    diagnosis_path = const.diagnosis_dir + const.diseases[result] + ".json"
    with open(diagnosis_path, 'r', encoding='utf-8') as f:
        return json.load(f)
def save_to_disk(uploadedfile, path='.', save_as='default'):
    """Write an uploaded file to ``path`` as ``save_as`` plus the upload's
    original extension, returning the path written.

    Args:
        uploadedfile: object with ``filename`` and a file-like ``file`` attribute
            (e.g. FastAPI's UploadFile).
        path: directory to write into; created if it does not exist.
        save_as: base name (without extension) for the saved file.
    """
    extension = os.path.splitext(uploadedfile.filename)[-1]
    # Create the target directory on first use so the write never fails with
    # FileNotFoundError (e.g. the scratch "temp" dir on a fresh checkout).
    os.makedirs(path, exist_ok=True)
    temp_file = os.path.join(path, save_as + extension)
    with open(temp_file, 'wb') as buffer:
        shutil.copyfileobj(uploadedfile.file, buffer)
    return temp_file
| StarcoderdataPython |
295647 | from os import path
from setuptools import setup
# Get the version without importing the package (importing would require the
# package's runtime dependencies to be installed at build time).
__version__ = None
with open('protobuf_serialization/version.py') as f:
    exec(f.read())

this_directory = path.abspath(path.dirname(__file__))
# Read the long description with an explicit encoding so builds do not
# depend on the machine's locale.
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='protobuf-serialization',
    version=__version__,
    description="Helpers for protobuf3 serialization and deserialization",
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/alvinchow86/protobuf-serialization-py',
    author='<NAME>',
    author_email='<EMAIL>',
    license="MIT",
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
    packages=[
        'protobuf_serialization',
        'protobuf_serialization/deserialization',
        'protobuf_serialization/serialization',
    ],
    package_data={},
    scripts=[],
    install_requires=[
        'python-dateutil>=2.7',
        'protobuf>=3.6.0',
    ],
    # NOTE(review): classifiers advertise 3.7/3.8 but this allows 3.6 —
    # confirm the intended floor.
    python_requires='>=3.6',
)
| StarcoderdataPython |
3544016 | <reponame>cedadev/ndg_security_server
"""Paste related helper utilities (moved from ndg.security.test.unit.wsgi)
NERC DataGrid Project
"""
__author__ = "<NAME>"
__date__ = "25/01/11"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "<EMAIL>"
__revision__ = '$Id:$'
from os import path
import sys
from paste.script.util.logging_config import fileConfig
from paste.deploy import loadapp
import multiprocessing
import gunicorn.app.base
import gunicorn.arbiter
from ndg.security.server.test.base import BaseTestCase
class GunicornServerApp(gunicorn.app.base.BaseApplication):
    """Programmatic Gunicorn application wrapping a Paste-deployed WSGI app.

    Follows Gunicorn's "custom application" pattern: options are supplied as
    a dict instead of a command line, and the WSGI callable is returned from
    :meth:`load`.
    """

    @classmethod
    def from_config(cls, cfgFilePath, port=7443, host='127.0.0.1',
                    certfile=BaseTestCase.SSL_CERT_FILEPATH,
                    keyfile=BaseTestCase.SSL_PRIKEY_FILEPATH):
        """Load an application configuration from cfgFilePath ini file."""
        options = {
            'bind': '%s:%s' % (host, str(port)),
            'keyfile': keyfile,
            'certfile': certfile
        }
        fileConfig(cfgFilePath, defaults={'here': path.dirname(cfgFilePath)})
        app = loadapp('config:%s' % cfgFilePath)
        obj = cls(app, options)
        # Reach through two middleware layers so the innermost app can signal
        # this server (e.g. kill_workers). NOTE(review): fragile — assumes
        # exactly two wrapping layers; confirm against the test harness.
        app._app._app.gunicorn_server_app = obj
        return obj

    @property
    def number_of_workers(self):
        # Gunicorn's commonly recommended default: 2 workers per core + 1.
        return (multiprocessing.cpu_count() * 2) + 1

    def __init__(self, app, options=None):
        self.options = options or {}
        # Bug fix: the original tested membership on the raw ``options``
        # argument, which raises TypeError when options is None (its default).
        if 'workers' not in self.options:
            self.options['workers'] = self.number_of_workers
        self.application = app
        self.arbiter = None
        super().__init__()

    def load_config(self):
        # Copy only settings Gunicorn knows about into its config object.
        config = dict([(key, value) for key, value in self.options.items()
                       if key in self.cfg.settings and value is not None])
        for key, value in config.items():
            self.cfg.set(key.lower(), value)

    def load(self):
        return self.application

    def run(self):
        '''Extend in order to save arbiter reference'''
        try:
            self.arbiter = gunicorn.arbiter.Arbiter(self)
            self.arbiter.run()
        except RuntimeError as e:
            print("\nError: {}\n".format(e), file=sys.stderr)
            sys.stderr.flush()
            sys.exit(1)

    def kill_workers(self, sig):
        # Forward the signal to all worker processes via the arbiter.
        self.arbiter.kill_workers(sig)
| StarcoderdataPython |
8156065 | <gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010-2012 <NAME>
#
# This file is part of e-cidadania.
#
# e-cidadania is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# e-cidadania is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with e-cidadania. If not, see <http://www.gnu.org/licenses/>.
from core.spaces import url_names
from core.spaces.models import Space
from tests.test_utils import ECDTestCase
class ViewSpaceIndexTest(ECDTestCase):
    """
    Tests the view for the index page of a space.

    Uses the fixture spaces provided by ECDTestCase: ``foo_space`` is
    treated as private and ``bar_space`` as public.
    """

    def setUp(self):
        # init() builds the fixture users/spaces; note it is called via the
        # parent class rather than setUp().
        super(ViewSpaceIndexTest, self).init()
        self.private_space = self.foo_space
        self.private_space_url = self.getURL(url_names.SPACE_INDEX,
                                             kwargs={'space_url': self.private_space.url})
        self.public_space = self.bar_space
        self.public_space_url = self.getURL(url_names.SPACE_INDEX,
                                            kwargs={'space_url': self.public_space.url})

    def testAnonymousUserCanNotAccessPrivateSpace(self):
        """
        Tests if anonymous user can not access the space index page.
        """
        response = self.get(self.private_space_url)
        self.assertResponseOK(response)
        self.assertContains(response, "You're an anonymous user.")

    def testUnregisteredUserCanNotAccessPrivateSpace(self):
        """Tests if an unregistered user can not access the space index.
        """
        # Create and login a user who is not registered to the space
        user = self.login("test_user", "<PASSWORD>")
        self.assertFalse(user.is_staff)
        self.assertFalse(user.is_superuser)
        self.assertFalse(user.is_anonymous())
        self.assertFalse(user in self.private_space.users.all())
        self.assertFalse(user in self.private_space.mods.all())
        self.assertFalse(user in self.private_space.admins.all())
        response = self.get(self.private_space_url)
        self.assertResponseOK(response)
        self.assertContains(response, "You're not registered to this space.")
        self.logout()

    def testSpaceAdminCanAccessThePrivateSpace(self):
        """Tests if the space admin can access the space index.
        """
        space_admin = self.login('foo_admin', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(space_admin))
        response = self.get(self.private_space_url)
        self.assertResponseOK(response)
        self.assertTemplateNotUsed(response, 'not_allowed.html')
        self.logout()

    def testSpaceModCanAccessThePrivateSpace(self):
        """Tests if the space mod can access the space index.
        """
        space_mod = self.login('foo_mod', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(space_mod))
        response = self.get(self.private_space_url)
        self.assertResponseOK(response)
        self.assertTemplateNotUsed(response, 'not_allowed.html')
        self.logout()

    def testSpaceUserCanAccessTheSpace(self):
        """Tests if the space user can access the space index.
        """
        space_user = self.login('foo_user', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(space_user))
        response = self.get(self.private_space_url)
        self.assertResponseOK(response)
        self.assertTemplateNotUsed(response, 'not_allowed.html')
        self.logout()

    def testOtherUsersCanNotAccessThePrivateSpace(self):
        """Test if other users who are not registered to the space can not
        access the space.
        """
        other_user = self.login('bar_admin', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(other_user))
        self.assertFalse(other_user in self.private_space.admins.all())
        response = self.get(self.private_space_url)
        self.assertResponseOK(response)
        self.assertTemplateUsed(response, 'not_allowed.html')

    def testAdminAccessToAPublicSpace(self):
        """Tests if an admin for one space can access a public space.
        """
        admin = self.login('foo_admin', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(admin))
        self.assertFalse(admin in self.public_space.admins.all())
        response = self.get(self.public_space_url)
        self.assertResponseOK(response)
        self.assertTemplateNotUsed(response, 'not_allowed.html')

    def testAnonymousUserCanAcessAPublicSpace(self):
        """Tests if an anonymous user can access a public space.
        """
        response = self.get(self.public_space_url)
        self.assertResponseOK(response)
        self.assertTemplateNotUsed(response, 'not_allowed.html')
class DeleteSpaceTest(ECDTestCase):
    """
    Tests the deletion of a space.
    """

    def setUp(self):
        self.init()

    def testGeneralUserAccess(self):
        """
        Tests if a general user is prohibited from deleting the space.
        """
        space = self.bar_space
        # Authenticate as a user with no role in the space; only the session
        # matters, so the returned user object is deliberately discarded.
        self.login('test_user', '<PASSWORD>')
        url = self.getURL(url_names.SPACE_DELETE, kwargs={'space_url': space.url})
        response = self.get(url)
        # An unauthorized deletion attempt should redirect away.
        self.assertResponseRedirect(response)
        self.assertEqual(url, response.request['PATH_INFO'])

    def testAdminAccess(self):
        """
        Tests if a correct admin can delete a space.
        """
        space = self.bar_space
        # A superuser who does not administer this space must still be refused.
        user = self.create_super_user("other_admin", "<PASSWORD>",
                                      logged_in=True)
        self.assertTrue(self.isLoggedIn(user))
        url = self.getURL(url_names.SPACE_DELETE, kwargs={'space_url': space.url})
        response = self.get(url)
        self.assertResponseOK(response)
        self.assertTemplateUsed(response, 'not_allowed.html')
        # logout the present user because the space does not belong to it
        self.logout()
        admin = self.login('bar_admin', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(admin))
        self.assertTrue(admin in space.admins.all())
        response = self.get(url)
        # The real space admin is allowed through (redirect on success).
        self.assertResponseRedirect(response)
        self.assertTemplateNotUsed(response, 'not_allowed.html')
class ListSpacesTest(ECDTestCase):
    """
    Tests the list spaces view.
    """

    def setUp(self):
        self.init()
        # We have a public space as well as a private space.
        self.private_space = self.foo_space
        self.public_space = self.bar_space
        self.url = self.getURL(url_names.SPACE_LIST)

    def testOnlyPublicSpacesAreListedForAnonymousUser(self):
        """
        Tests if only the public spaces are listed for anonymous user.
        """
        # No user is logged in currently
        response = self.get(self.url)
        self.assertResponseOK(response)
        # The view exposes its queryset via the 'space_list' context entry.
        spaces_returned = response.context[0].dicts[0]['space_list']
        self.assertEqual(len(spaces_returned), 1)
        self.assertTrue(self.public_space in spaces_returned)
        self.assertTrue(self.private_space not in spaces_returned)

    def testAllSpacesAreReturnedForALoggedInUser(self):
        """
        Tests if both the public and private spaces are returned for a logged
        in user who is registered for both the spaces.

        We make self.bar_admin to be a user for self.foo_space which is a
        private space.
        """
        self.foo_space.users.add(self.bar_admin)
        self.login('bar_admin', '<PASSWORD>')
        response = self.get(self.url)
        spaces_returned = response.context[0].dicts[0]['space_list']
        self.assertEqual(len(spaces_returned), 2)
        self.assertTrue(self.foo_space in spaces_returned)
        self.assertTrue(self.bar_space in spaces_returned)
class EditRoleTest(ECDTestCase):
    """
    Tests if only admin can edit roles of people.

    Bug fix: the original defined ``testSuperuserCanAccessPrivateView``
    twice; the second definition (which exercises the public space) shadowed
    the first, so the private-space case never ran. The second method is
    renamed to ``testSuperuserCanAccessPublicView``.
    """

    def setUp(self):
        self.init()
        self.private_space = self.foo_space
        self.public_space = self.bar_space

    def testSuperuserCanAccessPrivateView(self):
        space = self.private_space
        self.root = self.create_super_user(logged_in=True)
        self.assertTrue(self.isLoggedIn(self.root))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "Please select the users that will be administrators")
        self.logout()

    def testSuperuserCanAccessPublicView(self):
        space = self.public_space
        self.root = self.create_super_user(logged_in=True)
        self.assertTrue(self.isLoggedIn(self.root))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "Please select the users that will be administrators")
        self.logout()

    def testAdminCannotAccessPrivateView(self):
        space = self.private_space
        self.login('foo_admin', 'foo_<PASSWORD>')
        self.assertTrue(self.isLoggedIn(self.foo_admin))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testAdminCannotAccessPublicView(self):
        space = self.public_space
        self.login('bar_admin', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(self.bar_admin))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testModCannotAccessPrivateView(self):
        space = self.private_space
        self.login('foo_mod', 'foo_mod_password')
        self.assertTrue(self.isLoggedIn(self.foo_mod))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testModCannotAccessPublicView(self):
        space = self.public_space
        self.login('bar_mod', '<PASSWORD>password')
        self.assertTrue(self.isLoggedIn(self.bar_mod))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testUserCannotAccessPrivateView(self):
        space = self.private_space
        self.login('foo_user', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(self.foo_user))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testUserCannotAccessPublicView(self):
        space = self.public_space
        self.login('bar_user', '<PASSWORD>user_password')
        self.assertTrue(self.isLoggedIn(self.bar_user))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testOtherUserCannotAccessPrivateView(self):
        space = self.private_space
        self.unreg_user = self.create_user('unreg_user', '<PASSWORD>')
        self.login('unreg_user', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(self.unreg_user))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()

    def testOtherUserCannotAccessPublicView(self):
        space = self.public_space
        self.unreg_user = self.create_user('unreg_user', '<PASSWORD>')
        self.login('unreg_user', '<PASSWORD>')
        self.assertTrue(self.isLoggedIn(self.unreg_user))
        url = self.getURL('edit-roles', kwargs={'space_url': space.url})
        response = self.get(url, follow=True)
        self.assertResponseOK(response)
        self.assertContains(response, "you don't have permissions for accessing to some area.")
        self.logout()
| StarcoderdataPython |
3532581 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-02-19 10:37
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields.jsonb
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import outpost.django.base.utils
import outpost.django.base.validators
class Migration(migrations.Migration):
    """Initial schema for the ``salt`` app (auto-generated by Django 1.11).

    Do not hand-edit the operations below; generate a follow-up migration
    instead. ``Job`` and ``Result`` are unmanaged models backed by existing
    tables in the Salt database.
    """

    initial = True

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("campusonline", "0050_stud_photo"),
    ]

    operations = [
        # Unmanaged models mapped onto pre-existing Salt tables.
        migrations.CreateModel(
            name="Job",
            fields=[
                (
                    "id",
                    models.CharField(max_length=20, primary_key=True, serialize=False),
                ),
                ("data", django.contrib.postgres.fields.jsonb.JSONField()),
            ],
            options={"db_table": "salt_job", "managed": False},
        ),
        migrations.CreateModel(
            name="Result",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("function", models.CharField(max_length=50)),
                ("result", django.contrib.postgres.fields.jsonb.JSONField()),
                ("data", django.contrib.postgres.fields.jsonb.JSONField()),
                ("target", models.CharField(max_length=255)),
                ("success", models.BooleanField()),
                ("modified", models.DateTimeField()),
            ],
            options={
                "db_table": "salt_result",
                "ordering": ("-modified",),
                "managed": False,
            },
        ),
        # Managed models owned by this app.
        migrations.CreateModel(
            name="File",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("path", models.CharField(max_length=512)),
                (
                    "content",
                    models.FileField(upload_to=outpost.django.base.utils.Uuid4Upload),
                ),
                ("sha256", models.CharField(max_length=64)),
                (
                    "permissions",
                    models.CharField(
                        default="0640",
                        max_length=4,
                        validators=[
                            django.core.validators.RegexValidator(
                                "^0?[0-7]{3}$", "Not a valid POSIX permission."
                            )
                        ],
                    ),
                ),
                ("mimetype", models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name="Group",
            fields=[
                ("id", models.IntegerField(primary_key=True, serialize=False)),
                ("name", models.CharField(max_length=31, unique=True)),
            ],
            options={"ordering": ("pk",)},
        ),
        migrations.CreateModel(
            name="Host",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(db_index=True, max_length=64, unique=True)),
            ],
            options={
                "ordering": ("name",),
                "permissions": (("view_host", "View host"),),
            },
        ),
        migrations.CreateModel(
            name="Permission",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("function", models.CharField(default=".*", max_length=256)),
            ],
        ),
        migrations.CreateModel(
            name="PublicKey",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=128)),
                (
                    "key",
                    models.TextField(
                        validators=[outpost.django.base.validators.PublicKeyValidator()]
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="System",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=128)),
                (
                    "home_template",
                    models.CharField(default="/home/{username}", max_length=256),
                ),
                ("same_group_id", models.BooleanField(default=True)),
                ("same_group_name", models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name="SystemFile",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "file",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="salt.File"
                    ),
                ),
                (
                    "system",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="salt.System"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="SystemUser",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("shell", models.CharField(default="/bin/bash", max_length=256)),
                ("sudo", models.BooleanField(default=False)),
                ("groups", models.ManyToManyField(blank=True, to="salt.Group")),
                (
                    "system",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="salt.System"
                    ),
                ),
            ],
        ),
        # Polymorphic user hierarchy: User is the base, Staff/Student extend it.
        migrations.CreateModel(
            name="User",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                )
            ],
            options={"ordering": ("pk",), "manager_inheritance_from_future": True},
        ),
        migrations.CreateModel(
            name="StaffUser",
            fields=[
                (
                    "user_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="salt.User",
                    ),
                ),
                (
                    "person",
                    models.OneToOneField(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.CASCADE,
                        to="campusonline.Person",
                    ),
                ),
            ],
            options={"manager_inheritance_from_future": True},
            bases=("salt.user",),
        ),
        migrations.CreateModel(
            name="StudentUser",
            fields=[
                (
                    "user_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="salt.User",
                    ),
                ),
                (
                    "person",
                    models.OneToOneField(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.CASCADE,
                        to="campusonline.Student",
                    ),
                ),
            ],
            options={"manager_inheritance_from_future": True},
            bases=("salt.user",),
        ),
        migrations.AddField(
            model_name="user",
            name="local",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="polymorphic_ctype",
            field=models.ForeignKey(
                editable=False,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="polymorphic_salt.user_set+",
                to="contenttypes.ContentType",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="systems",
            field=models.ManyToManyField(
                blank=True, through="salt.SystemUser", to="salt.System"
            ),
        ),
        migrations.AddField(
            model_name="systemuser",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="salt.User"
            ),
        ),
        migrations.AddField(
            model_name="publickey",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="salt.User"
            ),
        ),
        migrations.AddField(
            model_name="permission",
            name="system",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="salt.System",
            ),
        ),
        migrations.AddField(
            model_name="permission",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="host",
            name="system",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="salt.System",
            ),
        ),
        migrations.AddField(
            model_name="group",
            name="systems",
            field=models.ManyToManyField(blank=True, to="salt.System"),
        ),
        migrations.AddField(
            model_name="file",
            name="systems",
            field=models.ManyToManyField(
                blank=True, through="salt.SystemFile", to="salt.System"
            ),
        ),
        migrations.AddField(
            model_name="file",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="salt.User"
            ),
        ),
    ]
| StarcoderdataPython |
5055896 | <gh_stars>0
import numpy as np
import pandas as pd
import streamlit as st
from .constants import RANDOM_STATE
def app():
    """Render the 'Dataset' page of the Streamlit app.

    Shows a description of the GPT (Guitar Playing Technique) dataset, a
    per-class clip count, an audio-clip browser, and a sample of the
    extracted feature table. Dead commented-out code for the retired
    'gpt_split' dataset has been removed.
    """
    gpt = pd.read_csv('data/gpt.csv')
    # Drop the leading character so paths are relative to the app root.
    gpt['file_path'] = gpt['audio_path'].str[1:]
    st.title('Dataset')
    st.write('<hr>', unsafe_allow_html=True)
    st.write(
        'The Guitar Playing Technique (GPT) datasets from the work of [Su et al. (2014)](http://mac.citi.sinica.edu.tw/GuitarTranscription/) was utilized.')
    st.write('This dataset comprises `7 playing techniques` of the electrical guitar, including: `bending`, `hamming`, `mute`, `normal`, `pulling`, `slide`, and `trill`')
    st.write('This dataset includes complete audio signals of each guitar sound with a duration of `4.0 s`.')
    st.write('To make the quality of the sound recordings akin to that of real-world performance, `7 different guitar tones` are used with differences in effect and equalizer settings.')
    st.markdown('<font size="2"><table> \
        <tr> \
        <th style="width:20%">Tone name</th> \
        <th>Effect</th> \
        <th>Equalizer</th> \
        </tr> \
        <tr> \
        <td>1 (Normal tone)</td> \
        <td>moderate distortion</td> \
        <td>no modification on EQ</td> \
        </tr> \
        <tr> \
        <td>2 (Solo tone)</td> \
        <td>moderate distortion and moderate reverb</td> \
        <td>mid-frequency is emphasized</td> \
        </tr> \
        <tr> \
        <td>3 (Solo tone)</td> \
        <td>moderate distortion, intense chorus, slight reverb</td> \
        <td>mid-frequency is emphasized</td> \
        </tr> \
        <tr> \
        <td>4 (Solo tone)</td> \
        <td>moderate distortion, intense delay, moderate reverb</td> \
        <td>mid-frequency is emphasized</td> \
        </tr> \
        <tr> \
        <td>5 (Riff tone)</td> \
        <td>intense distortion</td> \
        <td>mid-frequency is suppressed while high-frequency and low-frequency are emphasized</td> \
        </tr> \
        <tr> \
        <td>6 (Country tone)</td> \
        <td>very slight distortion</td> \
        <td>no modification on EQ</td> \
        </tr> \
        <tr> \
        <td>7 (Funk tone)</td> \
        <td>slight distortion, slight delay, and slight reverb</td> \
        <td>high-frequency component is emphasized a little</td> \
        </tr> \
        </table></font>', unsafe_allow_html=True)
    st.write('<hr>', unsafe_allow_html=True)

    st.header('GPT Dataset')
    st.subheader("Number of Sound Clips in GPT Dataset")
    st.write('*Total:', gpt.shape[0], ' audio files.*')
    st.bar_chart(pd.value_counts(gpt['technique']))

    st.subheader('Play an Audio Clip of GPT Dataset')
    techniques = gpt['technique'].unique()
    tones = gpt['tone_type'].unique()
    selected_technique = st.selectbox('Select Technique:', np.sort(techniques))
    selected_tone = st.selectbox('Select Tone Type:', np.sort(tones))
    files = gpt['audio_path'].loc[(gpt['technique'] == selected_technique) & (
        gpt['tone_type'] == selected_tone)].sort_values()
    df_files = files.to_frame()
    # The file name is the 7th component of the recorded path.
    df_files['value'] = np.array(files.str.split('/').tolist())[:, 6]
    df_files['audio_path'] = df_files['audio_path'].str[3:]
    selected_file = st.selectbox('Select File:', df_files['value'].tolist())
    selected_file_path = df_files['audio_path'].loc[df_files['value'] == selected_file].item()
    st.write('`Play: ', selected_file_path, '`')
    # Fix: close the audio file deterministically instead of leaking the handle.
    with open(selected_file_path, 'rb') as audio_file:
        audio_bytes = audio_file.read()
    st.audio(audio_bytes)
    st.write('<hr>', unsafe_allow_html=True)

    st.header('Extracted Features of GPT Datasets')
    st.write('To represent musical signal, the `mean`, `std`, `variance`, `skewness`, and `kurtosis` as the statistics measure of various audio descriptors including: *MFCC-13*, *$\Delta$MFCC-13* (first-order derivative), *$\Delta$<sub>2</sub>MFCC-13* (second-order derivative) was utilized.', unsafe_allow_html=True)
    st.latex(r'''
    Total = 5 \times 13 \times 3 = 195D \ Feature \ Vector
    ''')
    st.markdown('### GPT dataset (1% sampling)')
    sample_gpt = gpt.sample(frac=0.01, random_state=RANDOM_STATE)
    st.dataframe(sample_gpt)
3276755 | <gh_stars>100-1000
# ------------------------------------------
# VQ-Diffusion
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# written By <NAME>
# ------------------------------------------
import torch
import math
from torch import nn
from image_synthesis.utils.misc import instantiate_from_config
import time
import numpy as np
from PIL import Image
import os
from torch.cuda.amp import autocast
class UC_DALLE(nn.Module):
    """Unconditional discrete-diffusion image generator.

    Combines a content codec (image <-> token encoder/decoder) and a
    diffusion transformer over token sequences; both sub-modules are built
    from config dicts via ``instantiate_from_config``.
    """

    def __init__(
        self,
        *,
        content_info=None,
        content_codec_config,
        diffusion_config
    ):
        super().__init__()
        # Avoid a shared mutable default argument; the previous default
        # ({'key': 'image'}) is preserved when the caller passes nothing.
        self.content_info = {'key': 'image'} if content_info is None else content_info
        self.content_codec = instantiate_from_config(content_codec_config)
        self.transformer = instantiate_from_config(diffusion_config)
        # Flipped to True once predict_start has been wrapped for truncation
        # sampling (see generate_content); the wrap must only happen once.
        self.truncation_forward = False

    def parameters(self, recurse=True, name=None):
        """Return parameters, optionally restricted to named sub-modules.

        ``name`` may be a single attribute name or several joined with '+'
        (e.g. ``'transformer+content_codec'``); ``None`` or ``'none'``
        yields all parameters as usual.
        """
        if name is None or name == 'none':
            return super().parameters(recurse=recurse)
        names = name.split('+')
        params = []
        for n in names:
            # Bug fix: the original looked up ``name`` (the full joined
            # string) instead of the individual component ``n``, so any
            # '+'-joined name raised AttributeError.
            try:
                # Some sub-modules override parameters() to accept a name.
                params += getattr(self, n).parameters(recurse=recurse, name=n)
            except TypeError:
                # Plain nn.Module.parameters() takes no ``name`` argument.
                params += getattr(self, n).parameters(recurse=recurse)
        return params

    @property
    def device(self):
        return self.transformer.device

    def get_ema_model(self):
        return self.transformer

    @autocast(enabled=False)
    @torch.no_grad()
    def prepare_content(self, batch, with_mask=False):
        """Tokenize the batch's content and prefix keys with 'content_'."""
        cont_key = self.content_info['key']
        cont = batch[cont_key]
        if torch.is_tensor(cont):
            cont = cont.to(self.device)
        if not with_mask:
            cont = self.content_codec.get_tokens(cont)
        else:
            mask = batch['mask'.format(cont_key)]
            cont = self.content_codec.get_tokens(cont, mask, enc_with_mask=False)
        cont_ = {}
        for k, v in cont.items():
            v = v.to(self.device) if torch.is_tensor(v) else v
            cont_['content_' + k] = v
        return cont_

    @torch.no_grad()
    def prepare_input(self, batch):
        return self.prepare_content(batch)

    def predict_start_with_truncation(self, func, sample_type):
        """Wrap ``func`` (a log-probability predictor) with truncation.

        As implemented, a trailing ``'p'`` (e.g. ``'top200p'``) keeps the k
        most likely tokens, while a trailing ``'r'`` (e.g. ``'top0.85r'``)
        keeps the smallest set of tokens whose cumulative probability
        reaches the given ratio.
        """
        if sample_type[-1] == 'p':
            truncation_k = int(sample_type[:-1].replace('top', ''))
            def wrapper(*args, **kwargs):
                out = func(*args, **kwargs)
                val, ind = out.topk(k=truncation_k, dim=1)
                # -70 acts as -inf in log space (exp(-70) ~ 0).
                probs = torch.full_like(out, -70)
                probs.scatter_(1, ind, val)
                return probs
            return wrapper
        elif sample_type[-1] == 'r':
            truncation_r = float(sample_type[:-1].replace('top', ''))
            def wrapper(*args, **kwargs):
                out = func(*args, **kwargs)
                temp, indices = torch.sort(out, 1, descending=True)
                cum_probs = torch.exp(temp).cumsum(dim=1)
                keep = cum_probs < truncation_r
                # Always keep at least the single most likely token.
                head = torch.full_like(keep[:, 0:1, :], True)
                keep = torch.cat((head, keep), dim=1)[:, :-1, :]
                # Undo the sort so the mask lines up with the original order.
                keep = keep.gather(1, indices.argsort(1))
                probs = keep.float() * out + (1 - keep.float()) * (-70)
                return probs
            return wrapper
        else:
            # Fail loudly instead of printing and implicitly returning None.
            raise ValueError("unsupported sample_type: {}".format(sample_type))

    @torch.no_grad()
    def generate_content(
        self,
        *,
        batch,
        filter_ratio = 0.5,
        temperature = 1.0,
        content_ratio = 0.0,
        replicate=1,
        return_att_weight=False,
        sample_type="normal",
    ):
        """Sample ``replicate`` unconditional images; returns {'content': tensor}."""
        self.eval()
        content_token = None
        # Wrap the predictor once for truncation-style sampling.
        if sample_type.split(',')[0][:3] == "top" and self.truncation_forward == False:
            self.transformer.predict_start = self.predict_start_with_truncation(self.transformer.predict_start, sample_type.split(',')[0])
            self.truncation_forward = True
        trans_out = self.transformer.sample(condition_token=None,
                                            condition_mask=None,
                                            condition_embed=None,
                                            content_token=content_token,
                                            filter_ratio=filter_ratio,
                                            temperature=temperature,
                                            return_att_weight=return_att_weight,
                                            return_logits=False,
                                            print_log=False,
                                            sample_type=sample_type,
                                            batch_size=replicate)
        # Decode token grid back to pixels, e.g. (B, 1024) -> (B, 3, 256, 256).
        content = self.content_codec.decode(trans_out['content_token'])
        self.train()
        out = {
            'content': content
        }
        return out

    @torch.no_grad()
    def reconstruct(
        self,
        input
    ):
        """Round-trip an image through the codec (encode to tokens, decode back)."""
        if torch.is_tensor(input):
            input = input.to(self.device)
        cont = self.content_codec.get_tokens(input)
        cont_ = {}
        for k, v in cont.items():
            v = v.to(self.device) if torch.is_tensor(v) else v
            cont_['content_' + k] = v
        rec = self.content_codec.decode(cont_['content_token'])
        return rec

    @torch.no_grad()
    def sample(
        self,
        batch,
        clip = None,
        temperature = 1.,
        return_rec = True,
        filter_ratio = [0],
        content_ratio = [1], # the ratio to keep the encoded content tokens
        return_att_weight=False,
        return_logits=False,
        sample_type="normal",
        **kwargs,
    ):
        """Sample images for each (filter_ratio, content_ratio) combination.

        Returns a dict keyed 'cond1_cont{cr}_fr{fr}_image' plus the input
        image and (optionally) its codec reconstruction.
        """
        self.eval()
        content = self.prepare_content(batch)
        content_samples = {'input_image': batch[self.content_info['key']]}
        if return_rec:
            content_samples['reconstruction_image'] = self.content_codec.decode(content['content_token'])
        for fr in filter_ratio:
            for cr in content_ratio:
                num_content_tokens = int((content['content_token'].shape[1] * cr))
                if num_content_tokens < 0:
                    continue
                else:
                    # Keep only a prefix of the encoded tokens as the partial content.
                    content_token = content['content_token'][:, :num_content_tokens]
                trans_out = self.transformer.sample(condition_token=None,
                                                    condition_mask=None,
                                                    condition_embed=None,
                                                    content_token=content_token,
                                                    filter_ratio=fr,
                                                    temperature=temperature,
                                                    return_att_weight=return_att_weight,
                                                    return_logits=return_logits,
                                                    content_logits=content.get('content_logits', None),
                                                    sample_type=sample_type,
                                                    batch_size=batch[self.content_info['key']].shape[0],
                                                    **kwargs)
                content_samples['cond1_cont{}_fr{}_image'.format(cr, fr)] = self.content_codec.decode(trans_out['content_token'])
                if return_logits:
                    content_samples['logits'] = trans_out['logits']
        self.train()
        output = {}
        output.update(content_samples)
        return output

    def forward(
        self,
        batch,
        name='none',
        **kwargs
    ):
        """Training forward pass: tokenize the batch and run the diffusion loss."""
        input = self.prepare_input(batch)
        output = self.transformer(input, **kwargs)
        return output
| StarcoderdataPython |
1717338 | """
Unreify RDF values in KGTK files
"""
from argparse import ArgumentParser, Namespace
import attr
from pathlib import Path
import sys
import typing
from kgtk.kgtkformat import KgtkFormat
from kgtk.io.kgtkreader import KgtkReader, KgtkReaderMode, KgtkReaderOptions
from kgtk.io.kgtkwriter import KgtkWriter
from kgtk.unreify.kgtksortbuffer import KgtkSortBuffer
from kgtk.utils.argparsehelpers import optional_bool
from kgtk.value.kgtkvalueoptions import KgtkValueOptions
@attr.s(slots=True, frozen=False)
class KgtkUnreifyValues(KgtkFormat):
    """Unreify RDF-style reified statements in a KGTK edge file.

    Groups of rows sharing a reified node are collapsed into a single new
    edge (plus attribute edges), driven by the trigger/value/old/new label
    settings below.  See process() for the main loop.
    """
    # Note: If you change any of these default values, be sure to change the
    # explanation in the corresponding parser.add_argument(...) call.
    DEFAULT_TRIGGER_LABEL_VALUE: typing.Optional[str] = None
    DEFAULT_TRIGGER_NODE2_VALUE: typing.Optional[str] = None
    DEFAULT_VALUE_LABEL_VALUE: typing.Optional[str] = None
    DEFAULT_OLD_LABEL_VALUE: typing.Optional[str] = None
    DEFAULT_NEW_LABEL_VALUE: typing.Optional[str] = None
    DEFAULT_ALLOW_MULTIPLE_VALUES: bool = False
    DEFAULT_ALLOW_EXTRA_COLUMNS: bool = False
    # Required input/output paths.
    input_file_path: Path = attr.ib(validator=attr.validators.instance_of(Path))
    output_file_path: Path = attr.ib(validator=attr.validators.instance_of(Path))
    # Optional auxiliary output files (see the --*-file command-line options).
    reified_file_path: typing.Optional[Path] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(Path)))
    unreified_file_path: typing.Optional[Path] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(Path)))
    uninvolved_file_path: typing.Optional[Path] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(Path)))
    # NOTE(review): these four are annotated str but default to None; they are
    # required=True on the command line, so the str validator is satisfied in
    # practice — instantiating with the bare defaults would fail. Confirm intended.
    trigger_label_value: str = attr.ib(validator=attr.validators.instance_of(str), default=DEFAULT_TRIGGER_LABEL_VALUE)
    trigger_node2_value: str = attr.ib(validator=attr.validators.instance_of(str), default=DEFAULT_TRIGGER_NODE2_VALUE)
    value_label_value: str = attr.ib(validator=attr.validators.instance_of(str), default=DEFAULT_VALUE_LABEL_VALUE)
    old_label_value: str = attr.ib(validator=attr.validators.instance_of(str), default=DEFAULT_OLD_LABEL_VALUE)
    new_label_value: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)),
                                                    default=DEFAULT_NEW_LABEL_VALUE)
    allow_multiple_values: bool = attr.ib(validator=attr.validators.instance_of(bool), default=DEFAULT_ALLOW_MULTIPLE_VALUES)
    allow_extra_columns: bool = attr.ib(validator=attr.validators.instance_of(bool), default=DEFAULT_ALLOW_EXTRA_COLUMNS)
    # TODO: find working validators
    # value_options: typing.Optional[KgtkValueOptions] = attr.ib(attr.validators.optional(attr.validators.instance_of(KgtkValueOptions)), default=None)
    reader_options: typing.Optional[KgtkReaderOptions]= attr.ib(default=None)
    value_options: typing.Optional[KgtkValueOptions] = attr.ib(default=None)
    output_format: typing.Optional[str] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(str)), default=None) # TODO: use an enum
    error_file: typing.TextIO = attr.ib(default=sys.stderr)
    verbose: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
    very_verbose: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
    # Working variables:
    output_line_count: int = attr.ib(default=0)
def make_keygen(self, old_label_value: str)->KgtkSortBuffer.KEYGEN_TYPE:
# Create a key generator function passing old_label_value as a
# closure value.
def keygen(buf: 'KgtkSortBuffer', row: typing.List[str])->str:
label_column_idx: int = buf.label_column_idx
if label_column_idx < 0:
raise ValueError("Unknown label column.")
if row[label_column_idx] == old_label_value:
node2_column_idx: int = buf.node2_column_idx
if node2_column_idx < 0:
raise ValueError("Unknown node2 column.")
return row[node2_column_idx]
else:
node1_column_idx: int = buf.node1_column_idx
if node1_column_idx < 0:
raise ValueError("Unknown node1 column.")
return row[node1_column_idx]
return keygen
def process(self):
# Open the input file.
if self.verbose:
print("Opening the input file: %s" % str(self.input_file_path), file=self.error_file, flush=True)
kr: KgtkReader = KgtkReader.open(self.input_file_path,
mode=KgtkReaderMode.EDGE, # Must be an edge file.
error_file=self.error_file,
options=self.reader_options,
value_options = self.value_options,
verbose=self.verbose,
very_verbose=self.very_verbose,
)
output_column_names: typing.List[str] = kr.column_names.copy()
node1_column_idx: int = kr.node1_column_idx
node1_column_name: str = output_column_names[node1_column_idx]
label_column_idx: int = kr.label_column_idx
label_column_name: str = output_column_names[label_column_idx]
node2_column_idx: int = kr.node2_column_idx
node2_column_name: str = output_column_names[node2_column_idx]
# Adding an ID column?
new_id_column: bool = False
id_column_idx: int = kr.id_column_idx
if id_column_idx < 0:
new_id_column = True
id_column_idx = len(output_column_names)
output_column_names.append(KgtkFormat.ID)
id_column_name: str = output_column_names[id_column_idx]
# There should be exactly 4 output columns, iincluding an ID column.
# If there are additional columns, some content may be lost when
# unreifying records.
num_columns: int = len(output_column_names)
if num_columns < 4 or (num_columns > 4 and not self.allow_extra_columns):
raise ValueError("Expecting 4 output columns, found %d." % num_columns)
if self.verbose:
print("Opening the output file: %s" % str(self.output_file_path), file=self.error_file, flush=True)
# Open the output file.
kw: KgtkWriter = KgtkWriter.open(output_column_names,
self.output_file_path,
mode=KgtkWriter.Mode[kr.mode.name],
output_format=self.output_format,
require_all_columns=not self.allow_extra_columns,
prohibit_extra_columns=True,
fill_missing_columns=self.allow_extra_columns,
gzip_in_parallel=False,
verbose=self.verbose,
very_verbose=self.very_verbose)
reifiedw: typing.Optional[KgtkWriter] = None
if self.reified_file_path is not None:
if self.verbose:
print("Opening the reified value output file: %s" % str(self.reified_file_path), file=self.error_file, flush=True)
reifiedw: KgtkWriter = KgtkWriter.open(kr.column_names,
self.reified_file_path,
mode=KgtkWriter.Mode[kr.mode.name],
output_format=self.output_format,
require_all_columns=not self.allow_extra_columns,
prohibit_extra_columns=True,
fill_missing_columns=self.allow_extra_columns,
gzip_in_parallel=False,
verbose=self.verbose,
very_verbose=self.very_verbose)
unreifiedw: typing.Optional[KgtkWriter] = None
if self.unreified_file_path is not None:
if self.verbose:
print("Opening the unreified value output file: %s" % str(self.unreified_file_path), file=self.error_file, flush=True)
unreifiedw: KgtkWriter = KgtkWriter.open(output_column_names,
self.unreified_file_path,
mode=KgtkWriter.Mode[kr.mode.name],
output_format=self.output_format,
require_all_columns=True,
prohibit_extra_columns=True,
fill_missing_columns=False,
gzip_in_parallel=False,
verbose=self.verbose,
very_verbose=self.very_verbose)
uninvolvedw: typing.Optional[KgtkWriter] = None
if self.uninvolved_file_path is not None:
if self.verbose:
print("Opening the uninvolved records output file: %s" % str(self.uninvolved_file_path), file=self.error_file, flush=True)
uninvolvedw: KgtkWriter = KgtkWriter.open(kr.column_names,
self.uninvolved_file_path,
mode=KgtkWriter.Mode[kr.mode.name],
output_format=self.output_format,
require_all_columns=True,
prohibit_extra_columns=True,
fill_missing_columns=False,
gzip_in_parallel=False,
verbose=self.verbose,
very_verbose=self.very_verbose)
if self.verbose:
print("Reading and grouping the input records.", file=self.error_file, flush=True)
ksb: KgtkSortBuffer = KgtkSortBuffer.readall(kr, grouped=True, keygen=self.make_keygen(self.old_label_value))
input_group_count: int = 0
input_line_count: int = 0
self.output_line_count = 0
unreification_count: int = 0
if self.verbose:
print("Processing the input records.", file=self.error_file, flush=True)
node1_group: typing.List[typing.List[str]]
for node1_group in ksb.groupiterate():
input_group_count += 1
saw_error: bool = False
saw_trigger: bool = False
node1_value: typing.Optional[str] = None
node2_values: typing.Set[str] = set()
old_label_node2_value: typing.Optional[str] = None
trigger_node1_value: typing.Optional[str] = None
potential_edge_attributes: typing.List[typing.List[str]] = [ ]
row: typing.List[str]
for row in node1_group:
input_line_count += 1
node1: str = row[node1_column_idx]
label: str = row[label_column_idx]
node2: str = row[node2_column_idx]
if label == self.trigger_label_value and node2 == self.trigger_node2_value:
if saw_trigger:
# TODO: Shout louder.
if self.verbose:
print("Warning: Duplicate trigger in input group %d (%s)" % (input_group_count, node1_value), file=self.error_file, flush=True)
saw_trigger = True
trigger_node1_value = node1
elif label == self.value_label_value:
if len(node2_values) > 0 and node2 not in node2_values and not self.allow_multiple_values:
# TODO: Shout louder.
if self.verbose:
print("Warning: Multiple values in input group %d" % (input_group_count), file=self.error_file, flush=True)
saw_error = True
node2_values.add(node2)
elif label == self.old_label_value:
node1_value = node1
old_label_node2_value = node2
else:
potential_edge_attributes.append(row)
if saw_trigger and \
node1_value is not None and \
len(node2_values) > 0 and \
old_label_node2_value == trigger_node1_value and \
not saw_error:
# Unreification was triggered.
unreification_count += 1
node2_value: str = KgtkFormat.LIST_SEPARATOR.join(list(node2_values))
if reifiedw is not None:
for row in node1_group:
reifiedw.write(row)
self.write_new_edge(kw,
reifiedw,
potential_edge_attributes,
node1_value,
node2_value,
trigger_node1_value,
label_column_idx,
node2_column_idx,
node1_column_name,
label_column_name,
node2_column_name,
id_column_name,
)
else:
# Unreification was not triggered. Pass this group of rows
# through unchanged, except for possibly appending an ID
# column.
self.pass_group_through(kw, uninvolvedw, node1_group, new_id_column)
if self.verbose:
print("Processed %d records in %d groups." % (input_line_count, input_group_count), file=self.error_file, flush=True)
print("Unreified %d groups." % unreification_count, file=self.error_file, flush=True)
print("Wrote %d output records" % self.output_line_count, file=self.error_file, flush=True)
kw.close()
if reifiedw is not None:
reifiedw.close()
if unreifiedw is not None:
unreifiedw.close()
if uninvolvedw is not None:
uninvolvedw.close()
def make_new_id(self, edge_id: str, count: int, width: int)->str:
# Generate a new ID that will sort after the new edge.
# What if the existing ID is not a symbol or a string?
#
# TODO: Handle cases where the existing ID is not a symbol or string.
new_id: str
if edge_id.startswith(KgtkFormat.STRING_SIGIL) and edge_id.endswith(KgtkFormat.STRING_SIGIL):
new_id = edge_id[:-1] + "-" + str(count).zfill(width) + KgtkFormat.STRING_SIGIL
else:
new_id = edge_id + "-" + str(count).zfill(width)
return new_id
def get_width(self, max_count: int)->int:
return len(str(max_count).strip())
def write_new_edge(self,
kw: KgtkWriter,
unreifiedw: typing.Optional[KgtkWriter],
potential_edge_attributes: typing.List[typing.List[str]],
node1_value: str,
node2_value: str,
edge_id: str,
label_column_idx: int,
node2_column_idx: int,
node1_column_name: str,
label_column_name: str,
node2_column_name: str,
id_column_name: str,
):
new_label_value: str = self.new_label_value if self.new_label_value is not None else self.value_label_value
kw.writemap({
node1_column_name: node1_value,
label_column_name: new_label_value,
node2_column_name: node2_value,
id_column_name: edge_id,
})
self.output_line_count += 1
if unreifiedw is not None:
unreifiedw.writemap({
node1_column_name: node1_value,
label_column_name: new_label_value,
node2_column_name: node2_value,
id_column_name: edge_id,
})
self.write_edge_attributes(kw,
unreifiedw,
potential_edge_attributes,
edge_id,
label_column_idx,
node2_column_idx,
node1_column_name,
label_column_name,
node2_column_name,
id_column_name,
)
def write_edge_attributes(self,
kw: KgtkWriter,
unreifiedw: typing.Optional[KgtkWriter],
potential_edge_attributes: typing.List[typing.List[str]],
edge_id: str,
label_column_idx: int,
node2_column_idx: int,
node1_column_name: str,
label_column_name: str,
node2_column_name: str,
id_column_name: str,
):
width: int = self.get_width(len(potential_edge_attributes))
attribute_number: int = 0
edge_row: typing.List[str]
for edge_row in potential_edge_attributes:
attribute_number += 1
attr_edge_id: str = self.make_new_id(edge_id, attribute_number, width)
kw.writemap({
node1_column_name: edge_id,
label_column_name: edge_row[label_column_idx],
node2_column_name: edge_row[node2_column_idx],
id_column_name: attr_edge_id
})
self.output_line_count += 1
if unreifiedw is not None:
unreifiedw.writemap({
node1_column_name: edge_id,
label_column_name: edge_row[label_column_idx],
node2_column_name: edge_row[node2_column_idx],
id_column_name: attr_edge_id
})
def pass_group_through(self,
kw: KgtkWriter,
uninvolvedw: typing.Optional[KgtkWriter],
node1_group: typing.List[typing.List[str]],
new_id_column: bool):
# Unreification was not triggered. Pass this group of rows
# through unchanged, except for possibly appending an ID
# column.
#
# TODO: Perhaps we'd like to build an ID value at the same time?
row: typing.List[str]
for row in node1_group:
if uninvolvedw is not None:
uninvolvedw.write(row)
if new_id_column:
row = row.copy()
row.append("")
kw.write(row)
self.output_line_count += 1
@classmethod
def add_arguments(cls, parser: ArgumentParser):
parser.add_argument( "--trigger-label", dest="trigger_label_value", required=True,
help="A value in the label (or its alias) column that identifies the trigger record. (default=%(default)s).",
type=str, default=cls.DEFAULT_TRIGGER_LABEL_VALUE)
parser.add_argument( "--trigger-node2", dest="trigger_node2_value", required=True,
help="A value in the node2 (or its alias) column that identifies the trigger record. " +
"This is a required parameter for which there is no default value. (default=%(default)s).",
type=str, default=cls.DEFAULT_TRIGGER_NODE2_VALUE)
parser.add_argument( "--value-label", dest="value_label_value", required=True,
help="A value in the label (or its alias) column that identifies the record with the node2 value for the new, unreified edge. " +
"This is a required parameter for which there is no default value. (default=%(default)s).",
type=str, default=cls.DEFAULT_VALUE_LABEL_VALUE)
parser.add_argument( "--old-label", dest="old_label_value", required=True,
help="A value in the label (or its alias) column that identifies the edge with the node1 value being unreified. " +
"The value in the node1 (or its alias) column of this record will be used in the node1 (or its alias) column for the " +
"new, unreified edge. " +
"This is a required parameter for which there is no default value. (default=%(default)s).",
type=str, default=cls.DEFAULT_OLD_LABEL_VALUE)
parser.add_argument( "--new-label", dest="new_label_value",
help="The value to be entered in the label (or its alias) column of the new, unreified edge. " +
"If not specified (None), the value from --value-label is used. (default=%(default)s).",
type=str, default=cls.DEFAULT_NEW_LABEL_VALUE)
parser.add_argument( "--allow-multiple-values", dest="allow_multiple_values",
help="When true, allow multiple values (a '|' list) in the node2 (or its alias) column of the new, unreified edge. " +
"(default=%(default)s).",
type=optional_bool, nargs='?', const=True, default=cls.DEFAULT_ALLOW_MULTIPLE_VALUES)
parser.add_argument( "--allow-extra-columns", dest="allow_extra_columns",
help="When true, allow extra columns (beyond node1, label, node2, and id, or their aliases. " +
"Warning: the contents of these columns may be lost silently in unreified statements. " +
"(default=%(default)s).",
type=optional_bool, nargs='?', const=True, default=cls.DEFAULT_ALLOW_MULTIPLE_VALUES)
def main():
    """
    Command-line entry point for unreifying RDF values in a KGTK file.
    """
    parser: ArgumentParser = ArgumentParser()
    parser.add_argument("-i", "--input-file", dest="input_file_path",
                        help="The KGTK input file. (default=%(default)s)", type=Path, default="-")
    parser.add_argument("-o", "--output-file", dest="output_file_path",
                        help="The KGTK output file. (default=%(default)s).", type=Path, default="-")
    parser.add_argument(      "--reified-file", dest="reified_file_path",
                              help="A KGTK output file that will contain only the reified values. (default=%(default)s).", type=Path, default=None)
    parser.add_argument(      "--unreified-file", dest="unreified_file_path",
                              help="A KGTK output file that will contain only the unreified values. (default=%(default)s).", type=Path, default=None)
    parser.add_argument(      "--uninvolved-file", dest="uninvolved_file_path",
                              help="A KGTK output file that will contain only the uninvolved input records. (default=%(default)s).", type=Path, default=None)
    parser.add_argument(      "--output-format", dest="output_format", help="The file format (default=kgtk)", type=str,
                              choices=KgtkWriter.OUTPUT_FORMAT_CHOICES)
    KgtkUnreifyValues.add_arguments(parser)
    KgtkReader.add_debug_arguments(parser)
    KgtkReaderOptions.add_arguments(parser, mode_options=False, expert=True)
    KgtkValueOptions.add_arguments(parser)
    args: Namespace = parser.parse_args()
    error_file: typing.TextIO = sys.stdout if args.errors_to_stdout else sys.stderr
    # Build the option structures.
    reader_options: KgtkReaderOptions = KgtkReaderOptions.from_args(args)
    value_options: KgtkValueOptions = KgtkValueOptions.from_args(args)
    # Show the final option structures for debugging and documentation.
    if args.show_options:
        # Bug fix: this previously referenced an undefined name
        # `input_file_paths`, raising a NameError under --show-options.
        print("--input-file=%s" % str(args.input_file_path), file=error_file, flush=True)
        print("--output-file=%s" % str(args.output_file_path), file=error_file, flush=True)
        if args.reified_file_path is not None:
            print("--reified-file=%s" % str(args.reified_file_path), file=error_file, flush=True)
        if args.unreified_file_path is not None:
            print("--unreified-file=%s" % str(args.unreified_file_path), file=error_file, flush=True)
        if args.uninvolved_file_path is not None:
            print("--uninvolved-file=%s" % str(args.uninvolved_file_path), file=error_file, flush=True)
        if args.output_format is not None:
            print("--output-format=%s" % args.output_format, file=error_file, flush=True)
        if args.trigger_label_value is not None:
            print("--trigger-label=%s" % args.trigger_label_value, file=error_file, flush=True)
        if args.trigger_node2_value is not None:
            print("--trigger-node2=%s" % args.trigger_node2_value, file=error_file, flush=True)
        if args.value_label_value is not None:
            print("--value-label=%s" % args.value_label_value, file=error_file, flush=True)
        if args.old_label_value is not None:
            print("--old-label=%s" % args.old_label_value, file=error_file, flush=True)
        if args.new_label_value is not None:
            print("--new-label=%s" % args.new_label_value, file=error_file, flush=True)
        print("--allow-multiple-values=%s" % str(args.allow_multiple_values), file=error_file, flush=True)
        print("--allow-extra-columns=%s" % str(args.allow_extra_columns), file=error_file, flush=True)
        reader_options.show(out=error_file)
        value_options.show(out=error_file)
    kuv: KgtkUnreifyValues = KgtkUnreifyValues(
        input_file_path=args.input_file_path,
        output_file_path=args.output_file_path,
        reified_file_path=args.reified_file_path,
        unreified_file_path=args.unreified_file_path,
        uninvolved_file_path=args.uninvolved_file_path,
        trigger_label_value=args.trigger_label_value,
        trigger_node2_value=args.trigger_node2_value,
        value_label_value=args.value_label_value,
        old_label_value=args.old_label_value,
        new_label_value=args.new_label_value,
        allow_multiple_values=args.allow_multiple_values,
        allow_extra_columns=args.allow_extra_columns,
        reader_options=reader_options,
        value_options=value_options,
        output_format=args.output_format,
        error_file=error_file,
        verbose=args.verbose,
        very_verbose=args.very_verbose,
    )
    kuv.process()
if __name__ == "__main__":
    main()
| StarcoderdataPython |
3302962 | from __future__ import unicode_literals
from django.conf import settings
from django.db.models import F, Case, When
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from geotrek.api.v2 import serializers as api_serializers, \
viewsets as api_viewsets
from geotrek.api.v2.functions import Transform, Buffer, GeometryType, Area
from geotrek.sensitivity import models as sensitivity_models
from ..filters import GeotrekQueryParamsFilter, GeotrekInBBoxFilter, GeotrekSensitiveAreaFilter
class SensitiveAreaViewSet(api_viewsets.GeotrekViewset):
    """Read-only API endpoint serving published sensitive areas.

    Responses carry a permissive CORS header.  The 'bubble' query parameter
    switches both the serializer and the geometry handling (raw geometry vs.
    species-radius buffers around points).
    """
    filter_backends = (
        DjangoFilterBackend,
        GeotrekQueryParamsFilter,
        GeotrekInBBoxFilter,
        GeotrekSensitiveAreaFilter,
    )
    permission_classes = [IsAuthenticatedOrReadOnly]
    authentication_classes = []
    # Bounding-box filtering runs against the reprojected geometry annotation.
    bbox_filter_field = 'geom2d_transformed'
    bbox_filter_include_overlapping = True
    def get_serializer_class(self):
        """Pick the serializer from the 'bubble', 'format' and 'dim' params."""
        if 'bubble' in self.request.GET:
            base_serializer_class = api_serializers.BubbleSensitiveAreaListSerializer
        else:
            base_serializer_class = api_serializers.SensitiveAreaListSerializer
        format_output = self.request.query_params.get('format', 'json')
        dimension = self.request.query_params.get('dim', '2')
        return api_serializers.override_serializer(format_output, dimension, base_serializer_class)
    def get_queryset(self):
        """Return published areas annotated with an API-SRID 2D geometry."""
        queryset = sensitivity_models.SensitiveArea.objects.existing() \
            .filter(published=True) \
            .select_related('species', 'structure') \
            .prefetch_related('species__practices') \
            .annotate(geom_type=GeometryType(F('geom'))) \
            .order_by('pk')  # Required for reliable pagination
        if 'bubble' in self.request.GET:
            # Bubble mode: serve the raw geometry; the client draws the radius.
            queryset = queryset.annotate(geom2d_transformed=Transform(F('geom'), settings.API_SRID))
        else:
            # Points are buffered by the species radius; polygons pass through.
            queryset = queryset.annotate(geom2d_transformed=Case(
                When(geom_type='POINT', then=Transform(Buffer(F('geom'), F('species__radius'), 4), settings.API_SRID)),
                When(geom_type='POLYGON', then=Transform(F('geom'), settings.API_SRID))
            ))
        # Ensure smaller areas are at the end of the list, ie above bigger areas on the map
        # to ensure we can select every area in case of overlapping
        queryset = queryset.annotate(area=Area('geom2d_transformed')).order_by('-area')
        return queryset
    def list(self, request, *args, **kwargs):
        """List areas, adding a wildcard CORS header to the response."""
        response = super(SensitiveAreaViewSet, self).list(request, *args, **kwargs)
        response['Access-Control-Allow-Origin'] = '*'
        return response
class SportPracticeViewSet(api_viewsets.GeotrekViewset):
    """Read-only endpoint listing sport practices, with CORS enabled."""
    filter_backends = (
        DjangoFilterBackend,
        GeotrekQueryParamsFilter,
    )
    permission_classes = [IsAuthenticatedOrReadOnly]
    serializer_class = api_serializers.SportPracticeListSerializer
    serializer_detail_class = api_serializers.SportPracticeListSerializer
    authentication_classes = []
    def get_queryset(self):
        # pk ordering keeps pagination stable across requests.
        return sensitivity_models.SportPractice.objects.all().order_by('pk')
    def list(self, request, *args, **kwargs):
        resp = super(SportPracticeViewSet, self).list(request, *args, **kwargs)
        resp['Access-Control-Allow-Origin'] = '*'
        return resp
| StarcoderdataPython |
396615 | <reponame>mikimaus78/ml_monorepo<filename>BiBloSA/exp_SC/src/utils/time_counter.py<gh_stars>100-1000
import time
class TimeCounter(object):
    """Accumulates wall-clock timings for batches and epochs."""
    def __init__(self):
        self.data_round = 0
        self.global_training_time = 0  # total seconds across all batches
        self.epoch_time_list = []
        self.batch_time_list = []
        self.start_time = None  # set by add_start, cleared by add_stop
    def add_start(self):
        """Mark the beginning of a timed interval."""
        self.start_time = time.time()
    def add_stop(self):
        """Close the interval opened by add_start and record its duration."""
        assert self.start_time is not None
        elapsed = time.time() - self.start_time
        self.batch_time_list.append(elapsed)
        self.global_training_time += elapsed
        self.start_time = None
    def update_data_round(self, data_round):
        """Roll batch timings into a new epoch when the round number changes.

        Returns (epoch_time, mean_epoch_time) on a round change, or
        (None, None) when *data_round* equals the current round.
        """
        if self.data_round == data_round:
            return None, None
        this_epoch_time = sum(self.batch_time_list)
        self.epoch_time_list.append(this_epoch_time)
        self.batch_time_list = []
        self.data_round = data_round
        mean_epoch = (1.0 * sum(self.epoch_time_list) / len(self.epoch_time_list)
                      if len(self.epoch_time_list) > 0 else 0)
        return this_epoch_time, mean_epoch
396960 | <filename>connector/discord/discord_bot_connector.py
import os
import time
import requests
import discord
import logging
# Bot credentials come from the environment; client.run() below will fail
# if DISCORD_TOKEN is unset.
TOKEN = os.environ.get("DISCORD_TOKEN")
client = discord.Client()
def handle_command(user_id, user_entry, user_chan):
    """Forward a user message to the NLP service and return its reply.

    Falls back to a canned apology when the service is unreachable, returns
    malformed JSON, or omits the expected response fields.
    """
    print(user_id, user_entry, user_chan)
    response = "Hum ... I can't access to natural language processing service. :robot_face:"
    try:
        r = requests.get('http://nlp:5000/api/message/' + user_id + '/' + user_chan + '/' + user_entry + '/').json()
        if r and 'response' in r and r['response']['message']:
            response = r['response']['message']
    except requests.exceptions.RequestException:
        # Previously only ValueError was caught, so any network failure
        # crashed the message handler instead of degrading gracefully.
        print("chat_response: can't reach nlp api")
    except (ValueError, KeyError, TypeError):
        print("chat_response: can't decode json from nlp api")
    return response
@client.event
async def on_message(message):
    """Answer private messages from other users via the NLP backend."""
    # Ignore the bot's own messages and anything outside a private channel.
    if message.author == client.user or not message.channel.is_private:
        return
    reply = handle_command(message.author.id, message.content, message.channel.id)
    await client.send_message(message.channel, reply)
@client.event
async def on_ready():
    """Log the bot identity once the Discord connection is established."""
    for line in ('Logged in as', client.user.name, client.user.id, '------'):
        print(line)
# Blocks forever, running the Discord event loop.
client.run(TOKEN)
| StarcoderdataPython |
11213396 | import os
import sys
import random
import re
import copy
import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import logging
import datetime as dt
from math import radians, cos, sin, asin, sqrt
from datetime import datetime,timedelta
from objects.objects import Cluster,Order,Vehicle,Transition,Grid
from config.setting import *
from preprocessing.readfiles import *
###########################################################################
class Simulation(object):
    """
    Simulator for urban ride-hailing vehicle traffic.

    The day is divided into fixed time slots.  At the start of each slot the
    system state is updated (vehicle arrivals, completed orders).  The orders
    that begin inside the slot are then matched to the nearest idle vehicle:
    a match fails (the order is marked Reject) when no idle vehicle exists or
    the nearest one would exceed the pickup time limit; otherwise the chosen
    vehicle is removed from its cluster's idle list, drives the shortest
    road-network path to the pickup point, and is scheduled to arrive in the
    cluster of the order's destination.  This repeats until every order of
    the current slot has been handled.

    After the matching phase, a dispatch algorithm of your choosing may move
    idle vehicles from their current clusters to clusters expected to need
    more supply in future slots.
    """
    def __init__(self,ClusterMode,DemandPredictionMode,
                 DispatchMode,VehiclesNumber,TimePeriods,LocalRegionBound,
                 SideLengthMeter,VehiclesServiceMeter,
                 NeighborCanServer,FocusOnLocalRegion):
        """Configure the simulator; heavy data loading happens in Reload().

        LocalRegionBound is (west, east, south, north) in map coordinates.
        """
        #Component
        self.DispatchModule = None
        self.DemandPredictorModule = None
        #Statistical variables
        self.OrderNum = 0
        self.RejectNum = 0
        self.DispatchNum = 0
        self.TotallyDispatchCost = 0
        self.TotallyWaitTime = 0
        self.TotallyUpdateTime = dt.timedelta()
        self.TotallyRewardTime = dt.timedelta()
        self.TotallyNextStateTime = dt.timedelta()
        self.TotallyLearningTime = dt.timedelta()
        self.TotallyDispatchTime = dt.timedelta()
        self.TotallyMatchTime = dt.timedelta()
        self.TotallyDemandPredictTime = dt.timedelta()
        #Data variable
        self.Clusters = None
        self.Orders = None
        self.Vehicles = None
        self.Map = None
        self.Node = None
        self.NodeIDList = None
        self.NodeID2Cluseter = {}
        self.NodeID2NodesLocation = {}
        self.TransitionTempPool = []
        self.MapWestBound = LocalRegionBound[0]
        self.MapEastBound = LocalRegionBound[1]
        self.MapSouthBound = LocalRegionBound[2]
        self.MapNorthBound = LocalRegionBound[3]
        #Weather data
        # Hard-coded daily weather features, later normalized to [0,1].
        # NOTE(review): WeatherType has 60 entries while the other arrays
        # have 30 — presumably two months vs. one; confirm intended.
        #------------------------------------------
        self.WeatherType = np.array([2,1,1,1,1,0,1,2,1,1,3,3,3,3,3,
                                     3,3,0,0,0,2,1,1,1,1,0,1,0,1,1,
                                     1,3,1,1,0,2,2,1,0,0,2,3,2,2,2,
                                     1,2,2,2,1,0,0,2,2,2,1,2,1,1,1])
        self.MinimumTemperature = np.array([12,12,11,12,14,12,9,8,7,8,9,7,9,10,11,
                                            12,13,13,11,11,11,6,5,5,4,4,6,6,5,6])
        self.MaximumTemperature = np.array([17,19,19,20,20,19,13,12,13,15,16,18,18,19,19,
                                            18,20,21,19,20,19,12,9,9,10,13,12,12,13,15])
        self.WindDirection = np.array([1,2,0,2,7,6,3,2,3,7,1,0,7,1,7,
                                       0,0,7,0,7,7,7,0,7,5,7,6,6,7,7])
        self.WindPower = np.array([1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
                                   1,1,1,1,1,1,2,1,1,1,1,1,1,1,1])
        # Normaliztion_1D (sic — name defined elsewhere in this class).
        self.WeatherType = self.Normaliztion_1D(self.WeatherType)
        self.MinimumTemperature = self.Normaliztion_1D(self.MinimumTemperature)
        self.MaximumTemperature = self.Normaliztion_1D(self.MaximumTemperature)
        self.WindDirection = self.Normaliztion_1D(self.WindDirection)
        self.WindPower = self.Normaliztion_1D(self.WindPower)
        #------------------------------------------
        #Input parameters
        self.ClusterMode = ClusterMode
        self.DispatchMode = DispatchMode
        self.VehiclesNumber = VehiclesNumber
        self.TimePeriods = TimePeriods
        self.LocalRegionBound = LocalRegionBound
        self.SideLengthMeter = SideLengthMeter
        self.VehiclesServiceMeter = VehiclesServiceMeter
        # Derived grid/cluster dimensions, filled by CalculateTheScaleOfDivision().
        self.ClustersNumber = None
        self.NumGrideWidth = None
        self.NumGrideHeight = None
        self.NeighborServerDeepLimit = None
        #Control variable
        self.NeighborCanServer = NeighborCanServer
        self.FocusOnLocalRegion = FocusOnLocalRegion
        #Process variable
        self.RealExpTime = None
        self.NowOrder = None
        self.step = None
        self.Episode = 0
        self.CalculateTheScaleOfDivision()
        #Demand predictor variable
        self.DemandPredictionMode = DemandPredictionMode
        self.SupplyExpect = None
        return
def Reload(self,OrderFileDate="1101"):
    """
    Read a new day's orders into the simulator and reset the per-episode
    counters so another experiment can run on the same road network.

    Parameters
    ----------
    OrderFileDate : str
        Month+day suffix ("MMDD") of the 2016 order CSV to load.
    """
    # NOTE(review): message is missing a space before "and".
    print("Load order " + OrderFileDate + "and reset the experimental environment")
    # Per-episode statistics
    self.OrderNum = 0
    self.RejectNum = 0
    self.DispatchNum = 0
    self.TotallyDispatchCost = 0
    self.TotallyWaitTime = 0
    # Accumulated wall-clock timers for each pipeline stage
    self.TotallyUpdateTime = dt.timedelta()
    self.TotallyNextStateTime = dt.timedelta()
    self.TotallyLearningTime = dt.timedelta()
    self.TotallyDispatchTime = dt.timedelta()
    self.TotallyMatchTime = dt.timedelta()
    self.TotallyDemandPredictTime = dt.timedelta()
    self.Orders = None
    self.TransitionTempPool.clear()
    self.RealExpTime = None
    self.NowOrder = None
    self.step = None
    #read orders
    #-----------------------------------------
    if self.FocusOnLocalRegion == False:
        # Whole-city run: read the raw order file directly.
        Orders = ReadOrder(input_file_path="./data/test/order_2016"+ str(OrderFileDate) + ".csv")
        self.Orders = [Order(i[0],i[1],self.NodeIDList.index(i[2]),self.NodeIDList.index(i[3]),i[1]+PICKUPTIMEWINDOW,None,None,None) for i in Orders]
    else:
        # Local-region run: prefer a cached, pre-filtered order file.
        SaveLocalRegionBoundOrdersPath = "./data/test/order_2016" + str(self.LocalRegionBound) + str(OrderFileDate) + ".csv"
        if os.path.exists(SaveLocalRegionBoundOrdersPath):
            Orders = ReadResetOrder(input_file_path=SaveLocalRegionBoundOrdersPath)
            # Timestamps come back as strings from the cached CSV.
            self.Orders = [Order(i[0],string_pdTimestamp(i[1]),self.NodeIDList.index(i[2]),self.NodeIDList.index(i[3]),string_pdTimestamp(i[1])+PICKUPTIMEWINDOW,None,None,None) for i in Orders]
        else:
            Orders = ReadOrder(input_file_path="./data/test/order_2016"+ str(OrderFileDate) + ".csv")
            self.Orders = [Order(i[0],i[1],self.NodeIDList.index(i[2]),self.NodeIDList.index(i[3]),i[1]+PICKUPTIMEWINDOW,None,None,None) for i in Orders]
            #Limit order generation area
            #-------------------------------
            for i in self.Orders[:]:
                if self.IsOrderInLimitRegion(i) == False:
                    self.Orders.remove(i)
            #-------------------------------
            # Persist the filtered order set so future reloads skip the filter.
            LegalOrdersSet = []
            for i in self.Orders:
                LegalOrdersSet.append(i.ID)
            OutBoundOrdersSet = []
            for i in range(len(Orders)):
                if not i in LegalOrdersSet:
                    OutBoundOrdersSet.append(i)
            Orders = pd.DataFrame(Orders)
            Orders = Orders.drop(OutBoundOrdersSet)
            Orders.to_csv(SaveLocalRegionBoundOrdersPath,index=0)
    #-----------------------------------------
    #Rename orders'ID so ids are consecutive after filtering
    #-------------------------------
    for i in range(len(self.Orders)):
        self.Orders[i].ID = i
    #-------------------------------
    #Calculate the value of all orders in advance
    #-------------------------------
    for EachOrder in self.Orders:
        EachOrder.OrderValue = self.RoadCost(EachOrder.PickupPoint,EachOrder.DeliveryPoint)
    #-------------------------------
    #Reset the Clusters and Vehicles
    #-------------------------------
    for i in self.Clusters:
        i.Reset()
    for i in self.Vehicles:
        i.Reset()
    self.InitVehiclesIntoCluster()
    #-------------------------------
    return
def Reset(self):
    """
    Restore the simulator to its pre-experiment state without reloading
    order data: zero all counters and timers, clear the transition pool,
    reset every order/cluster/vehicle and re-scatter the vehicles.
    """
    print("Reset the experimental environment")
    # Per-episode scalar statistics
    for counter in ("OrderNum", "RejectNum", "DispatchNum",
                    "TotallyDispatchCost", "TotallyWaitTime"):
        setattr(self, counter, 0)
    # Accumulated wall-clock timers
    for timer in ("TotallyUpdateTime", "TotallyNextStateTime",
                  "TotallyLearningTime", "TotallyDispatchTime",
                  "TotallyMatchTime", "TotallyDemandPredictTime"):
        setattr(self, timer, dt.timedelta())
    self.TransitionTempPool.clear()
    self.RealExpTime = None
    self.NowOrder = None
    self.step = None
    #Reset the Orders and Clusters and Vehicles
    #-------------------------------
    for order in self.Orders:
        order.Reset()
    for cluster in self.Clusters:
        cluster.Reset()
    for vehicle in self.Vehicles:
        vehicle.Reset()
    self.InitVehiclesIntoCluster()
    #-------------------------------
    return
def InitVehiclesIntoCluster(self):
    """
    Scatter every vehicle onto a random road node and register it as idle
    in the cluster that owns the node.
    """
    print("Initialization Vehicles into Clusters or Grids")
    for vehicle in self.Vehicles:
        # Re-draw until the sampled node is covered by a cluster.
        while True:
            node = random.choice(range(len(self.Node)))
            if node in self.NodeID2Cluseter:
                vehicle.LocationNode = node
                vehicle.Cluster = self.NodeID2Cluseter[node]
                vehicle.Cluster.IdleVehicles.append(vehicle)
                break
def LoadDispatchComponent(self,DispatchModule):
    """Attach the dispatch strategy object consulted by DispatchFunction."""
    self.DispatchModule = DispatchModule
def RoadCost(self,start,end):
    """
    Travel cost from node *start* to node *end*, truncated to an int.

    Reads the precomputed all-pairs cost matrix self.Map.
    """
    cost = self.Map[start][end]
    return int(cost)
def haversine(self, lon1, lat1, lon2, lat2):
    """
    Calculate the great circle distance between two points on the earth
    (specified in decimal degrees).

    Returns
    -------
    float
        Distance in metres.
    """
    # Local import makes the method self-contained; the original mixed
    # np.sin with star-imported math names (cos/sin/asin/sqrt), which is
    # fragile and inconsistent.
    from math import radians, sin, cos, asin, sqrt
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    #haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    r = 6371  # mean Earth radius in kilometres
    return c * r * 1000  # convert km -> m
def CalculateTheScaleOfDivision(self):
    """
    Derive the grid discretisation from the map bounds and the requested
    grid side length: number of grid columns/rows, the neighbour-search
    depth implied by the vehicle service radius, and the total cell count.
    """
    # Midpoints of the map. Bug fix: the original computed
    # (East - West) / 2 and (North - South) / 2, i.e. the half-span, not
    # the average coordinate, which skewed the haversine distances below.
    AverageLongitude = (self.MapEastBound + self.MapWestBound) / 2
    AverageLatitude = (self.MapNorthBound + self.MapSouthBound) / 2
    # East-west extent measured along the mid-latitude; north-south extent
    # measured along the mid-longitude.
    self.NumGrideWidth = int(self.haversine(self.MapWestBound,AverageLatitude,self.MapEastBound,AverageLatitude) / self.SideLengthMeter + 1)
    self.NumGrideHeight = int(self.haversine(AverageLongitude,self.MapSouthBound,AverageLongitude,self.MapNorthBound) / self.SideLengthMeter + 1)
    # How many rings of neighbouring grids the service radius covers.
    self.NeighborServerDeepLimit = int((self.VehiclesServiceMeter - (0.5 * self.SideLengthMeter))//self.SideLengthMeter)
    self.ClustersNumber = self.NumGrideWidth * self.NumGrideHeight
    print("----------------------------")
    print("Map extent",self.LocalRegionBound)
    print("The width of each grid",self.SideLengthMeter,"meters")
    print("Vehicle service range",self.VehiclesServiceMeter,"meters")
    print("Number of grids in east-west direction",self.NumGrideWidth)
    print("Number of grids in north-south direction",self.NumGrideHeight)
    print("Number of grids",self.ClustersNumber)
    print("----------------------------")
    return
def CreateAllInstantiate(self,OrderFileDate="1101"):
    """
    Load every data file for the given date and build the complete
    simulation state: road nodes, clusters/grids, orders and vehicles.
    """
    print("Read all files")
    self.Node,self.NodeIDList,Orders,Vehicles,self.Map = ReadAllFiles(OrderFileDate)
    # Grid mode partitions space uniformly; any other mode loads a
    # precomputed clustering from disk.
    if self.ClusterMode != "Grid":
        print("Create Clusters")
        self.Clusters = self.CreateCluster()
    elif self.ClusterMode == "Grid":
        print("Create Grids")
        self.Clusters = self.CreateGrid()
    #Construct NodeID to Cluseter map for Fast calculation
    NodeID = self.Node['NodeID'].values
    for i in range(len(NodeID)):
        # Replace raw node ids with their index in NodeIDList.
        NodeID[i] = self.NodeIDList.index(NodeID[i])
    for i in NodeID:
        for j in self.Clusters:
            for k in j.Nodes:
                if i == k[0]:
                    self.NodeID2Cluseter[i] = j
    print("Create Orders set")
    self.Orders = [Order(i[0],i[1],self.NodeIDList.index(i[2]),self.NodeIDList.index(i[3]),i[1]+PICKUPTIMEWINDOW,None,None,None) for i in Orders]
    #Limit order generation area
    #-------------------------------
    if self.FocusOnLocalRegion == True:
        print("Remove out-of-bounds Orders")
        for i in self.Orders[:]:
            if self.IsOrderInLimitRegion(i) == False:
                self.Orders.remove(i)
        # Re-number so order ids stay consecutive after filtering.
        for i in range(len(self.Orders)):
            self.Orders[i].ID = i
    #-------------------------------
    #Calculate the value of all orders in advance
    #-------------------------------
    print("Pre-calculated order value")
    for EachOrder in self.Orders:
        EachOrder.OrderValue = self.RoadCost(EachOrder.PickupPoint,EachOrder.DeliveryPoint)
    #-------------------------------
    #Select number of vehicles
    #-------------------------------
    Vehicles = Vehicles[:self.VehiclesNumber]
    #-------------------------------
    print("Create Vehicles set")
    self.Vehicles = [Vehicle(i[0],self.NodeIDList.index(i[1]),None,[],None) for i in Vehicles]
    self.InitVehiclesIntoCluster()
    return
def IsOrderInLimitRegion(self,Order):
    """
    True iff both the pickup and delivery nodes of *Order* have known
    coordinates, i.e. lie inside the simulated region.
    """
    known = self.NodeID2NodesLocation
    return Order.PickupPoint in known and Order.DeliveryPoint in known
def IsNodeInLimitRegion(self,TempNodeList):
    """
    True iff the first node in *TempNodeList* — a ((lon, lat), id)
    pair list — lies within self.LocalRegionBound
    (west, east, south, north), boundaries inclusive.
    """
    lon = TempNodeList[0][0]
    lat = TempNodeList[0][1]
    bound = self.LocalRegionBound
    if not (bound[0] <= lon <= bound[1]):
        return False
    if not (bound[2] <= lat <= bound[3]):
        return False
    return True
def CreateGrid(self):
    """
    Partition the map (or the local region) into a NumGrideWidth x
    NumGrideHeight lattice of Grid objects, assign every road node to its
    cell, and link each grid to its (up to 8) lattice neighbours.

    Returns
    -------
    list
        All Grid objects, row-major: ID = NumGrideWidth * row + col.
    """
    NumGrideHeight = self.NumGrideHeight
    NumGride = self.NumGrideWidth * self.NumGrideHeight
    NodeLocation = self.Node[['Longitude','Latitude']].values.round(7)
    NodeID = self.Node['NodeID'].values.astype('int64')
    #Select small area simulation
    #----------------------------------------------------
    if self.FocusOnLocalRegion == True:
        # Drop nodes outside LocalRegionBound before gridding.
        NodeLocation = NodeLocation.tolist()
        NodeID = NodeID.tolist()
        TempNodeList = []
        for i in range(len(NodeLocation)):
            TempNodeList.append((NodeLocation[i],NodeID[i]))
        for i in TempNodeList[:]:
            if self.IsNodeInLimitRegion(i) == False:
                TempNodeList.remove(i)
        NodeLocation.clear()
        NodeID.clear()
        for i in TempNodeList:
            NodeLocation.append(i[0])
            NodeID.append(i[1])
        NodeLocation = np.array(NodeLocation)
    #--------------------------------------------------
    # (lon, lat) -> index of the node in NodeIDList
    NodeSet = {}
    for i in range(len(NodeID)):
        NodeSet[(NodeLocation[i][0],NodeLocation[i][1])] = self.NodeIDList.index(NodeID[i])
    #Build each grid
    #------------------------------------------------------
    if self.FocusOnLocalRegion == True:
        TotalWidth = self.LocalRegionBound[1] - self.LocalRegionBound[0]
        TotalHeight = self.LocalRegionBound[3] - self.LocalRegionBound[2]
    else:
        TotalWidth = self.MapEastBound - self.MapWestBound
        TotalHeight = self.MapNorthBound - self.MapSouthBound
    IntervalWidth = TotalWidth / self.NumGrideWidth
    IntervalHeight = TotalHeight / self.NumGrideHeight
    AllGrid = [Grid(i,[],[],0,[],{},[]) for i in range(NumGride)]
    for key,value in NodeSet.items():
        NowGridWidthNum = None
        NowGridHeightNum = None
        # Locate this node's column.
        # NOTE(review): comparisons are strict (> and <), so a node lying
        # exactly on a cell boundary matches no cell and hits the
        # Exception below — confirm this never occurs with rounded data.
        for i in range(self.NumGrideWidth):
            if self.FocusOnLocalRegion == True:
                LeftBound = (self.LocalRegionBound[0] + i * IntervalWidth)
                RightBound = (self.LocalRegionBound[0] + (i+1) * IntervalWidth)
            else:
                LeftBound = (self.MapWestBound + i * IntervalWidth)
                RightBound = (self.MapWestBound + (i+1) * IntervalWidth)
            if key[0] > LeftBound and key[0] < RightBound:
                NowGridWidthNum = i
                break
        # Locate this node's row.
        for i in range(self.NumGrideHeight):
            if self.FocusOnLocalRegion == True:
                DownBound = (self.LocalRegionBound[2] + i * IntervalHeight)
                UpBound = (self.LocalRegionBound[2] + (i+1) * IntervalHeight)
            else:
                DownBound = (self.MapSouthBound + i * IntervalHeight)
                UpBound = (self.MapSouthBound + (i+1) * IntervalHeight)
            if key[1] > DownBound and key[1] < UpBound:
                NowGridHeightNum = i
                break
        if NowGridWidthNum == None or NowGridHeightNum == None :
            print(key[0],key[1])
            raise Exception('error')
        else:
            AllGrid[self.NumGrideWidth * NowGridHeightNum + NowGridWidthNum].Nodes.append((value,(key[0],key[1])))
    #------------------------------------------------------
    # Remember each node's coordinates for the drawing helpers.
    for i in AllGrid:
        for j in i.Nodes:
            self.NodeID2NodesLocation[j[0]] = j[1]
    #Add neighbors to each grid
    #------------------------------------------------------
    for i in AllGrid:
        #Bound Check: disable neighbour directions that fall off the lattice
        #----------------------------
        UpNeighbor = True
        DownNeighbor = True
        LeftNeighbor = True
        RightNeighbor = True
        LeftUpNeighbor = True
        LeftDownNeighbor = True
        RightUpNeighbor = True
        RightDownNeighbor = True
        if i.ID >= self.NumGrideWidth * (self.NumGrideHeight - 1):
            # top row
            UpNeighbor = False
            LeftUpNeighbor = False
            RightUpNeighbor = False
        if i.ID < self.NumGrideWidth:
            # bottom row
            DownNeighbor = False
            LeftDownNeighbor = False
            RightDownNeighbor = False
        if i.ID % self.NumGrideWidth == 0:
            # leftmost column
            LeftNeighbor = False
            LeftUpNeighbor = False
            LeftDownNeighbor = False
        if (i.ID+1) % self.NumGrideWidth == 0:
            # rightmost column
            RightNeighbor = False
            RightUpNeighbor = False
            RightDownNeighbor = False
        #----------------------------
        #Add all neighbors
        #----------------------------
        if UpNeighbor:
            i.Neighbor.append(AllGrid[i.ID+self.NumGrideWidth])
        if DownNeighbor:
            i.Neighbor.append(AllGrid[i.ID-self.NumGrideWidth])
        if LeftNeighbor:
            i.Neighbor.append(AllGrid[i.ID-1])
        if RightNeighbor:
            i.Neighbor.append(AllGrid[i.ID+1])
        if LeftUpNeighbor:
            i.Neighbor.append(AllGrid[i.ID+self.NumGrideWidth-1])
        if LeftDownNeighbor:
            i.Neighbor.append(AllGrid[i.ID-self.NumGrideWidth-1])
        if RightUpNeighbor:
            i.Neighbor.append(AllGrid[i.ID+self.NumGrideWidth+1])
        if RightDownNeighbor:
            i.Neighbor.append(AllGrid[i.ID-self.NumGrideWidth+1])
    #----------------------------
    #You can draw every grid(red) and neighbor(random color) here
    #----------------------------------------------
    '''
    for i in range(len(AllGrid)):
        print("Grid ID ",i,AllGrid[i])
        print(AllGrid[i].Neighbor)
        self.DrawOneCluster(Cluster = AllGrid[i],random = False,show = False)
        for j in AllGrid[i].Neighbor:
            if j.ID == AllGrid[i].ID :
                continue
            print(j.ID)
            self.DrawOneCluster(Cluster = j,random = True,show = False)
        plt.xlim(104.007, 104.13)
        plt.ylim(30.6119, 30.7092)
        plt.show()
    '''
    #----------------------------------------------
    return AllGrid
def CreateCluster(self):
    """
    Load a precomputed node clustering from disk, build the Cluster
    objects, and connect each cluster to its nearest neighbours by mean
    road-network distance (cached to a Neighbor CSV on first run).

    Returns
    -------
    list
        All Cluster objects with Nodes and Neighbor populated.
    """
    NodeLocation = self.Node[['Longitude','Latitude']].values.round(7)
    NodeID = self.Node['NodeID'].values.astype('int64')
    #Set Nodes In Limit Region
    #----------------------------------------
    if self.FocusOnLocalRegion == True:
        print("Remove out-of-bounds Nodes")
        NodeLocation = NodeLocation.tolist()
        NodeID = NodeID.tolist()
        TempNodeList = []
        for i in range(len(NodeLocation)):
            TempNodeList.append((NodeLocation[i],NodeID[i]))
        for i in TempNodeList[:]:
            if self.IsNodeInLimitRegion(i) == False:
                TempNodeList.remove(i)
        NodeLocation.clear()
        NodeID.clear()
        for i in TempNodeList:
            #NodeLocation.append(i[0])
            NodeLocation.append(i[0])
            NodeID.append(i[1])
        NodeLocation = np.array(NodeLocation)
    #----------------------------------------
    # (lon, lat) -> raw node id
    N = {}
    for i in range(len(NodeID)):
        N[(NodeLocation[i][0],NodeLocation[i][1])] = NodeID[i]
    Clusters=[Cluster(i,[],[],0,[],{},[]) for i in range(self.ClustersNumber)]
    ClusterPath = './data/'+str(self.LocalRegionBound)+str(self.ClustersNumber)+str(self.ClusterMode)+'Clusters.csv'
    if os.path.exists(ClusterPath):
        # Stream the per-node cluster labels in chunks, then flatten.
        reader = pd.read_csv(ClusterPath,chunksize = 1000)
        label_pred = []
        for chunk in reader:
            label_pred.append(chunk)
        label_pred = pd.concat(label_pred)
        label_pred = label_pred.values
        label_pred = label_pred.flatten()
        label_pred = label_pred.astype('int64')
    else:
        raise Exception('Cluster Path not found')
    #Loading Clustering results into simulator
    print("Loading Clustering results")
    for i in range(self.ClustersNumber):
        temp = NodeLocation[label_pred == i]
        for j in range(len(temp)):
            Clusters[i].Nodes.append((self.NodeIDList.index(N[(temp[j,0],temp[j,1])]),(temp[j,0],temp[j,1])))
    SaveClusterNeighborPath = './data/'+str(self.LocalRegionBound)+str(self.ClustersNumber)+str(self.ClusterMode)+'Neighbor.csv'
    if not os.path.exists(SaveClusterNeighborPath):
        print("Computing Neighbor relationships between clusters")
        # For each cluster pair, average the road cost over all node pairs.
        AllNeighborList = []
        for i in Clusters:
            NeighborList = []
            for j in Clusters:
                if i == j:
                    continue
                else:
                    TempSumCost = 0
                    for k in i.Nodes:
                        for l in j.Nodes:
                            TempSumCost += self.RoadCost(k[0],l[0])
                    # Empty clusters get a sentinel "infinite" distance.
                    if (len(i.Nodes)*len(j.Nodes)) == 0:
                        RoadNetworkDistance = 99999
                    else:
                        RoadNetworkDistance = TempSumCost / (len(i.Nodes)*len(j.Nodes))
                    NeighborList.append((j,RoadNetworkDistance))
            NeighborList.sort(key=lambda X: X[1])
            AllNeighborList.append([])
            for j in NeighborList:
                AllNeighborList[-1].append((j[0].ID,j[1]))
        AllNeighborList = pd.DataFrame(AllNeighborList)
        AllNeighborList.to_csv(SaveClusterNeighborPath,header=0,index=0) # do not write the header row
        print("Save the Neighbor relationship records to: "+SaveClusterNeighborPath)
    print("Load Neighbor relationship records")
    reader = pd.read_csv(SaveClusterNeighborPath,header = None,chunksize = 1000)
    NeighborList = []
    for chunk in reader:
        NeighborList.append(chunk)
    NeighborList = pd.concat(NeighborList)
    NeighborList = NeighborList.values
    ID2Cluseter = {}
    for i in Clusters:
        ID2Cluseter[i.ID] = i
    # A cluster keeps its 4 nearest neighbours, plus any further ones
    # whose mean distance is below this threshold.
    ConnectedThreshold = 15
    for i in range(len(Clusters)):
        for j in NeighborList[i]:
            # NOTE(review): eval() parses the "(id, dist)" tuple string from
            # the CSV; safe only because the file is self-generated —
            # consider ast.literal_eval.
            temp = eval(j)
            if len(Clusters[i].Neighbor) < 4:
                Clusters[i].Neighbor.append(ID2Cluseter[temp[0]])
            elif temp[1] < ConnectedThreshold:
                Clusters[i].Neighbor.append(ID2Cluseter[temp[0]])
            else:
                continue
    del ID2Cluseter
    #self.NodeID2NodesLocation = {}
    print("Store node coordinates for drawing")
    for i in Clusters:
        for j in i.Nodes:
            self.NodeID2NodesLocation[j[0]] = j[1]
    #You can draw every cluster(red) and neighbor(random color) here
    #----------------------------------------------
    '''
    for i in range(len(Clusters)):
        print("Cluster ID ",i,Clusters[i])
        print(Clusters[i].Neighbor)
        self.DrawOneCluster(Cluster = Clusters[i],random = False,show = False)
        for j in Clusters[i].Neighbor:
            if j.ID == Clusters[i].ID :
                continue
            print(j.ID)
            self.DrawOneCluster(Cluster = j,random = True,show = False)
        plt.xlim(104.007, 104.13)
        plt.ylim(30.6119, 30.7092)
        plt.show()
    '''
    #----------------------------------------------
    return Clusters
def LoadDemandPrediction(self):
    """
    Instantiate and load the demand predictor selected by
    self.DemandPredictionMode: 'None'/'Training' -> no predictor,
    'HA' -> historical-average model loaded from ./model/.

    Raises
    ------
    Exception
        If the mode name is unknown or the model file is missing.
    """
    if self.DemandPredictionMode == 'None' or self.DemandPredictionMode == "Training":
        self.DemandPredictorModule = None
        return
    elif self.DemandPredictionMode == 'HA':
        self.DemandPredictorModule = HAPredictionModel()
        DemandPredictionModelPath = "./model/"+str(self.DemandPredictionMode)+"PredictionModel"+str(self.ClusterMode)+str(self.SideLengthMeter)+str(self.LocalRegionBound)+".csv"
    #You can extend the predictor here
    #elif self.DemandPredictionMode == 'Your predictor name':
    else:
        raise Exception('DemandPredictionMode Name error')
    if os.path.exists(DemandPredictionModelPath):
        self.DemandPredictorModule.Load(DemandPredictionModelPath)
    else:
        print(DemandPredictionModelPath)
        raise Exception("No Demand Prediction Model")
    return
def Normaliztion_1D(self,arr):
    """
    Min-max normalise a 1-D array into [0, 1].

    Robustness fix: when all elements are equal the original divided by a
    zero range (inf/nan with numpy operands); now a zero array of the same
    shape is returned instead.

    Parameters
    ----------
    arr : array-like
        1-D numeric data.

    Returns
    -------
    numpy.ndarray
        Float array of the same shape, scaled so min -> 0.0 and max -> 1.0.
    """
    arr = np.asarray(arr, dtype=float)
    arrmin = arr.min()
    span = arr.max() - arrmin
    if span == 0:
        # Degenerate input: every element identical.
        return np.zeros_like(arr)
    return (arr - arrmin) / span
#Visualization tools
#-----------------------------------------------
def randomcolor(self):
    """
    Return a random hex colour string such as '#A3F19C'.

    NOTE: '0' is deliberately absent from the palette, matching the
    original character set.
    """
    palette = ['1','2','3','4','5','6','7','8','9','A','B','C','D','E','F']
    digits = []
    for _ in range(6):
        # Same randint call as the original, so the RNG stream is identical.
        digits.append(palette[random.randint(0, len(palette) - 1)])
    return "#" + "".join(digits)
def DrawAllClusterInternalNodes(self):
    """
    Draw every short road segment (cost <= 3000) with intra-cluster edges
    coloured per cluster and inter-cluster edges in grey, then show the
    plot over the full map bounds.
    """
    # NOTE(review): the trailing comma makes ReadMap(...) a 1-tuple;
    # [0] then unwraps it. Works, but likely unintentional.
    ConnectionMap = ReadMap('./data/Map__.csv'),
    ConnectionMap = ConnectionMap[0]
    # One random colour per cluster, indexed by cluster ID.
    ClusetersColor = []
    for i in range(len(self.Clusters)):
        ClusetersColor.append(self.randomcolor())
    NodeNumber = len(self.Node)
    for i in tqdm(range(NodeNumber)):
        if not i in self.NodeID2NodesLocation:
            continue
        for j in range(NodeNumber):
            if not j in self.NodeID2NodesLocation:
                continue
            if i == j:
                continue
            # Only draw sufficiently short (i.e. connected) segments.
            if ConnectionMap[i][j] <= 3000:
                LX = [self.NodeID2NodesLocation[i][0],self.NodeID2NodesLocation[j][0]]
                LY = [self.NodeID2NodesLocation[i][1],self.NodeID2NodesLocation[j][1]]
                if self.NodeID2Cluseter[i] == self.NodeID2Cluseter[j]:
                    plt.plot(LX,LY,c=ClusetersColor[self.NodeID2Cluseter[i].ID],linewidth=0.8,alpha = 0.5)
                else:
                    plt.plot(LX,LY,c='grey',linewidth=0.5,alpha = 0.4)
    plt.xlim(self.MapWestBound , self.MapEastBound)
    plt.ylim(self.MapSouthBound , self.MapNorthBound)
    plt.title(self.ClusterMode)
    plt.show()
    return
def DrawAllNodes(self):
    """
    Draw every short road segment (cost <= 3000), coloured by the cluster
    of the segment's first endpoint, then show the plot over the full map.
    """
    # NOTE(review): trailing comma creates a 1-tuple, unwrapped by [0].
    ConnectionMap = ReadMap('./data/Map__.csv'),
    ConnectionMap = ConnectionMap[0]
    # One random colour per cluster, indexed by cluster ID.
    ClusetersColor = []
    for i in range(len(self.Clusters)):
        ClusetersColor.append(self.randomcolor())
    NodeNumber = len(self.Node)
    for i in range(NodeNumber):
        if not i in self.NodeID2NodesLocation:
            continue
        for j in range(NodeNumber):
            if not j in self.NodeID2NodesLocation:
                continue
            if i == j:
                continue
            # Only draw sufficiently short (i.e. connected) segments.
            if ConnectionMap[i][j] <= 3000:
                LX = [self.NodeID2NodesLocation[i][0],self.NodeID2NodesLocation[j][0]]
                LY = [self.NodeID2NodesLocation[i][1],self.NodeID2NodesLocation[j][1]]
                plt.plot(LX,LY,c=ClusetersColor[self.NodeID2Cluseter[i].ID],linewidth=0.8,alpha = 0.5)
    plt.xlim(self.MapWestBound , self.MapEastBound)
    plt.ylim(self.MapSouthBound , self.MapNorthBound)
    plt.title(self.ClusterMode)
    plt.show()
    return
def DrawOneCluster(self,Cluster,random=True,show=False):
    """
    Scatter-plot the nodes of one cluster: a random colour when
    random=True, red otherwise; optionally show the figure.

    NOTE: the 'random' parameter shadows the random module inside this
    method (the module is not used here, so this is safe but confusing).
    """
    randomc = self.randomcolor()
    for i in Cluster.Nodes:
        # i = (node index, (lon, lat))
        if random == True:
            plt.scatter(i[1][0],i[1][1],s = 3, c=randomc,alpha = 0.5)
        else :
            plt.scatter(i[1][0],i[1][1],s = 3, c='r',alpha = 0.5)
    if show == True:
        plt.xlim(self.MapWestBound , self.MapEastBound)
        plt.ylim(self.MapSouthBound , self.MapNorthBound)
        plt.show()
def DrawAllVehicles(self):
    """
    Scatter-plot every vehicle's position: blue = idle, red = en route
    with an order, green = repositioning (dispatched while empty).
    """
    for i in self.Clusters:
        for j in i.IdleVehicles:
            res = self.NodeID2NodesLocation[j.LocationNode]
            X = res[0]
            Y = res[1]
            plt.scatter(X,Y,s = 3, c='b',alpha = 0.3)
        # Vehicles currently travelling towards this cluster.
        for key in i.VehiclesArrivetime:
            res = self.NodeID2NodesLocation[key.LocationNode]
            X = res[0]
            Y = res[1]
            if len(key.Orders):
                plt.scatter(X,Y,s = 3, c='r',alpha = 0.3)
            else :
                plt.scatter(X,Y,s = 3, c='g',alpha = 0.3)
    plt.xlim(self.MapWestBound , self.MapEastBound)
    plt.xlabel("red = running blue = idle green = Dispatch")
    plt.ylim(self.MapSouthBound , self.MapNorthBound)
    plt.title("Vehicles Location")
    plt.show()
    return
def DrawVehicleTrajectory(self,Vehicle):
    """
    Plot a vehicle's straight-line trajectory from its current node
    (black dot) to its delivery point (blue dot) and show the figure.
    """
    X1,Y1 = self.NodeID2NodesLocation[Vehicle.LocationNode]
    X2,Y2 = self.NodeID2NodesLocation[Vehicle.DeliveryPoint]
    #start location
    plt.scatter(X1,Y1,s = 3, c='black',alpha = 0.3)
    #destination
    plt.scatter(X2,Y2,s = 3, c='blue',alpha = 0.3)
    #Vehicles Trajectory
    LX1=[X1,X2]
    LY1=[Y1,Y2]
    # Bug fix: the original called plt.plot(LY1, LX1), swapping the axes
    # so the line did not connect the two scattered endpoints.
    plt.plot(LX1,LY1,c='k',linewidth=0.3,alpha = 0.5)
    plt.title("Vehicles Trajectory")
    plt.show()
    return
#-----------------------------------------------
def WorkdayOrWeekend(self,day):
    """
    Map a weekday index (0=Monday .. 6=Sunday) to "Workday"/"Weekend".

    Raises
    ------
    Exception
        If *day* is not an int in [0, 6]. The type is checked exactly,
        so bool (a subclass of int) is rejected too, as in the original.
    """
    if type(day) is not int or day < 0 or day > 6:
        raise Exception('input format error')
    return "Weekend" if day in (5, 6) else "Workday"
def GetTimeAndWeather(self,Order):
    """
    Build the time/weather feature vector for an order's release time.

    Returns
    -------
    list
        [day, weekday, is_weekend, hour, minute, weather_type,
         min_temp, max_temp, wind_direction, wind_power].

    Raises
    ------
    Exception
        If the release month is not November (the only month with
        weather data loaded).
    """
    release = Order.ReleasTime
    if release.month != 11:
        raise Exception('Month format error')
    day = release.day
    week = release.weekday()
    weekend = 1 if week in (5, 6) else 0
    hour = release.hour
    minute = release.minute
    # Two weather records per day: morning (<12h) and afternoon.
    slot = 2 * (day - 1) + (0 if hour < 12 else 1)
    weather = self.WeatherType[slot]
    return [day, week, weekend, hour, minute, weather,
            self.MinimumTemperature[day - 1],
            self.MaximumTemperature[day - 1],
            self.WindDirection[day - 1],
            self.WindPower[day - 1]]
############################################################################
#The main modules
#---------------------------------------------------------------------------
def DemandPredictFunction(self):
    """
    Hook: forecast order demand for the upcoming time slot.

    Override in a subclass with a real predictor so DispatchFunction can
    make informed decisions; the base simulator does nothing here.
    """
    return
def SupplyExpectFunction(self):
    """
    Estimate, per cluster, how many busy vehicles will become idle within
    the next time slot because their current order completes; the counts
    are stored in self.SupplyExpect (one entry per cluster ID).
    """
    self.SupplyExpect = np.zeros(self.ClustersNumber)
    horizon = self.RealExpTime + self.TimePeriods
    for cluster in self.Clusters:
        for vehicle, arrive_time in list(cluster.VehiclesArrivetime.items()):
            # Only vehicles that are carrying orders and due before the
            # end of the next slot count as incoming supply.
            if arrive_time <= horizon and len(vehicle.Orders) > 0:
                self.SupplyExpect[cluster.ID] += 1
    return
def DispatchFunction(self):
    """
    Hook: reposition idle vehicles between clusters.

    Override in a subclass with a real dispatch policy; the base
    simulator performs no repositioning.
    """
    return
def MatchFunction(self):
    """
    Match every order released within the current time slot to the
    nearest idle vehicle. If the order's own cluster has no idle vehicle
    and NeighborCanServer is set, a DFS over neighbouring clusters
    (FindServerVehicleFunction) widens the search. Orders whose best
    pickup cost exceeds PICKUPTIMEWINDOW are rejected.
    """
    #Count the number of idle vehicles before matching
    for i in self.Clusters:
        i.PerMatchIdleVehicles = len(i.IdleVehicles)
    while self.NowOrder.ReleasTime < self.RealExpTime+self.TimePeriods :
        # NOTE(review): the final order in self.Orders is never matched
        # because of this break; confirm that is intentional.
        if self.NowOrder.ID == self.Orders[-1].ID:
            break
        self.OrderNum += 1
        NowCluster = self.NodeID2Cluseter[self.NowOrder.PickupPoint]
        NowCluster.Orders.append(self.NowOrder)
        if len(NowCluster.IdleVehicles) or len(NowCluster.Neighbor):
            TempMin = None
            if len(NowCluster.IdleVehicles):
                #Find a nearest car to match the current order
                #--------------------------------------
                for i in NowCluster.IdleVehicles:
                    TempRoadCost = self.RoadCost(i.LocationNode,self.NowOrder.PickupPoint)
                    if TempMin == None :
                        TempMin = (i,TempRoadCost,NowCluster)
                    elif TempRoadCost < TempMin[1] :
                        TempMin = (i,TempRoadCost,NowCluster)
                #--------------------------------------
            #Neighbor car search system to increase search range
            elif self.NeighborCanServer and len(NowCluster.Neighbor):
                TempMin = self.FindServerVehicleFunction(
                    NeighborServerDeepLimit=self.NeighborServerDeepLimit,
                    Visitlist={},Cluster=NowCluster,TempMin=None,deep=0
                )
            #When all Neighbor Cluster without any idle Vehicles
            if TempMin == None or TempMin[1] > PICKUPTIMEWINDOW:
                self.RejectNum+=1
                self.NowOrder.ArriveInfo="Reject"
            #Successfully matched a vehicle
            else:
                NowVehicle = TempMin[0]
                self.NowOrder.PickupWaitTime = TempMin[1]
                NowVehicle.Orders.append(self.NowOrder)
                self.TotallyWaitTime += self.RoadCost(NowVehicle.LocationNode,self.NowOrder.PickupPoint)
                # Pickup leg plus delivery leg, in road-cost units.
                ScheduleCost = self.RoadCost(NowVehicle.LocationNode,self.NowOrder.PickupPoint) + self.RoadCost(self.NowOrder.PickupPoint,self.NowOrder.DeliveryPoint)
                #Add a destination to the current vehicle
                NowVehicle.DeliveryPoint = self.NowOrder.DeliveryPoint
                #Delivery Cluster {Vehicle:ArriveTime}
                self.Clusters[self.NodeID2Cluseter[self.NowOrder.DeliveryPoint].ID].VehiclesArrivetime[NowVehicle] = self.RealExpTime + np.timedelta64(ScheduleCost*MINUTES)
                #delete now Cluster's recode about now Vehicle
                TempMin[2].IdleVehicles.remove(NowVehicle)
                self.NowOrder.ArriveInfo="Success"
        else:
            #None available idle Vehicles
            self.RejectNum += 1
            self.NowOrder.ArriveInfo = "Reject"
        #The current order has been processed and start processing the next order
        #------------------------------
        self.NowOrder = self.Orders[self.NowOrder.ID+1]
    return
def FindServerVehicleFunction(self,NeighborServerDeepLimit,Visitlist,Cluster,TempMin,deep):
"""
Use dfs visit neighbors and find nearest idle Vehicle
"""
if deep > NeighborServerDeepLimit or Cluster.ID in Visitlist:
return TempMin
Visitlist[Cluster.ID] = True
for i in Cluster.IdleVehicles:
TempRoadCost = self.RoadCost(i.LocationNode,self.NowOrder.PickupPoint)
if TempMin == None :
TempMin = (i,TempRoadCost,Cluster)
elif TempRoadCost < TempMin[1]:
TempMin = (i,TempRoadCost,Cluster)
if self.NeighborCanServer:
for j in Cluster.Neighbor:
TempMin = self.FindServerVehicleFunction(NeighborServerDeepLimit,Visitlist,j,TempMin,deep+1)
return TempMin
def RewardFunction(self):
    """
    Hook: compute the reinforcement-learning reward for the current slot.

    Override in a subclass when dispatching with RL; the base simulator
    does nothing here.
    """
    return
def UpdateFunction(self):
    """
    Advance one time slot: for every cluster, clear last slot's order
    records and land every vehicle whose arrival time has passed,
    completing its order (if any) and re-registering it in the cluster.
    """
    for i in self.Clusters:
        #Records array of orders cleared for the last time slot
        i.Orders.clear()
        for key,value in list(i.VehiclesArrivetime.items()):
            #key = Vehicle ; value = Arrivetime
            if value <= self.RealExpTime :
                #update Order completion time, if the vehicle was carrying one
                if len(key.Orders):
                    key.Orders[0].ArriveOrderTimeRecord(self.RealExpTime)
                #update Vehicle info
                key.ArriveVehicleUpDate(i)
                #update Cluster record
                i.ArriveClusterUpDate(key)
    return
def GetNextStateFunction(self):
    """
    Hook: build the next RL state observation after matching.

    Override in a subclass when dispatching with RL; the base simulator
    does nothing here.
    """
    return
def LearningFunction(self):
    """Hook: per-step learning update; no-op in the base simulator."""
    return
def SimCity(self):
    """
    Run one complete episode: step through time slots, and in each slot
    run update -> match -> reward -> next-state -> learning -> demand
    prediction/supply estimate -> dispatch, accumulating per-stage
    timings; finally print the episode statistics.
    """
    self.RealExpTime = self.Orders[0].ReleasTime - self.TimePeriods
    #To complete running orders, keep simulating past the last release time.
    EndTime = self.Orders[-1].ReleasTime + 3 * self.TimePeriods
    self.NowOrder = self.Orders[0]
    self.step = 0
    EpisodeStartTime = dt.datetime.now()
    print("Start experiment")
    print("----------------------------")
    while self.RealExpTime <= EndTime:
        # NOTE(review): StepStartTime is never used afterwards.
        StepStartTime = dt.datetime.now()
        StepUpdateStartTime = dt.datetime.now()
        self.UpdateFunction()
        self.TotallyUpdateTime += dt.datetime.now() - StepUpdateStartTime
        StepMatchStartTime = dt.datetime.now()
        self.MatchFunction()
        self.TotallyMatchTime += dt.datetime.now() - StepMatchStartTime
        StepRewardStartTime = dt.datetime.now()
        self.RewardFunction()
        # NOTE(review): TotallyRewardTime is not initialised in the visible
        # Reset()/Reload(); confirm it is set in __init__.
        self.TotallyRewardTime += dt.datetime.now() - StepRewardStartTime
        StepNextStateStartTime = dt.datetime.now()
        self.GetNextStateFunction()
        self.TotallyNextStateTime += dt.datetime.now() - StepNextStateStartTime
        for i in self.Clusters:
            i.DispatchNumber = 0
        StepLearningStartTime = dt.datetime.now()
        self.LearningFunction()
        self.TotallyLearningTime += dt.datetime.now() - StepLearningStartTime
        StepDemandPredictStartTime = dt.datetime.now()
        self.DemandPredictFunction()
        self.SupplyExpectFunction()
        self.TotallyDemandPredictTime += dt.datetime.now() - StepDemandPredictStartTime
        #Count the number of idle vehicles before Dispatch
        for i in self.Clusters:
            i.PerDispatchIdleVehicles = len(i.IdleVehicles)
        StepDispatchStartTime = dt.datetime.now()
        self.DispatchFunction()
        self.TotallyDispatchTime += dt.datetime.now() - StepDispatchStartTime
        #Count the number of idle vehicles after Dispatch
        for i in self.Clusters:
            i.LaterDispatchIdleVehicles = len(i.IdleVehicles)
        #A time slot is processed
        self.step += 1
        self.RealExpTime += self.TimePeriods
    #------------------------------------------------
    EpisodeEndTime = dt.datetime.now()
    # Total value over all successfully served orders.
    SumOrderValue = 0
    OrderValueNum = 0
    for i in self.Orders:
        if i.ArriveInfo != "Reject":
            SumOrderValue += i.OrderValue
            OrderValueNum += 1
    #------------------------------------------------
    print("Experiment over")
    print("Episode: " + str(self.Episode))
    print("Clusting mode: " + self.ClusterMode)
    print("Demand Prediction mode: " + self.DemandPredictionMode)
    print("Dispatch mode: " + self.DispatchMode)
    print("Date: " + str(self.Orders[0].ReleasTime.month) + "/" + str(self.Orders[0].ReleasTime.day))
    print("Weekend or Workday: " + self.WorkdayOrWeekend(self.Orders[0].ReleasTime.weekday()))
    if self.ClusterMode != "Grid":
        print("Number of Clusters: " + str(len(self.Clusters)))
    elif self.ClusterMode == "Grid":
        print("Number of Grids: " + str((self.NumGrideWidth * self.NumGrideHeight)))
    print("Number of Vehicles: " + str(len(self.Vehicles)))
    print("Number of Orders: " + str(len(self.Orders)))
    print("Number of Reject: " + str(self.RejectNum))
    print("Number of Dispatch: " + str(self.DispatchNum))
    if (self.DispatchNum)!=0:
        print("Average Dispatch Cost: " + str(self.TotallyDispatchCost/self.DispatchNum))
    if (len(self.Orders)-self.RejectNum)!=0:
        print("Average wait time: " + str(self.TotallyWaitTime/(len(self.Orders)-self.RejectNum)))
    print("Totally Order value: " + str(SumOrderValue))
    print("Totally Update Time : " + str(self.TotallyUpdateTime))
    print("Totally NextState Time : " + str(self.TotallyNextStateTime))
    print("Totally Learning Time : " + str(self.TotallyLearningTime))
    print("Totally Demand Predict Time : " + str(self.TotallyDemandPredictTime))
    print("Totally Dispatch Time : " + str(self.TotallyDispatchTime))
    # NOTE(review): label says "Simulation" but this prints the match timer.
    print("Totally Simulation Time : " + str(self.TotallyMatchTime))
    print("Episode Run time : " + str(EpisodeEndTime - EpisodeStartTime))
    return
if __name__ == '__main__':
    # Experiment configuration: plain simulation, no demand prediction,
    # uniform grid partitioning.
    DispatchMode = "Simulation"
    DemandPredictionMode = "None"
    ClusterMode = "Grid"
    # NOTE(review): VehiclesNumber, TIMESTEP, LocalRegionBound,
    # SideLengthMeter, VehiclesServiceMeter, NeighborCanServer and
    # FocusOnLocalRegion are expected to be module-level constants defined
    # elsewhere in this file — confirm.
    EXPSIM = Simulation(
        ClusterMode = ClusterMode,
        DemandPredictionMode = DemandPredictionMode,
        DispatchMode = DispatchMode,
        VehiclesNumber = VehiclesNumber,
        TimePeriods = TIMESTEP,
        LocalRegionBound = LocalRegionBound,
        SideLengthMeter = SideLengthMeter,
        VehiclesServiceMeter = VehiclesServiceMeter,
        NeighborCanServer = NeighborCanServer,
        FocusOnLocalRegion = FocusOnLocalRegion,
    )
    # Load all data for the default date, then run one episode.
    EXPSIM.CreateAllInstantiate()
    EXPSIM.SimCity()
| StarcoderdataPython |
1809183 | <reponame>bauchter-work/2445_git_repo
# Read one sample from the BeagleBone Black analog input P9_36 and report
# it as a voltage. (Python 2 syntax — note the print statement.)
import Adafruit_BBIO.GPIO as GPIO  # NOTE(review): imported but unused here
import Adafruit_BBIO.ADC as ADC
ADC.setup()
Value = ADC.read("P9_36") #Returns a value from 0 to 1
Voltage = Value*1.8 #converts to a voltage value (the ADC reference is 1.8 V)
print "Voltage is: ",Voltage," volts"
| StarcoderdataPython |
9799757 | <filename>api/tests/api_gateway/test_api.py
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
import json
import os
import main
import pytest
import uuid
from aws_lambda_powertools.metrics import MetricUnit
from fastapi import HTTPException
from unittest.mock import ANY, MagicMock, patch
from sqlalchemy.exc import SQLAlchemyError
from models.List import List
from models.Subscription import Subscription
def test_return_all_lists(list_fixture, list_fixture_with_redirects, client):
    """GET /lists returns both fixture lists (matched by id) with HTTP 200."""
    response = client.get("/lists")
    data = response.json()
    assert len(data) == 2
    # Both fixtures must be present exactly once, located by their id.
    assert find_item_in_dict_list(data, "id", str(list_fixture.id)) is not None
    assert (
        find_item_in_dict_list(data, "id", str(list_fixture_with_redirects.id))
        is not None
    )
    assert response.status_code == 200
def test_return_all_lists_with_additional_data(
    list_fixture, list_fixture_with_redirects, client
):
    """GET /lists serialises every field of each list, including the
    redirect URLs on the redirect-enabled fixture and a zero
    subscriber_count on both."""
    response = client.get("/lists")
    response_list = find_item_in_dict_list(response.json(), "id", str(list_fixture.id))
    response_list_with_redirects = find_item_in_dict_list(
        response.json(), "id", str(list_fixture_with_redirects.id)
    )
    assert len(response.json()) == 2
    # json round-trip normalises UUIDs and other types to plain JSON values.
    assert response_list == json.loads(
        json.dumps(
            {
                "id": str(list_fixture.id),
                "language": list_fixture.language,
                "name": list_fixture.name,
                "service_id": list_fixture.service_id,
                "subscribe_email_template_id": list_fixture.subscribe_email_template_id,
                "unsubscribe_email_template_id": list_fixture.unsubscribe_email_template_id,
                "subscribe_phone_template_id": list_fixture.subscribe_phone_template_id,
                "unsubscribe_phone_template_id": list_fixture.unsubscribe_phone_template_id,
                "subscriber_count": 0,
            }
        )
    )
    assert response_list_with_redirects == json.loads(
        json.dumps(
            {
                "id": str(list_fixture_with_redirects.id),
                "language": list_fixture_with_redirects.language,
                "name": list_fixture_with_redirects.name,
                "service_id": list_fixture_with_redirects.service_id,
                "subscribe_email_template_id": list_fixture_with_redirects.subscribe_email_template_id,
                "unsubscribe_email_template_id": list_fixture_with_redirects.unsubscribe_email_template_id,
                "subscribe_phone_template_id": list_fixture_with_redirects.subscribe_phone_template_id,
                "unsubscribe_phone_template_id": list_fixture_with_redirects.unsubscribe_phone_template_id,
                "subscribe_redirect_url": list_fixture_with_redirects.subscribe_redirect_url,
                "confirm_redirect_url": list_fixture_with_redirects.confirm_redirect_url,
                "unsubscribe_redirect_url": list_fixture_with_redirects.unsubscribe_redirect_url,
                "subscriber_count": 0,
            }
        )
    )
    assert response.status_code == 200
def test_return_lists_with_one_containing_only_required_data(
    list_fixture_required_data_only, client
):
    """A list created with only the mandatory fields is still returned."""
    response = client.get("/lists")
    expected = {
        "id": str(list_fixture_required_data_only.id),
        "language": list_fixture_required_data_only.language,
        "name": list_fixture_required_data_only.name,
        "service_id": list_fixture_required_data_only.service_id,
        "subscriber_count": 0,
    }
    assert expected in response.json()
    assert response.status_code == 200
def test_return_lists_by_service(list_fixture, list_fixture_with_redirects, client):
    """Fetching lists by service id returns ``list_fixture``'s own data.

    Bug fix: the expected payload previously read the four template ids from
    ``list_fixture_with_redirects`` even though the asserted entry belongs to
    ``list_fixture``; the ids now come from the matching fixture.
    """
    response = client.get(f"/lists/{list_fixture.service_id}")
    assert {
        "id": str(list_fixture.id),
        "language": list_fixture.language,
        "name": list_fixture.name,
        "service_id": list_fixture.service_id,
        "subscribe_email_template_id": list_fixture.subscribe_email_template_id,
        "unsubscribe_email_template_id": list_fixture.unsubscribe_email_template_id,
        "subscribe_phone_template_id": list_fixture.subscribe_phone_template_id,
        "unsubscribe_phone_template_id": list_fixture.unsubscribe_phone_template_id,
        "subscriber_count": 0,
    } in response.json()
    assert response.status_code == 200
def test_create_list(client):
    """POST /list with a complete, valid payload succeeds and returns an id."""
    payload = {
        "name": "new_name",
        "language": "new_language",
        "service_id": "new_service_id",
        "subscribe_email_template_id": str(uuid.uuid4()),
        "unsubscribe_email_template_id": str(uuid.uuid4()),
        "subscribe_phone_template_id": str(uuid.uuid4()),
        "unsubscribe_phone_template_id": str(uuid.uuid4()),
    }
    response = client.post(
        "/list",
        json=payload,
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 200
    assert response.json() == {"id": ANY}
def test_create_list_with_undeclared_parameter(client):
    """An unknown field in the payload triggers a 422 validation error."""
    payload = {
        "name": "new_name",
        "language": "new_language",
        "service_id": "new_service_id",
        "subscribe_email_template_id": str(uuid.uuid4()),
        "unsubscribe_email_template_id": str(uuid.uuid4()),
        "subscribe_phone_template_id": str(uuid.uuid4()),
        "unsubscribe_phone_template_id": str(uuid.uuid4()),
        "foo": "bar",
    }
    response = client.post(
        "/list",
        json=payload,
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 422
    assert response.json() == {"detail": ANY}
def test_create_list_with_error(client):
    """Template ids that are not valid UUIDs are rejected with a 422."""
    payload = {
        "name": "fixture_name",
        "language": "new_language",
        "service_id": "new_service_id",
        "subscribe_email_template_id": "new_subscribe_email_template_id",
        "unsubscribe_email_template_id": "new_unsubscribe_email_template_id",
        "subscribe_phone_template_id": "new_subscribe_phone_template_id",
        "unsubscribe_phone_template_id": "new_unsubscribe_phone_template_id",
    }
    response = client.post(
        "/list",
        json=payload,
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 422
    assert response.json() == {"detail": ANY}
@patch("api_gateway.api.db_session")
def test_create_list_with_unknown_error(mock_db_session, client):
    """A SQLAlchemy failure during commit surfaces as a 500 with its message."""
    broken_session = MagicMock()
    broken_session.commit.side_effect = SQLAlchemyError("fakeerror")
    mock_db_session.return_value = broken_session
    payload = {
        "name": "new_name",
        "language": "new_language",
        "service_id": "new_service_id",
        "subscribe_email_template_id": str(uuid.uuid4()),
        "unsubscribe_email_template_id": str(uuid.uuid4()),
        "subscribe_phone_template_id": str(uuid.uuid4()),
        "unsubscribe_phone_template_id": str(uuid.uuid4()),
    }
    response = client.post(
        "/list",
        json=payload,
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 500
    assert response.json() == {"error": "error saving list: fakeerror"}
@pytest.mark.parametrize(
    "field,value",
    [
        ("subscribe_redirect_url", "https://example.com/redirect_target"),
        ("confirm_redirect_url", "https://example.com/redirect_target"),
        ("unsubscribe_redirect_url", "https://example.com/redirect_target"),
    ],
)
def test_create_list_invalid_domain(field, value, client):
    """Each redirect URL field rejects domains outside the allow list."""
    payload = {
        "name": f"new_name_{uuid.uuid4()}",
        "language": "new_language",
        "service_id": "new_service_id",
        "subscribe_email_template_id": str(uuid.uuid4()),
        "unsubscribe_email_template_id": str(uuid.uuid4()),
        "subscribe_phone_template_id": str(uuid.uuid4()),
        "unsubscribe_phone_template_id": str(uuid.uuid4()),
        field: value,
    }
    response = client.post(
        "/list",
        json=payload,
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    expected_detail = [
        {
            "loc": ["body", field],
            "msg": "domain must be in REDIRECT_ALLOW_LIST",
            "type": "value_error",
        }
    ]
    assert response.status_code == 422
    assert response.json() == {"detail": expected_detail}
@pytest.mark.parametrize(
    "field,value",
    [
        ("subscribe_redirect_url", "https://ircc.digital.canada.ca/redirect_target"),
        ("confirm_redirect_url", "https://ircc.digital.canada.ca/redirect_target"),
        ("unsubscribe_redirect_url", "https://ircc.digital.canada.ca/redirect_target"),
    ],
)
def test_create_list_valid_domain(field, value, client):
    """Redirect URLs on an allow-listed domain are accepted for every field.

    Fix: the parametrize list previously contained each (field, value) pair
    twice; the exact duplicates were removed so each case runs once.
    """
    response = client.post(
        "/list",
        json={
            "name": f"new_name_{uuid.uuid4()}",
            "language": "new_language",
            "service_id": "new_service_id",
            "subscribe_email_template_id": str(uuid.uuid4()),
            "unsubscribe_email_template_id": str(uuid.uuid4()),
            "subscribe_phone_template_id": str(uuid.uuid4()),
            "unsubscribe_phone_template_id": str(uuid.uuid4()),
            field: value,
        },
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.json() == {"id": ANY}
    assert response.status_code == 200
def test_delete_list_with_bad_id(client):
    """DELETE with a malformed id yields a 404, not a server error."""
    response = client.delete(
        "/list/foo",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 404
    assert response.json() == {"error": "list not found"}
def test_delete_list_with_id_not_found(client):
    """DELETE with a well-formed but unknown UUID yields a 404."""
    unknown_id = str(uuid.uuid4())
    response = client.delete(
        f"/list/{unknown_id}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 404
    assert response.json() == {"error": "list not found"}
def test_delete_list_with_correct_id(session, client):
    """Deleting an existing list returns OK.

    Fix: the local variable no longer shadows the builtin ``list``.
    """
    mailing_list = List(
        name="delete_name",
        language="delete_language",
        service_id="delete_service_id",
        subscribe_email_template_id="delete_subscribe_email_template_id",
        unsubscribe_email_template_id="delete_unsubscribe_email_template_id",
        subscribe_phone_template_id="delete_subscribe_phone_template_id",
        unsubscribe_phone_template_id="delete_unsubscribe_phone_template_id",
    )
    session.add(mailing_list)
    session.commit()
    response = client.delete(
        f"/list/{str(mailing_list.id)}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.json() == {"status": "OK"}
    assert response.status_code == 200
@patch("api_gateway.api.db_session")
def test_delete_list_with_correct_id_unknown_error(
    mock_db_session, list_fixture, client
):
    """A commit failure while deleting maps to a 500 response."""
    broken_session = MagicMock()
    broken_session.commit.side_effect = SQLAlchemyError()
    mock_db_session.return_value = broken_session
    response = client.delete(
        f"/list/{str(list_fixture.id)}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert response.status_code == 500
    assert response.json() == {"error": "error deleting list"}
def test_edit_list_with_bad_id(client):
    """PUT on a malformed list id is a 404."""
    update = {"name": "name", "language": "language", "service_id": "service_id"}
    response = client.put(
        "/list/foo",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
        json=update,
    )
    assert response.status_code == 404
    assert response.json() == {"error": "list not found"}
def test_edit_list_with_id_not_found(client):
    """PUT on a well-formed but unknown UUID is a 404."""
    update = {"name": "name", "language": "language", "service_id": "service_id"}
    response = client.put(
        f"/list/{str(uuid.uuid4())}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
        json=update,
    )
    assert response.status_code == 404
    assert response.json() == {"error": "list not found"}
def test_edit_list_with_correct_id(session, client):
    """Editing an existing list persists the new values.

    Fix: local ``list`` renamed to avoid shadowing the builtin.
    """
    mailing_list = List(
        name="edit_name",
        language="edit_language",
        service_id="edit_service_id",
        subscribe_email_template_id="edit_subscribe_email_template_id",
        unsubscribe_email_template_id="edit_unsubscribe_email_template_id",
        subscribe_phone_template_id="edit_subscribe_phone_template_id",
        unsubscribe_phone_template_id="edit_unsubscribe_phone_template_id",
    )
    session.add(mailing_list)
    session.commit()
    response = client.put(
        f"/list/{str(mailing_list.id)}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
        json={
            "name": "edited_name",
            "language": "edited_language",
            "service_id": "edited_service_id",
        },
    )
    assert response.json() == {"status": "OK"}
    assert response.status_code == 200
    # Re-read from the database to verify the update was committed.
    session.expire_all()
    mailing_list = session.query(List).get(mailing_list.id)
    assert mailing_list.name == "edited_name"
    assert mailing_list.language == "edited_language"
    assert mailing_list.service_id == "edited_service_id"
def test_edit_list_without_supplying_service_id_and_name(session, client):
    """A partial update (only one field) succeeds and is persisted.

    Fix: local ``list`` renamed to avoid shadowing the builtin.
    """
    mailing_list = List(
        name="name_1",
        language="English",
        service_id="service_id_1",
        subscribe_email_template_id=str(uuid.uuid4()),
    )
    session.add(mailing_list)
    session.commit()
    response = client.put(
        f"/list/{str(mailing_list.id)}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
        json={"subscribe_email_template_id": "ea974231-002b-4889-87f1-0b9cf48e9411"},
    )
    assert response.json() == {"status": "OK"}
    assert response.status_code == 200
    # Re-read from the database to verify the single-field update.
    session.expire_all()
    mailing_list = session.query(List).get(mailing_list.id)
    assert mailing_list.subscribe_email_template_id == "ea974231-002b-4889-87f1-0b9cf48e9411"
@patch("api_gateway.api.db_session")
def test_edit_list_with_correct_id_unknown_error(mock_db_session, list_fixture, client):
    """A commit failure while updating maps to a 500 response."""
    broken_session = MagicMock()
    broken_session.commit.side_effect = SQLAlchemyError()
    mock_db_session.return_value = broken_session
    update = {
        "name": "edited_name",
        "language": "edited_language",
        "service_id": "edited_service_id",
    }
    response = client.put(
        f"/list/{str(list_fixture.id)}",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
        json=update,
    )
    assert response.status_code == 500
    assert response.json() == {"error": "error updating list"}
# @TODO - not sure we need to test this middleware
# @patch("api_gateway.api.get_notify_client")
# def test_global_exception_handler(mock_client, list_fixture, client):
# template_id = str(uuid.uuid4())
# mock_client.side_effect = Exception("Unknown error")
# response = client.post(
# "/send",
# headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
# json={
# "service_api_key": str(<KEY>()),
# "list_id": str(list_fixture.id),
# "template_id": template_id,
# "template_type": "email",
# "job_name": "<NAME>",
# },
# )
# mock_client.assert_called_once()
# assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
@patch("main.Mangum")
def test_metrics(mock_mangum, context_fixture, capsys, metrics):
    """Every expected CloudWatch EMF metric name is declared by the handler."""
    mock_asgi_handler = MagicMock()
    mock_asgi_handler.return_value = True
    mock_mangum.return_value = mock_asgi_handler
    main.handler({"httpMethod": "GET"}, context_fixture)
    # The EMF metrics blob is flushed to stdout as one JSON document.
    log = capsys.readouterr().out.strip()
    metrics_output = json.loads(log)
    metric_list = [
        "ListCreated",
        "ListDeleted",
        "SuccessfulSubscription",
        "UnsubscriptionError",
        "ConfirmationError",
        "UnsubscriptionNotificationError",
        "SuccessfulConfirmation",
        "SuccessfulUnsubscription",
        "BulkNotificationError",
        "SubscriptionNotificationError",
        "ListDeleteError",
        "ListUpdateError",
        "ListUpdated",
        "ListCreateError",
    ]
    for metric in metric_list:
        # NOTE(review): substring match against str(...) is loose — a name
        # that is a substring of another would pass spuriously; consider
        # collecting the declared metric names and testing set membership.
        assert metric in str(metrics_output["_aws"]["CloudWatchMetrics"][0]["Metrics"])
@patch("api_gateway.api.metrics")
def test_verify_token_throws_an_exception_if_token_is_not_correct(
    mock_metrics, api_verify_token
):
    """An invalid Authorization header raises and records one failure metric."""
    request = MagicMock()
    request.headers = {"Authorization": "invalid"}
    with pytest.raises(HTTPException):
        api_verify_token(request)
    mock_metrics.add_metric.assert_called_once_with(
        name="IncorrectAuthorizationToken", unit=MetricUnit.Count, value=1
    )
def test_verify_token_returns_true_if_token_is_correct(api_verify_token):
    """The configured API token passes verification."""
    valid_request = MagicMock()
    valid_request.headers = {"Authorization": os.environ["API_AUTH_TOKEN"]}
    assert api_verify_token(valid_request)
def subscribe_users(session, user_list, fixture):
    """Subscribe each user dict (keys ``email`` and ``confirmed``) to the
    given list ``fixture``.

    Improvement: all rows are added first and committed once, instead of
    issuing one commit per subscription inside the loop.
    """
    for user in user_list:
        session.add(
            Subscription(
                email=user["email"], list=fixture, confirmed=user["confirmed"]
            )
        )
    session.commit()
def find_item_in_dict_list(data, identifier, value):
    """Return the first dict in ``data`` whose ``identifier`` key equals
    ``value``, or ``None`` when no entry matches."""
    for item in data:
        if item[identifier] == value:
            return item
    return None
@patch("api_gateway.api.get_notify_client")
def test_counts_when_list_has_no_subscribers(mock_client, list_count_fixture_1, client):
    """The subscriber-count endpoint returns nothing for an empty list."""
    response = client.get(
        f"/lists/{str(list_count_fixture_1.service_id)}/subscriber-count",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    assert len(response.json()) == 0
@patch("api_gateway.api.get_notify_client")
def test_counts_when_list_has_subscribers(
    mock_client,
    session,
    list_count_fixture_0,
    list_count_fixture_1,
    list_count_fixture_2,
    client,
):
    """Only confirmed subscribers of lists belonging to the queried service
    are counted; lists from other services are excluded entirely."""
    # add subscribers to list 0
    # note service id doesn't match the other lists
    # i.e. these shouldn't end up in the response
    list_0_emails = [
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": False},
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": False},
    ]
    subscribe_users(session, list_0_emails, list_count_fixture_0)
    # add subscribers to list 1
    list_1_emails = [
        {"email": "<EMAIL>", "confirmed": False},
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": True},
    ]
    subscribe_users(session, list_1_emails, list_count_fixture_1)
    # add subscribers to list 2
    list_2_emails = [
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": False},
    ]
    subscribe_users(session, list_2_emails, list_count_fixture_2)
    response = client.get(
        f"/lists/{str(list_count_fixture_1.service_id)}/subscriber-count",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    data = response.json()
    assert len(data) == 2
    # check list 1
    # 2 of the 3 subscriptions above are confirmed
    item = find_item_in_dict_list(data, "list_id", str(list_count_fixture_1.id))
    assert item is not None
    assert item["subscriber_count"] == 2
    # check list 2
    # 3 of the 4 subscriptions above are confirmed
    item = find_item_in_dict_list(data, "list_id", str(list_count_fixture_2.id))
    assert item is not None
    assert item["subscriber_count"] == 3
@patch("api_gateway.api.get_notify_client")
def test_remove_all_subscribers_from_list(
    mock_client,
    session,
    list_reset_fixture_0,
    client,
):
    """Resetting a list deletes every subscription but leaves the list
    usable: new subscriptions can be added again afterwards."""
    # add subscribers to list 0
    list_0_emails = [
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": False},
        {"email": "<EMAIL>", "confirmed": True},
        {"email": "<EMAIL>", "confirmed": True},
    ]
    subscribe_users(session, list_0_emails, list_reset_fixture_0)
    data = session.query(Subscription).filter(
        Subscription.list_id == list_reset_fixture_0.id
    )
    assert data.count() == 4
    # reset the list
    client.put(
        f"/list/{str(list_reset_fixture_0.id)}/reset",
        headers={"Authorization": os.environ["API_AUTH_TOKEN"]},
    )
    data = session.query(Subscription).filter(
        Subscription.list_id == list_reset_fixture_0.id
    )
    assert data.count() == 0
    # add one user back
    subscribe_users(
        session,
        [
            {"email": "<EMAIL>", "confirmed": True},
        ],
        list_reset_fixture_0,
    )
    data = session.query(Subscription).filter(
        Subscription.list_id == list_reset_fixture_0.id
    )
    assert data.count() == 1
def test_return_list_subscriber_count(list_with_subscribers, client, session):
    """GET /lists reports a per-list subscriber count alongside the full
    list details."""
    response = client.get("/lists")
    assert response.status_code == 200
    data = response.json()
    # check #1
    item = find_item_in_dict_list(data, "id", str(list_with_subscribers[0].id))
    assert item is not None
    assert item["subscriber_count"] == 2
    # checking #2 list
    item = find_item_in_dict_list(data, "id", str(list_with_subscribers[1].id))
    assert item is not None
    assert item["subscriber_count"] == 1
    # check details
    assert {
        "id": str(list_with_subscribers[0].id),
        "language": list_with_subscribers[0].language,
        "name": list_with_subscribers[0].name,
        "service_id": list_with_subscribers[0].service_id,
        "subscribe_email_template_id": list_with_subscribers[
            0
        ].subscribe_email_template_id,
        "unsubscribe_email_template_id": list_with_subscribers[
            0
        ].unsubscribe_email_template_id,
        "subscribe_phone_template_id": list_with_subscribers[
            0
        ].subscribe_phone_template_id,
        "unsubscribe_phone_template_id": list_with_subscribers[
            0
        ].unsubscribe_phone_template_id,
        "subscriber_count": 2,
    } in data
    session.expire_all()
| StarcoderdataPython |
11246254 | <gh_stars>0
# Generated by Django 3.2.6 on 2021-09-18 01:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds ranking/statistics columns to
    ``CollegeApp.College`` and gives ``name`` an empty-string default.

    NOTE(review): ``acceptance`` (max_digits=3, decimal_places=2) can only
    hold values up to 9.99 and ``grad_rate`` (max_digits=4, decimal_places=2)
    up to 99.99 — if these represent percentages in 0-100, 100.00 and most
    acceptance rates will not fit. Confirm intent; if wrong, correct it in a
    NEW migration rather than editing this generated one.
    """
    dependencies = [
        ('CollegeApp', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='college',
            name='acceptance',
            field=models.DecimalField(decimal_places=2, default=0.0, max_digits=3),
        ),
        migrations.AddField(
            model_name='college',
            name='act',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='college',
            name='city',
            field=models.CharField(default='', max_length=255),
        ),
        migrations.AddField(
            model_name='college',
            name='desirability',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='college',
            name='domain',
            field=models.CharField(default='', max_length=255),
        ),
        migrations.AddField(
            model_name='college',
            name='grad_rate',
            field=models.DecimalField(decimal_places=2, default=0.0, max_digits=4),
        ),
        migrations.AddField(
            model_name='college',
            name='influence',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='college',
            name='overall_rank',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='college',
            name='sat',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='college',
            name='slug',
            field=models.CharField(default='', max_length=255),
        ),
        migrations.AddField(
            model_name='college',
            name='state',
            field=models.CharField(default='', max_length=255),
        ),
        migrations.AddField(
            model_name='college',
            name='tuition',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='college',
            name='undergrad_student_body',
            field=models.IntegerField(default=0),
        ),
        migrations.AlterField(
            model_name='college',
            name='name',
            field=models.CharField(default='', max_length=255),
        ),
    ]
| StarcoderdataPython |
4598 | # Install all examples to connected device(s)
import subprocess
import sys

# Ask for confirmation first: a full build-and-deploy can take a long time.
answer = input("Install all vulkan examples to attached device, this may take some time! (Y/N)").lower() == 'y'
if answer:
    # Fix: compute the extra build arguments once. The previous loop appended
    # "-validation" for every occurrence (with no separator), producing a
    # broken "-validation-validation" token when the flag was passed twice.
    BUILD_ARGUMENTS = "-validation" if "-validation" in sys.argv[1:] else ""
    if subprocess.call(("python build-all.py -deploy %s" % BUILD_ARGUMENTS).split(' ')) != 0:
        print("Error: Not all examples may have been installed!")
        sys.exit(-1)
| StarcoderdataPython |
6684968 | from __future__ import print_function, unicode_literals
from argparse import ArgumentParser
from code import InteractiveConsole
import sys
from wsgiref.simple_server import make_server
from wsgiref.util import setup_testing_defaults
from . import app, init_db, init_environ
def main(argv=None):
    """Parse ``argv`` (defaults to ``sys.argv[1:]``) and run the chosen
    subcommand: ``initdb``, ``run`` or ``shell``."""
    args = make_argument_parser().parse_args(argv)
    chosen = args.subcommand
    if chosen == 'shell':
        shell_command()
    elif chosen == 'run':
        run_command(args.host, args.port)
    elif chosen == 'initdb':
        initdb_command()
def make_argument_parser():
    """Build the CLI parser with the ``initdb``, ``run`` and ``shell``
    subcommands; the chosen one lands in ``args.subcommand``."""
    parser = ArgumentParser()
    subparsers = parser.add_subparsers(dest='subcommand')
    subparsers.add_parser('initdb', help='Initializes the database.')
    runner = subparsers.add_parser('run',
                                   help='Runs a development server.')
    runner.add_argument('-H', '--host', default='localhost')
    runner.add_argument('-p', '--port', type=int, default=5000)
    subparsers.add_parser('shell', help='Runs a shell in the app context.')
    return parser
def initdb_command():
    """Create the database schema and report completion on stderr."""
    init_db(init_environ({}))
    print('Initialized the database.', file=sys.stderr)
def run_command(host, port):
    """Serve the WSGI ``app`` on ``host:port`` until interrupted."""
    httpd = make_server(host, port, app)
    print('Starting a server on http://{}:{}.'.format(host, port))
    httpd.serve_forever()
def shell_command():
    """Open an interactive console with a test-request ``environ`` in scope."""
    env = {}
    setup_testing_defaults(env)
    env = init_environ(env)
    InteractiveConsole(dict(environ=env)).interact()
# Script entry point: dispatch to the selected subcommand.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
4809873 | <reponame>CogSciUOS/DeepLearningToolbox
"""Support for parsing of common Deep Learning Toolbox command line
options.
Intended usage:
```
from argparse import ArgumentParser
import dltb.argparse as ToolboxArgparse
# ...
parser = ArgumentParser(...)
# ... add specific arguments ...
ToolboxArgparse.add_arguments(parser)
args = parser.parse_args()
ToolboxArgparse.process_arguments(args)
# ...
```
"""
# standard imports
from argparse import ArgumentParser, Namespace, Action
import sys
import logging
import importlib
# toolbox imports
from .config import config
from .util.logging import TerminalFormatter
class NegateAction(Action):
    """Argparse action that lets a boolean flag be negated with a ``no``
    prefix: ``--flag`` stores ``True`` while ``--noflag`` stores ``False``.
    """
    def __call__(self, parser, namespace, values, option_string):
        # Characters 2-3 of the option string are 'no' for the negated form
        # (e.g. '--noflag'[2:4] == 'no').
        negated = option_string[2:4] == 'no'
        setattr(namespace, self.dest, not negated)
def add_arguments(parser: ArgumentParser) -> None:
    """Add arguments to an :py:class:`ArgumentParser`, that
    allow to specify general options for the Deep Learning ToolBox.

    Fix: the ``--debug`` help text misspelled its metavar ("MODLE");
    it now reads "MODULE" like the ``--info`` option.

    Parameters
    ----------
    parser: ArgumentParser
        The argument parser to which arguments are to be added.
    """
    computation = parser.add_argument_group("Computation")
    computation.add_argument('--cpu',
                             help="Force CPU usage (even if GPU is available)",
                             action='store_true', default=False)

    toolbox = parser.add_argument_group("General toolbox arguments")

    # Debugging: both options may be repeated to target several modules.
    toolbox.add_argument('--info', default=[], action='append',
                         metavar='MODULE',
                         help='Show info messages from MODULE')
    toolbox.add_argument('--debug', default=[], action='append',
                         metavar='MODULE',
                         help='Show debug messages from MODULE')

    # Miscellaneous
    toolbox.add_argument('--warn-missing-dependencies',
                         help="Issue warnings on missing software packages",
                         action='store_true', default=False)
def process_arguments(parser: ArgumentParser, args: Namespace = None) -> None:
    """Evaluate command line arguments for configuring the toolbox.

    Applies ``--warn-missing-dependencies`` and ``--cpu`` to the global
    ``config`` and attaches a stderr log handler for every module named
    via ``--info`` / ``--debug``.

    Parameters
    ----------
    parser: ArgumentParser
        The argument parser (used for error processing).
    args: Namespace
        A `Namespace` from parsing the command line
        arguments with `parser.parse_args()`.  If None, the parser is
        invoked here on ``sys.argv``.
    """
    if args is None:
        args = parser.parse_args()
    if args.warn_missing_dependencies:
        config.warn_missing_dependencies = True
    if args.cpu:
        config.use_cpu = True
    #
    # Debugging
    #
    # One handler instance is shared by all --info/--debug loggers; it is
    # created lazily on the first module that needs it.
    handler = None
    for what in ('info', 'debug'):
        modules = getattr(args, what)
        if not modules:
            continue # no modules provided as 'info/debug' command line args
        if handler is None:
            handler = logging.StreamHandler(sys.stderr)
            handler.setLevel(logging.DEBUG)
            handler.setFormatter(TerminalFormatter())
        for module in modules:
            logger = logging.getLogger(module)
            logger.addHandler(handler)
            logger.setLevel(getattr(logging, what.upper()))
            log = getattr(logger, what)
            log("Outputting %s messages from module %s", what, module)
            # Warn early about typos in module names (logger names need not
            # be importable modules, so this is only a warning).
            if (module != '__main__' and
                    importlib.util.find_spec(module) is None):
                logger.warning("Target module %s not found by importlib.",
                               module)
1691919 | <filename>venv/Lib/site-packages/PySide6/examples/macextras/macpasteboardmime/macpasteboardmime.py
############################################################################
##
## Copyright (C) 2017 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing/
##
## This file is part of the Qt for Python examples of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of The Qt Company Ltd nor the names of its
## contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
##
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
## $QT_END_LICENSE$
##
############################################################################
import sys
from PySide6 import QtCore, QtWidgets
try:
from PySide6 import QtMacExtras
except ImportError:
app = QtWidgets.QApplication(sys.argv)
messageBox = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Critical, "QtMacExtras macpasteboardmime",
"This exampe only runs on macOS and QtMacExtras must be installed to run this example.",
QtWidgets.QMessageBox.Close)
messageBox.exec_()
sys.exit(1)
class VCardMime(QtMacExtras.QMacPasteboardMime):
    """Converter between the macOS pasteboard flavor ``public.vcard`` and
    the MIME type ``application/x-mycompany-VCard``.

    Fix: ``convertFromMime`` was missing its ``self`` parameter, so any
    call through the pasteboard machinery raised a ``TypeError``.
    """

    def __init__(self, t=QtMacExtras.QMacPasteboardMime.MIME_ALL):
        super(VCardMime, self).__init__(t)

    def convertorName(self):
        return "VCardMime"

    def canConvert(self, mime, flav):
        # Convertible exactly when the flavor maps back to the requested
        # MIME type (equivalent to the previous if/else returning True/False).
        return self.mimeFor(flav) == mime

    def mimeFor(self, flav):
        if flav == "public.vcard":
            return "application/x-mycompany-VCard"
        return ""

    def flavorFor(self, mime):
        if mime == "application/x-mycompany-VCard":
            return "public.vcard"
        return ""

    def convertToMime(self, mime, data, flav):
        # 'data' arrives as a list of QByteArray chunks; concatenate them.
        # (Renamed from 'all' to avoid shadowing the builtin.)
        payload = QtCore.QByteArray()
        for chunk in data:
            payload += chunk
        return payload

    def convertFromMime(self, mime, data, flav):
        # Todo: implement!
        return []
class TestWidget(QtWidgets.QWidget):
    """Drop target that accepts vCard drags (e.g. from Contacts) and shows
    the payload delivered through the custom VCardMime converter."""
    def __init__(self, parent=None):
        super(TestWidget, self).__init__(parent)
        # Keep the converter alive for the widget's lifetime.
        self.vcardMime = VCardMime()
        self.setAcceptDrops(True)
        self.label1 = QtWidgets.QLabel()
        self.label2 = QtWidgets.QLabel()
        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(self.label1)
        layout.addWidget(self.label2)
        self.setLayout(layout)
        self.label1.setText("Please drag a \"VCard\" from Contacts application, normally a name in the list, and drop here.")
    def dragEnterEvent(self, e):
        # Accept every drag; filtering happens at drop time.
        e.accept()
    def dropEvent(self, e):
        e.accept()
        self.contentsDropEvent(e)
    def contentsDropEvent(self, e):
        if e.mimeData().hasFormat("application/x-mycompany-VCard"):
            s = e.mimeData().data( "application/x-mycompany-VCard" )
            # s now contains text of vcard
            # NOTE(review): str(s) on a QByteArray yields its repr rather
            # than the decoded text — bytes(s).decode() may be what was
            # intended; confirm before changing.
            self.label2.setText(str(s))
            e.acceptProposedAction()
if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    # Register the vcard UTI so drags from Contacts are offered to us.
    QtMacExtras.qRegisterDraggedTypes(["public.vcard"])
    wid1 = TestWidget()
    wid1.show()
    sys.exit(app.exec_())
| StarcoderdataPython |
11204972 | from redis.exceptions import (
ResponseError
)
class RedisCluException(Exception):
    """Base class for redis-cluster specific errors."""
    pass
class AskError(ResponseError):
    """ASK redirection error raised while a slot is being migrated.

    Only part of the slot's keys have moved yet:

    - src node is MIGRATING to the dst node: a miss yields an ASK error and
      the client must retry on the dst node after an ASKING command.
    - dst node is IMPORTING from the src node: ASKING only affects the next
      command; any operation is allowed after it.
    """
    def __init__(self, resp):
        """Parse ``"<slot> <host>:<port>"``; should only redirect to a
        master node."""
        self.args = (resp,)
        self.message = resp
        raw_slot, target = resp.split(' ')
        target_host, target_port = target.rsplit(':', 1)
        self.slot_id = int(raw_slot)
        self.host = target_host
        self.port = int(target_port)
        self.node_addr = (self.host, self.port)
class MovedError(AskError):
    """
    MOVED redirection: every key in the slot has migrated to another node,
    so the redirect is permanent (unlike the one-off ASK redirect).
    """
    pass
class ClusterNotHealthy(RedisCluException):
    """Raised when the cluster is not in a healthy state."""
    pass
class ClusterNotConsistent(RedisCluException):
    """Raised when cluster nodes disagree about the cluster layout."""
    pass
6651131 | <gh_stars>100-1000
from .vtk import VTK, VTKVolume # noqa
| StarcoderdataPython |
12832211 | <reponame>blazejmanczak/AoM-LineMatching<filename>overlay.py<gh_stars>1-10
# Copyright 2020-present, Netherlands Institute for Sound and Vision (<NAME>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from matchingMethods import all_in_one
import argparse
import os
import pandas as pd
from PIL import Image
import time
def print_config(cfg=None):
    """Print every key/value pair of an argparse namespace.

    Generalized: accepts the namespace to display as an optional argument;
    when omitted it falls back to the module-level ``config`` built in
    ``__main__`` (backward compatible with existing ``print_config()`` calls).
    Also fixes the "follwoing" typo in the banner message.
    """
    if cfg is None:
        cfg = config
    print("[INFO]: Overlaying with following parameters ...")
    for key, value in vars(cfg).items():
        print(key + ' : ' + str(value))
def overaly(config):
    """Overlay matched lines onto every image in ``config.path_dir`` and save
    the results into ``config.save_dir``.

    Fix: the per-image timing summary divided by ``count``, which raises
    ``ZeroDivisionError`` when no image was processed successfully; the
    summary is now guarded.  (The misspelled function name is kept because
    ``__main__`` calls ``overaly``.)
    """
    print("[INFO]: Loading in the pickeled data ... ")
    img_names = os.listdir(config.path_dir)
    img_paths = [os.path.join(config.path_dir, name) for name in img_names]
    data = pd.read_pickle(config.data_directory)
    non_zero_objects_dic = pd.read_pickle(config.non_zero_objects_dic_directory)
    # Parse "threshold,minLineLength,maxLineGap" from the CLI string.
    threshold, minLineLength, maxLineGap = [int(param) for param in config.hough_params.split(",")]

    start_time = time.time()
    count = 0
    for img_path in img_paths:
        # Deliberately best-effort: one bad image must not abort the batch.
        try:
            img_array = all_in_one(path=img_path, data=data, non_zero_objects_dic=non_zero_objects_dic,
                                   num_lines=config.num_lines, normalizing_stats=[71.73, 26.70, 254.71, 94.19],
                                   params_hough={"threshold": threshold, "minLineLength": minLineLength, "maxLineGap": maxLineGap})
            im = Image.fromarray(img_array)
            im.save(os.path.join(config.save_dir, "overlayed_" + img_path.split("/")[-1]))
            count += 1
        except Exception as e:
            print("Overlaying failed for path {} with exception {} ".format(img_path, e))
    end_time = time.time()
    if count:
        print("[INFO]: overalying and saving took on average {} seconds per query image".format(round((end_time-start_time)/count, 4)))
    else:
        print("[INFO]: no images were overlayed successfully")
# Script entry point: build the CLI, echo the configuration, then overlay.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--path_dir",
                        required=False,
                        type=str,
                        default = "frames/contemporary" ,
                        help="Directory containing images on which the matching should be done. All images in the directory will be matched. The directory should contain only images.")
    parser.add_argument("--save_dir",
                        required=False,
                        type=str,
                        default="frames/outputs",
                        help="Directory where the overlayed images should be stored.")
    parser.add_argument("--data_directory",
                        required = False,
                        type = str,
                        default = "data/data.pkl",
                        help = "Diectory to a pickle file of the processed archives")
    parser.add_argument("--non_zero_objects_dic_directory",
                        required=False,
                        type=str,
                        default="data/non_zero_object_dic.pickle",
                        help="Diectory to a pickle file of the processed matchingObjects that contain a line")
    parser.add_argument("--num_lines",
                        required=False,
                        type=int,
                        default=1,
                        help="How many lines should be overlayed? If num_lines bigger than matches, all matches are overlayed.")
    parser.add_argument("--hough_params",
                        required=False,
                        type=str,
                        default="200,150,25",
                        help="What parameters to use for line detection? Argument is expected to be a string of integers seperated by a comma. \
                            Consecutive ints stand for threshold, minLineLength and maxLineGap respectively.")

    config = parser.parse_args()
    print_config()
    overaly(config)
    print("[INFO]: Overlaying successful!")
5106377 | <gh_stars>0
import re
import unittest
from unittest.mock import Mock
from ats.topology import Device
from genie.metaparser.util.exceptions import SchemaEmptyParserError, \
SchemaMissingKeyError
from genie.libs.parser.nxos.show_ipv6 import ShowIpv6NeighborsDetailVrfAll, \
ShowIpv6RoutersVrfAll, \
ShowIpv6IcmpNeighborDetailVrfAll, \
ShowIpv6NdInterfaceVrfAll
#############################################################################
# Unittest For "show ipv6 routers vrf all"
#############################################################################
class test_show_ipv6_routers_vrf_all(unittest.TestCase):
    """Unit tests for the ShowIpv6RoutersVrfAll parser
    ('show ipv6 routers vrf all' on NX-OS).

    Each golden fixture pairs raw CLI output with the dict structure the
    parser is expected to produce from it.
    """

    device = Device(name='aDevice')
    # Empty CLI response: the parser must raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    # Expected parse of golden_output1: one router-advertisement record per
    # interface, keyed by interface name.
    golden_parsed_output1 = {
        'interfaces': {
            'Ethernet1/1': {
                'interface': 'Ethernet1/1',
                'router_advertisement': {
                    'router': 'fe80::f816:3eff:fe82:6320',
                    'last_update_time_min': '3.2',
                    'current_hop_limit': 64,
                    'lifetime': 1800,
                    'addrFlag': 0,
                    'other_flag': 0,
                    'mtu': 1500,
                    'home_agent_flag': 0,
                    'preference': 'Medium',
                    'reachable_time_msec': 0,
                    'retransmission_time': 0,
                    'prefix': {
                        '2010:2:3::/64': {
                            'autonomous_flag': 1,
                            'onlink_flag': 1,
                            'preferred_lifetime': 604800,
                            'valid_lifetime': 2592000}}}},
            'Ethernet1/2': {
                'interface': 'Ethernet1/2',
                'router_advertisement': {
                    'router': 'fe80::f816:3eff:fe8b:59c9',
                    'last_update_time_min': '1.5',
                    'current_hop_limit': 64,
                    'lifetime': 1800,
                    'addrFlag': 0,
                    'other_flag': 0,
                    'mtu': 1500,
                    'home_agent_flag': 0,
                    'preference': 'Medium',
                    'reachable_time_msec': 0,
                    'retransmission_time': 0,
                    'prefix': {
                        '2020:2:3::/64': {
                            'autonomous_flag': 1,
                            'onlink_flag': 1,
                            'preferred_lifetime': 604800,
                            'valid_lifetime': 2592000}}}},
            'Ethernet1/3': {
                'interface': 'Ethernet1/3',
                'router_advertisement': {
                    'router': 'fe80::f816:3eff:fe19:8682',
                    'last_update_time_min': '2.8',
                    'current_hop_limit': 64,
                    'lifetime': 1800,
                    'addrFlag': 0,
                    'other_flag': 0,
                    'mtu': 1500,
                    'home_agent_flag': 0,
                    'preference': 'Medium',
                    'reachable_time_msec': 0,
                    'retransmission_time': 0,
                    'prefix': {
                        '2010:1:3::/64': {
                            'autonomous_flag': 1,
                            'onlink_flag': 1,
                            'preferred_lifetime': 604800,
                            'valid_lifetime': 2592000}}}},
            'Ethernet1/4': {
                'interface': 'Ethernet1/4',
                'router_advertisement': {
                    'router': 'fe80::f816:3eff:fec7:8140',
                    'last_update_time_min': '2.3',
                    'current_hop_limit': 64,
                    'lifetime': 1800,
                    'addrFlag': 0,
                    'other_flag': 0,
                    'mtu': 1500,
                    'home_agent_flag': 0,
                    'preference': 'Medium',
                    'reachable_time_msec': 0,
                    'retransmission_time': 0,
                    'prefix': {
                        '2020:1:3::/64': {
                            'autonomous_flag': 1,
                            'onlink_flag': 1,
                            'preferred_lifetime': 604800,
                            'valid_lifetime': 2592000}}}}}}

    # Raw device output fed to the parser (must stay byte-for-byte; the
    # parser's regexes match against this text verbatim).
    golden_output1 = {'execute.return_value': '''
        n9kv-3# show ipv6 routers vrf all
        Router fe80::f816:3eff:fe82:6320 on Ethernet1/1 , last update time 3.2 min
        Current_hop_limit 64, Lifetime 1800, AddrFlag 0, OtherFlag 0, MTU 1500
        HomeAgentFlag 0, Preference Medium
        Reachable time 0 msec, Retransmission time 0 msec
        Prefix 2010:2:3::/64  onlink_flag 1 autonomous_flag 1
        valid lifetime 2592000, preferred lifetime 604800

        Router fe80::f816:3eff:fe8b:59c9 on Ethernet1/2 , last update time 1.5 min
        Current_hop_limit 64, Lifetime 1800, AddrFlag 0, OtherFlag 0, MTU 1500
        HomeAgentFlag 0, Preference Medium
        Reachable time 0 msec, Retransmission time 0 msec
        Prefix 2020:2:3::/64  onlink_flag 1 autonomous_flag 1
        valid lifetime 2592000, preferred lifetime 604800

        Router fe80::f816:3eff:fe19:8682 on Ethernet1/3 , last update time 2.8 min
        Current_hop_limit 64, Lifetime 1800, AddrFlag 0, OtherFlag 0, MTU 1500
        HomeAgentFlag 0, Preference Medium
        Reachable time 0 msec, Retransmission time 0 msec
        Prefix 2010:1:3::/64  onlink_flag 1 autonomous_flag 1
        valid lifetime 2592000, preferred lifetime 604800

        Router fe80::f816:3eff:fec7:8140 on Ethernet1/4 , last update time 2.3 min
        Current_hop_limit 64, Lifetime 1800, AddrFlag 0, OtherFlag 0, MTU 1500
        HomeAgentFlag 0, Preference Medium
        Reachable time 0 msec, Retransmission time 0 msec
        Prefix 2020:1:3::/64  onlink_flag 1 autonomous_flag 1
        valid lifetime 2592000, preferred lifetime 604800
        '''}

    def test_show_ipv6_routers_vrf_all_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowIpv6RoutersVrfAll(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_ipv6_routers_vrf_all_golden1(self):
        """Golden output must parse into golden_parsed_output1 exactly."""
        self.device = Mock(**self.golden_output1)
        obj = ShowIpv6RoutersVrfAll(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output1)
#############################################################################
# Unittest for 'show ipv6 icmp neighbor detail vrf all'
#############################################################################
class test_show_ipv6_icmp_neighbor_detail_vrf_all(unittest.TestCase):
    """Unit tests for the ShowIpv6IcmpNeighborDetailVrfAll parser
    ('show ipv6 icmp neighbor detail vrf all' on NX-OS).
    """

    device = Device(name='aDevice')
    # Empty CLI response: the parser must raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}

    # Expected parse: neighbors grouped per interface. Note the top-level key
    # uses the long form ('Ethernet1/1') while 'interface'/'phy_interface'
    # keep the CLI short form ('Eth1/1').
    golden_parsed_output = {
        'interfaces': {
            'Ethernet1/1': {
                'interface': 'Eth1/1',
                'phy_interface': 'Eth1/1',
                'neighbors': {
                    'fc00:e968:6179::de52:7100': {
                        'ip': 'fc00:e968:6179::de52:7100',
                        'age': '00:15:02',
                        'mac_address': 'fa16.3e82.6320',
                        'state': 'STALE'},
                    'fe80::f816:3eff:fe82:6320': {
                        'ip': 'fe80::f816:3eff:fe82:6320',
                        'age': '00:18:33',
                        'mac_address': 'fa16.3e82.6320',
                        'state': 'STALE'}}},
            'Ethernet1/2': {
                'interface': 'Eth1/2',
                'phy_interface': 'Eth1/2',
                'neighbors': {
                    'fdf8:f53e:61e4::18': {
                        'ip': 'fdf8:f53e:61e4::18',
                        'age': '00:03:30',
                        'mac_address': 'fa16.3e8b.59c9',
                        'state': 'STALE'},
                    'fe80::f816:3eff:fe8b:59c9': {
                        'ip': 'fe80::f816:3eff:fe8b:59c9',
                        'age': '00:14:19',
                        'mac_address': 'fa16.3e8b.59c9',
                        'state': 'STALE'}}},
            'Ethernet1/3': {
                'interface': 'Eth1/3',
                'phy_interface': 'Eth1/3',
                'neighbors': {
                    'fdf8:f53e:61e4::18': {
                        'ip': 'fdf8:f53e:61e4::18',
                        'age': '00:15:31',
                        'mac_address': 'fa16.3e19.8682',
                        'state': 'STALE'}}},
            'Ethernet1/4': {
                'interface': 'Eth1/4',
                'phy_interface': 'Eth1/4',
                'neighbors': {
                    'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {
                        'ip': 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',
                        'age': '00:07:58',
                        'mac_address': 'fa16.3ec7.8140',
                        'state': 'STALE'},
                    'fe80::f816:3eff:fec7:8140': {
                        'ip': 'fe80::f816:3eff:fec7:8140',
                        'age': '00:02:41',
                        'mac_address': 'fa16.3ec7.8140',
                        'state': 'STALE'}}}}}

    # Raw device output fed to the parser.
    # NOTE(review): some fixture addresses ('2fdf8:f53e:61e4::18',
    # '2fc00:e968:6179::de52:7100') do not match the keys expected above
    # ('fdf8:f53e:61e4::18', 'fd00:a516:...'); this looks like the fixture
    # addresses were mangled by an anonymization pass -- verify against the
    # parser's regexes before trusting this test.
    golden_output = {'execute.return_value': '''
        n9kv-3# show ipv6 icmp neighbor detail vrf all

        Flags: + - Adjacencies synced via CFSoE
               # - Adjacencies Throttled for Glean

        ICMPv6 Adjacency Table for all VRFs
        Address         Age       MAC Address     State      Interface  Phy-Interface
        fc00:e968:6179::de52:7100 00:15:02  fa16.3e82.6320  STALE      Eth1/1     Eth1/1
        fe80::f816:3eff:fe82:6320
                        00:18:33  fa16.3e82.6320  STALE      Eth1/1     Eth1/1
        2fdf8:f53e:61e4::18 00:03:30  fa16.3e8b.59c9  STALE      Eth1/2     Eth1/2
        fe80::f816:3eff:fe8b:59c9
                        00:14:19  fa16.3e8b.59c9  STALE      Eth1/2     Eth1/2
        fdf8:f53e:61e4::18 00:15:31  fa16.3e19.8682  STALE      Eth1/3     Eth1/3
        fe80::f816:3eff:fe19:8682
                        00:15:31  fa16.3e19.8682  STALE      Eth1/3     Eth1/3
        2fc00:e968:6179::de52:7100 00:07:58  fa16.3ec7.8140  STALE      Eth1/4     Eth1/4
        fe80::f816:3eff:fec7:8140
                        00:02:41  fa16.3ec7.8140  STALE      Eth1/4     Eth1/4
        '''}

    def test_show_ipv6_icmp_neighbor_detail_vrf_all_empty(self):
        """Empty device output must raise SchemaEmptyParserError."""
        self.device = Mock(**self.empty_output)
        obj = ShowIpv6IcmpNeighborDetailVrfAll(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_ipv6_icmp_neighbor_detail_vrf_all_golden(self):
        """Golden output must parse into golden_parsed_output exactly."""
        self.device = Mock(**self.golden_output)
        obj = ShowIpv6IcmpNeighborDetailVrfAll(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)
#############################################################################
# Unittest for 'show ipv6 nd interface vrf all'
#############################################################################
class test_show_ipv6_nd_interface_vrf_all(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'interfaces': {
'Ethernet1/1': {
'interface': 'Ethernet1/1',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'fc00:db20:35b:7399::5/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c01:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:06:16',
'last_neighbor_advertisement_sent': '00:02:12',
'last_router_advertisement_sent': '1d18h',
'next_router_advertisement_sent': '0.000000'
},
'router_advertisement': {
'periodic_interval_seconds': '200-201',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1801,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Enabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/3': {
'interface': 'Ethernet1/3',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'fdf8:f53e:61e4::18/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c01:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:07:39',
'last_neighbor_advertisement_sent': '02:39:27',
'last_router_advertisement_sent': '00:01:33',
'next_router_advertisement_sent': '00:03:50'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'loopback0': {
'interface': 'loopback0',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:3:3::3/128': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c01:c0ff:fe02:0': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': 'never',
'last_neighbor_advertisement_sent': 'never',
'last_router_advertisement_sent': 'never',
'next_router_advertisement_sent': 'never'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 0
}
},
'loopback1': {
'interface': 'loopback1',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:33:33::33/128': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c01:c0ff:fe02:0': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': 'never',
'last_neighbor_advertisement_sent': 'never',
'last_router_advertisement_sent': 'never',
'next_router_advertisement_sent': 'never'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 0
}
},
'Ethernet1/2': {
'interface': 'Ethernet1/2',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'vrf1',
'ipv6_address': {
'fc00:db20:35b:7399::5/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c01:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:09:34',
'last_neighbor_advertisement_sent': '00:01:07',
'last_router_advertisement_sent': '00:05:42',
'next_router_advertisement_sent': '00:01:46'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/4': {
'interface': 'Ethernet1/4',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'vrf1',
'ipv6_address': {
'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c01:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:03:31',
'last_neighbor_advertisement_sent': '07:32:12',
'last_router_advertisement_sent': '00:08:09',
'next_router_advertisement_sent': '00:01:36'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
}
}
}
golden_output = {'execute.return_value': '''
n9kv-3# show ipv6 nd interface vrf all
ICMPv6 ND Interfaces for VRF "default"
Ethernet1/1, Interface status: protocol-up/link-up/admin-up
IPv6 address:
fc00:db20:35b:7399::5/64 [VALID]
IPv6 link-local address: fe80::5c01:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:06:16
Last Neighbor-Advertisement sent: 00:02:12
Last Router-Advertisement sent: 1d18h
Next Router-Advertisement sent in: 0.000000
Router-Advertisement parameters:
Periodic interval: 200 to 201 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1801 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Enabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/3, Interface status: protocol-up/link-up/admin-up
IPv6 address:
fdf8:f53e:61e4::18/64 [VALID]
IPv6 link-local address: fe80::5c01:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:07:39
Last Neighbor-Advertisement sent: 02:39:27
Last Router-Advertisement sent: 00:01:33
Next Router-Advertisement sent in: 00:03:50
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
loopback0, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:3:3::3/128 [VALID]
IPv6 link-local address: fe80::5c01:c0ff:fe02:0 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: never
Last Neighbor-Advertisement sent: never
Last Router-Advertisement sent: never
Next Router-Advertisement sent in: never
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 0
loopback1, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:33:33::33/128 [VALID]
IPv6 link-local address: fe80::5c01:c0ff:fe02:0 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: never
Last Neighbor-Advertisement sent: never
Last Router-Advertisement sent: never
Next Router-Advertisement sent in: never
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 0
ICMPv6 ND Interfaces for VRF "management"
ICMPv6 ND Interfaces for VRF "vrf1"
Ethernet1/2, Interface status: protocol-up/link-up/admin-up
IPv6 address:
fc00:db20:35b:7399::5/64 [VALID]
IPv6 link-local address: fe80::5c01:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:09:34
Last Neighbor-Advertisement sent: 00:01:07
Last Router-Advertisement sent: 00:05:42
Next Router-Advertisement sent in: 00:01:46
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/4, Interface status: protocol-up/link-up/admin-up
IPv6 address:
fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 [VALID]
IPv6 link-local address: fe80::5c01:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:03:31
Last Neighbor-Advertisement sent: 07:32:12
Last Router-Advertisement sent: 00:08:09
Next Router-Advertisement sent in: 00:01:36
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
'''}
golden_parsed_output_2 = {
'interfaces': {
'Ethernet1/1.390': {
'interface': 'Ethernet1/1.390',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:23:90::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:22:04',
'last_neighbor_advertisement_sent': '00:00:39',
'last_router_advertisement_sent': '00:05:46',
'next_router_advertisement_sent': '00:03:54'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/1.410': {
'interface': 'Ethernet1/1.410',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:23:110::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:21:53',
'last_neighbor_advertisement_sent': '00:01:19',
'last_router_advertisement_sent': '00:04:54',
'next_router_advertisement_sent': '00:00:20'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/1.415': {
'interface': 'Ethernet1/1.415',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:23:115::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '1d14h',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:01:22',
'next_router_advertisement_sent': '00:08:35'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/1.420': {
'interface': 'Ethernet1/1.420',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:23:120::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '1d14h',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:03:45',
'next_router_advertisement_sent': '00:05:09'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.390': {
'interface': 'Ethernet1/2.390',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:13:90::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:22:20',
'last_neighbor_advertisement_sent': '03:25:16',
'last_router_advertisement_sent': '00:05:51',
'next_router_advertisement_sent': '00:01:37'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.410': {
'interface': 'Ethernet1/2.410',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:13:110::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '1d14h',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:03:48',
'next_router_advertisement_sent': '00:03:33'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.415': {
'interface': 'Ethernet1/2.415',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:13:115::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:23:24',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:02:47',
'next_router_advertisement_sent': '00:05:52'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.420': {
'interface': 'Ethernet1/2.420',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:10:13:120::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:18:48',
'last_neighbor_advertisement_sent': '00:18:43',
'last_router_advertisement_sent': '00:01:56',
'next_router_advertisement_sent': '00:07:53'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'loopback300': {
'interface': 'loopback300',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'VRF1',
'ipv6_address': {
'2001:3:3:3::3/128': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:0': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': 'never',
'last_neighbor_advertisement_sent': 'never',
'last_router_advertisement_sent': 'never',
'next_router_advertisement_sent': 'never'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 0
}
},
'Ethernet1/1.90': {
'interface': 'Ethernet1/1.90',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:23:90::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:05:07',
'last_neighbor_advertisement_sent': '00:00:47',
'last_router_advertisement_sent': '00:07:57',
'next_router_advertisement_sent': '00:01:02'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/1.110': {
'interface': 'Ethernet1/1.110',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:23:110::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:24:10',
'last_neighbor_advertisement_sent': '00:01:15',
'last_router_advertisement_sent': '00:03:02',
'next_router_advertisement_sent': '00:05:17'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/1.115': {
'interface': 'Ethernet1/1.115',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:23:115::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:01:25',
'last_neighbor_advertisement_sent': '00:02:46',
'last_router_advertisement_sent': '00:02:50',
'next_router_advertisement_sent': '00:04:39'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/1.120': {
'interface': 'Ethernet1/1.120',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:23:120::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '1d14h',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:05:39',
'next_router_advertisement_sent': '00:00:57'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.90': {
'interface': 'Ethernet1/2.90',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:13:90::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:10:03',
'last_neighbor_advertisement_sent': '05:59:34',
'last_router_advertisement_sent': '00:07:11',
'next_router_advertisement_sent': '00:00:28'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.110': {
'interface': 'Ethernet1/2.110',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:13:110::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:20:07',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:01:37',
'next_router_advertisement_sent': '00:03:52'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.115': {
'interface': 'Ethernet1/2.115',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:13:115::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:08:55',
'last_neighbor_advertisement_sent': '1d14h',
'last_router_advertisement_sent': '00:01:11',
'next_router_advertisement_sent': '00:05:33'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'Ethernet1/2.120': {
'interface': 'Ethernet1/2.120',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:10:13:120::3/64': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:7': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': '00:20:07',
'last_neighbor_advertisement_sent': '00:20:02',
'last_router_advertisement_sent': '00:01:48',
'next_router_advertisement_sent': '00:02:21'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 1
}
},
'loopback0': {
'interface': 'loopback0',
'interface_status': 'protocol-up/link-up/admin-up',
'vrf': 'default',
'ipv6_address': {
'2001:3:3:3::3/128': {
'status': 'VALID'
}
},
'ipv6_link_local_address': {
'fe80::5c00:c0ff:fe02:0': {
'status': 'VALID'
}
},
'nd_mac_extract': 'Disabled',
'icmpv6_active_timers': {
'last_neighbor_solicitation_sent': 'never',
'last_neighbor_advertisement_sent': 'never',
'last_router_advertisement_sent': 'never',
'next_router_advertisement_sent': 'never'
},
'router_advertisement': {
'periodic_interval_seconds': '200-600',
'send_managed_address_configuration_flag': 'false',
'send_other_stateful_configuration_flag': 'false',
'send_default_router_preference_value': 'Medium',
'send_current_hop_limit': 64,
'send_mtu': 1500,
'send_router_lifetime_secs': 1800,
'send_reachable_time_ms': 0,
'send_retrans_timer_ms': 0,
'suppress_ra': 'Disabled',
'suppress_mtu_ra': 'Disabled',
'suppress_route_information_option_ra': 'Disabled'
},
'neighbor_solicitation': {
'ns_retransmit_interval_ms': 1000,
'nd_nud_retry_base': 1,
'nd_nud_retry_interval': 1000,
'nd_nud_retry_attempts': 3
},
'icmpv6_error_message': {
'send_redirects_num': 0,
'send_unreachables': 'false'
},
'icmpv6_dad': {
'maximum_dad_attempts': 1,
'current_dad_attempt': 0
}
}
}
}
# Raw device output for 'show ipv6 nd interface vrf all' (second golden
# scenario: VRF1, default and management VRFs).  The string is fed to the
# parser verbatim through Mock(**golden_output_2), so its content must not
# be altered.
golden_output_2 = {'execute.return_value': '''
# show ipv6 nd interface vrf all
ICMPv6 ND Interfaces for VRF "VRF1"
Ethernet1/1.390, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:90::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:22:04
Last Neighbor-Advertisement sent: 00:00:39
Last Router-Advertisement sent: 00:05:46
Next Router-Advertisement sent in: 00:03:54
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/1.410, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:110::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:21:53
Last Neighbor-Advertisement sent: 00:01:19
Last Router-Advertisement sent: 00:04:54
Next Router-Advertisement sent in: 00:00:20
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/1.415, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:115::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 1d14h
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:01:22
Next Router-Advertisement sent in: 00:08:35
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/1.420, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:120::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 1d14h
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:03:45
Next Router-Advertisement sent in: 00:05:09
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.390, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:90::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:22:20
Last Neighbor-Advertisement sent: 03:25:16
Last Router-Advertisement sent: 00:05:51
Next Router-Advertisement sent in: 00:01:37
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.410, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:110::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 1d14h
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:03:48
Next Router-Advertisement sent in: 00:03:33
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.415, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:115::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:23:24
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:02:47
Next Router-Advertisement sent in: 00:05:52
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.420, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:120::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:18:48
Last Neighbor-Advertisement sent: 00:18:43
Last Router-Advertisement sent: 00:01:56
Next Router-Advertisement sent in: 00:07:53
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
loopback300, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:3:3:3::3/128 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:0 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: never
Last Neighbor-Advertisement sent: never
Last Router-Advertisement sent: never
Next Router-Advertisement sent in: never
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 0
ICMPv6 ND Interfaces for VRF "default"
Ethernet1/1.90, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:90::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:05:07
Last Neighbor-Advertisement sent: 00:00:47
Last Router-Advertisement sent: 00:07:57
Next Router-Advertisement sent in: 00:01:02
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/1.110, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:110::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:24:10
Last Neighbor-Advertisement sent: 00:01:15
Last Router-Advertisement sent: 00:03:02
Next Router-Advertisement sent in: 00:05:17
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/1.115, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:115::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:01:25
Last Neighbor-Advertisement sent: 00:02:46
Last Router-Advertisement sent: 00:02:50
Next Router-Advertisement sent in: 00:04:39
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/1.120, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:23:120::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 1d14h
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:05:39
Next Router-Advertisement sent in: 00:00:57
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.90, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:90::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:10:03
Last Neighbor-Advertisement sent: 05:59:34
Last Router-Advertisement sent: 00:07:11
Next Router-Advertisement sent in: 00:00:28
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.110, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:110::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:20:07
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:01:37
Next Router-Advertisement sent in: 00:03:52
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.115, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:115::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:08:55
Last Neighbor-Advertisement sent: 1d14h
Last Router-Advertisement sent: 00:01:11
Next Router-Advertisement sent in: 00:05:33
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
Ethernet1/2.120, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:10:13:120::3/64 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:7 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: 00:20:07
Last Neighbor-Advertisement sent: 00:20:02
Last Router-Advertisement sent: 00:01:48
Next Router-Advertisement sent in: 00:02:21
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 1
loopback0, Interface status: protocol-up/link-up/admin-up
IPv6 address:
2001:3:3:3::3/128 [VALID]
IPv6 link-local address: fe80::5c00:c0ff:fe02:0 [VALID]
ND mac-extract : Disabled
ICMPv6 active timers:
Last Neighbor-Solicitation sent: never
Last Neighbor-Advertisement sent: never
Last Router-Advertisement sent: never
Next Router-Advertisement sent in: never
Router-Advertisement parameters:
Periodic interval: 200 to 600 seconds
Send "Managed Address Configuration" flag: false
Send "Other Stateful Configuration" flag: false
Send "Default Router Preference" value: Medium
Send "Current Hop Limit" field: 64
Send "MTU" option value: 1500
Send "Router Lifetime" field: 1800 secs
Send "Reachable Time" field: 0 ms
Send "Retrans Timer" field: 0 ms
Suppress RA: Disabled
Suppress MTU in RA: Disabled
Suppress Route Information Option in RA: Disabled
Neighbor-Solicitation parameters:
NS retransmit interval: 1000 ms
ND NUD retry base: 1
ND NUD retry interval: 1000
ND NUD retry attempts: 3
ICMPv6 error message parameters:
Send redirects: true (0)
Send unreachables: false
ICMPv6 DAD parameters:
Maximum DAD attempts: 1
Current DAD attempt : 0
ICMPv6 ND Interfaces for VRF "management"
'''}
def test_show_ipv6_nd_interface_vrf_all_empty(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    obj = ShowIpv6NdInterfaceVrfAll(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parsed_output = obj.parse()
def test_show_ipv6_nd_interface_vrf_all_golden(self):
    """Golden device output parses into the expected dictionary."""
    self.device = Mock(**self.golden_output)
    obj = ShowIpv6NdInterfaceVrfAll(device=self.device)
    parsed_output = obj.parse()
    self.assertEqual(parsed_output, self.golden_parsed_output)
def test_show_ipv6_nd_interface_vrf_all_golden_2(self):
    """Second golden device output parses into its expected dictionary."""
    self.device = Mock(**self.golden_output_2)
    obj = ShowIpv6NdInterfaceVrfAll(device=self.device)
    parsed_output = obj.parse()
    self.assertEqual(parsed_output, self.golden_parsed_output_2)
#############################################################################
# Unittest for 'show ipv6 neighbor detail vrf all'
#############################################################################
class test_show_ipv6_neighbor_detail_vrf_all(unittest.TestCase):
    """Unit tests for the 'show ipv6 neighbor detail vrf all' parser."""
    # Shared mock device; each test replaces it with a Mock carrying fixture output.
    device = Device(name='aDevice')
    empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'adjacency_hit': {
'GLEAN': {
'byte_count': 0,
'packet_count': 0},
'GLOBAL DROP': {
'byte_count': 0,
'packet_count': 0},
'GLOBAL GLEAN': {
'byte_count': 0,
'packet_count': 0},
'GLOBAL PUNT': {
'byte_count': 0,
'packet_count': 0},
'INVALID': {
'byte_count': 0,
'packet_count': 0},
'NORMAL': {
'byte_count': 0,
'packet_count': 0}},
'adjacency_statistics_last_updated_before': 'never',
'interfaces': {
'Ethernet1/1': {
'interface': 'Ethernet1/1',
'neighbors': {
'fc00:e968:6179::de52:7100': {
'age': '00:09:27',
'best': 'Yes',
'byte_count': 0,
'ip': 'fc00:e968:6179::de52:7100',
'mac_addr': 'fa16.3e82.6320',
'packet_count': 0,
'physical_interface': 'Ethernet1/1',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'},
'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {
'age': '2d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',
'mac_addr': 'aabb.beef.cccc',
'packet_count': 0,
'physical_interface': 'Ethernet1/1',
'preference': '1',
'source': 'Static',
'throttled': 'No'},
'fdf8:f53e:61e4::18': {
'age': '1d18h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fdf8:f53e:61e4::18',
'mac_addr': 'aaab.beef.ccce',
'packet_count': 0,
'physical_interface': 'Ethernet1/1',
'preference': '1',
'source': 'Static',
'throttled': 'No'},
'fe80::f816:3eff:fe82:6320': {
'age': '00:05:42',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe82:6320',
'mac_addr': 'fa16.3e82.6320',
'packet_count': 0,
'physical_interface': 'Ethernet1/1',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2': {
'interface': 'Ethernet1/2',
'neighbors': {
'fdf8:f53e:61e4::18': {
'age': '00:09:00',
'best': 'Yes',
'byte_count': 0,
'ip': 'fdf8:f53e:61e4::18',
'mac_addr': 'fa16.3e8b.59c9',
'packet_count': 0,
'physical_interface': 'Ethernet1/2',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'},
'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {
'age': '2d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',
'mac_addr': 'aaaa.bbbb.cccc',
'packet_count': 0,
'physical_interface': 'Ethernet1/2',
'preference': '1',
'source': 'Static',
'throttled': 'No'},
'fe80::f816:3eff:fe8b:59c9': {
'age': '00:14:08',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe8b:59c9',
'mac_addr': 'fa16.3e8b.59c9',
'packet_count': 0,
'physical_interface': 'Ethernet1/2',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/3': {
'interface': 'Ethernet1/3',
'neighbors': {
'fdf8:f53e:61e4::18': {
'age': '2d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fdf8:f53e:61e4::18',
'mac_addr': 'fa16.3e19.8682',
'packet_count': 0,
'physical_interface': 'Ethernet1/3',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'},
'fe80::f816:3eff:fe19:8682': {
'age': '2d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe19:8682',
'mac_addr': 'fa16.3e19.8682',
'packet_count': 0,
'physical_interface': 'Ethernet1/3',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/4': {
'interface': 'Ethernet1/4',
'neighbors': {
'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {
'age': '2d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',
'mac_addr': 'fa16.3ec7.8140',
'packet_count': 0,
'physical_interface': 'Ethernet1/4',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'},
'fe80::f816:3eff:fec7:8140': {
'age': '2d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fec7:8140',
'mac_addr': 'fa16.3ec7.8140',
'packet_count': 0,
'physical_interface': 'Ethernet1/4',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}}},
'total_number_of_entries': 11}
golden_output1 = {'execute.return_value': '''
n9kv-3# show ipv6 neighbor detail vrf all
No. of Adjacency hit with type INVALID: Packet count 0, Byte count 0
No. of Adjacency hit with type GLOBAL DROP: Packet count 0, Byte count 0
No. of Adjacency hit with type GLOBAL PUNT: Packet count 0, Byte count 0
No. of Adjacency hit with type GLOBAL GLEAN: Packet count 0, Byte count 0
No. of Adjacency hit with type GLEAN: Packet count 0, Byte count 0
No. of Adjacency hit with type NORMAL: Packet count 0, Byte count 0
Adjacency statistics last updated before: never
IPv6 Adjacency Table for all VRFs
Total number of entries: 11
Address : fc00:e968:6179::de52:7100
Age : 00:09:27
MacAddr : fa16.3e82.6320
Preference : 50
Source : icmpv6
Interface : Ethernet1/1
Physical Interface : Ethernet1/1
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b
Age : 2d15h
MacAddr : aabb.beef.cccc
Preference : 1
Source : Static
Interface : Ethernet1/1
Physical Interface : Ethernet1/1
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fdf8:f53e:61e4::18
Age : 1d18h
MacAddr : aaab.beef.ccce
Preference : 1
Source : Static
Interface : Ethernet1/1
Physical Interface : Ethernet1/1
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe82:6320
Age : 00:05:42
MacAddr : fa16.3e82.6320
Preference : 50
Source : icmpv6
Interface : Ethernet1/1
Physical Interface : Ethernet1/1
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fdf8:f53e:61e4::18
Age : 00:09:00
MacAddr : fa16.3e8b.59c9
Preference : 50
Source : icmpv6
Interface : Ethernet1/2
Physical Interface : Ethernet1/2
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b
Age : 2d15h
MacAddr : aaaa.bbbb.cccc
Preference : 1
Source : Static
Interface : Ethernet1/2
Physical Interface : Ethernet1/2
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe8b:59c9
Age : 00:14:08
MacAddr : fa16.3e8b.59c9
Preference : 50
Source : icmpv6
Interface : Ethernet1/2
Physical Interface : Ethernet1/2
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fdf8:f53e:61e4::18
Age : 2d15h
MacAddr : fa16.3e19.8682
Preference : 50
Source : icmpv6
Interface : Ethernet1/3
Physical Interface : Ethernet1/3
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe19:8682
Age : 2d15h
MacAddr : fa16.3e19.8682
Preference : 50
Source : icmpv6
Interface : Ethernet1/3
Physical Interface : Ethernet1/3
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b
Age : 2d15h
MacAddr : fa16.3ec7.8140
Preference : 50
Source : icmpv6
Interface : Ethernet1/4
Physical Interface : Ethernet1/4
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fec7:8140
Age : 2d15h
MacAddr : fa16.3ec7.8140
Preference : 50
Source : icmpv6
Interface : Ethernet1/4
Physical Interface : Ethernet1/4
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No'''}
golden_parsed_output2 = {
'adjacency_hit': {
'GLEAN': {
'byte_count': 0,
'packet_count': 0},
'GLOBAL DROP': {
'byte_count': 0,
'packet_count': 0},
'GLOBAL GLEAN': {
'byte_count': 0,
'packet_count': 0},
'GLOBAL PUNT': {
'byte_count': 0,
'packet_count': 0},
'INVALID': {
'byte_count': 0,
'packet_count': 0},
'NORMAL': {
'byte_count': 0,
'packet_count': 0}},
'adjacency_statistics_last_updated_before': 'never',
'interfaces': {
'Ethernet1/1.110': {
'interface': 'Ethernet1/1.110',
'neighbors': {
'fe80::f816:3eff:fe5a:9eb3': {
'age': '00:02:23',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe5a:9eb3',
'mac_addr': 'fa16.3e5a.9eb3',
'packet_count': 0,
'physical_interface': 'Ethernet1/1.110',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/1.115': {
'interface': 'Ethernet1/1.115',
'neighbors': {
'fe80::f816:3eff:fe5a:9eb3': {
'age': '00:04:11',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe5a:9eb3',
'mac_addr': 'fa16.3e5a.9eb3',
'packet_count': 0,
'physical_interface': 'Ethernet1/1.115',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/1.390': {
'interface': 'Ethernet1/1.390',
'neighbors': {
'fe80::f816:3eff:fe5a:9eb3': {
'age': '00:22:28',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe5a:9eb3',
'mac_addr': 'fa16.3e5a.9eb3',
'packet_count': 0,
'physical_interface': 'Ethernet1/1.390',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/1.410': {
'interface': 'Ethernet1/1.410',
'neighbors': {
'fe80::f816:3eff:fe5a:9eb3': {
'age': '00:02:30',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe5a:9eb3',
'mac_addr': 'fa16.3e5a.9eb3',
'packet_count': 0,
'physical_interface': 'Ethernet1/1.410',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/1.90': {
'interface': 'Ethernet1/1.90',
'neighbors': {
'fe80::f816:3eff:fe5a:9eb3': {
'age': '00:08:01',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe5a:9eb3',
'mac_addr': 'fa16.3e5a.9eb3',
'packet_count': 0,
'physical_interface': 'Ethernet1/1.90',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.110': {
'interface': 'Ethernet1/2.110',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.110',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.115': {
'interface': 'Ethernet1/2.115',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.115',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.120': {
'interface': 'Ethernet1/2.120',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.120',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.390': {
'interface': 'Ethernet1/2.390',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.390',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.415': {
'interface': 'Ethernet1/2.415',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.415',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.420': {
'interface': 'Ethernet1/2.420',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.420',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}},
'Ethernet1/2.90': {
'interface': 'Ethernet1/2.90',
'neighbors': {
'fe80::f816:3eff:fe55:9514': {
'age': '1d15h',
'best': 'Yes',
'byte_count': 0,
'ip': 'fe80::f816:3eff:fe55:9514',
'mac_addr': 'fa16.3e55.9514',
'packet_count': 0,
'physical_interface': 'Ethernet1/2.90',
'preference': '50',
'source': 'icmpv6',
'throttled': 'No'}}}},
'total_number_of_entries': 12}
golden_output2 = {'execute.return_value': '''
show ipv6 neighbor detail vrf all
No. of Adjacency hit with type INVALID: Packet count 0, Byte count 0
No. of Adjacency hit with type GLOBAL DROP: Packet count 0, Byte count 0
No. of Adjacency hit with type GLOBAL PUNT: Packet count 0, Byte count 0
No. of Adjacency hit with type GLOBAL GLEAN: Packet count 0, Byte count 0
No. of Adjacency hit with type GLEAN: Packet count 0, Byte count 0
No. of Adjacency hit with type NORMAL: Packet count 0, Byte count 0
Adjacency statistics last updated before: never
IPv6 Adjacency Table for all VRFs
Total number of entries: 12
Address : fe80::f816:3eff:fe5a:9eb3
Age : 00:08:01
MacAddr : fa16.3e5a.9eb3
Preference : 50
Source : icmpv6
Interface : Ethernet1/1.90
Physical Interface : Ethernet1/1.90
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe5a:9eb3
Age : 00:02:23
MacAddr : fa16.3e5a.9eb3
Preference : 50
Source : icmpv6
Interface : Ethernet1/1.110
Physical Interface : Ethernet1/1.110
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe5a:9eb3
Age : 00:04:11
MacAddr : fa16.3e5a.9eb3
Preference : 50
Source : icmpv6
Interface : Ethernet1/1.115
Physical Interface : Ethernet1/1.115
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe5a:9eb3
Age : 00:22:28
MacAddr : fa16.3e5a.9eb3
Preference : 50
Source : icmpv6
Interface : Ethernet1/1.390
Physical Interface : Ethernet1/1.390
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe5a:9eb3
Age : 00:02:30
MacAddr : fa16.3e5a.9eb3
Preference : 50
Source : icmpv6
Interface : Ethernet1/1.410
Physical Interface : Ethernet1/1.410
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.90
Physical Interface : Ethernet1/2.90
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.110
Physical Interface : Ethernet1/2.110
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.115
Physical Interface : Ethernet1/2.115
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.120
Physical Interface : Ethernet1/2.120
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.390
Physical Interface : Ethernet1/2.390
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.415
Physical Interface : Ethernet1/2.415
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
Address : fe80::f816:3eff:fe55:9514
Age : 1d15h
MacAddr : fa16.3e55.9514
Preference : 50
Source : icmpv6
Interface : Ethernet1/2.420
Physical Interface : Ethernet1/2.420
Packet Count : 0
Byte Count : 0
Best : Yes
Throttled : No
'''}
def test_show_ipv6_neighbor_detail_vrf_all_empty(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    obj = ShowIpv6NeighborsDetailVrfAll(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parsed_output = obj.parse()
def test_show_ipv6_neighbor_detail_vrf_all_golden1(self):
    """First golden output parses into golden_parsed_output1."""
    self.device = Mock(**self.golden_output1)
    obj = ShowIpv6NeighborsDetailVrfAll(device=self.device)
    parsed_output = obj.parse()
    self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_ipv6_neighbor_detail_vrf_all_golden2(self):
    """Second golden output (sub-interfaces) parses into golden_parsed_output2."""
    self.device = Mock(**self.golden_output2)
    obj = ShowIpv6NeighborsDetailVrfAll(device=self.device)
    parsed_output = obj.parse()
    self.assertEqual(parsed_output, self.golden_parsed_output2)
if __name__ == '__main__':
    unittest.main()
1626157 | """Sorting utilities for alphanumeric strings."""
import re
def _atoi(text):
"""Convert a string to an int."""
return int(text) if text.isdigit() else text
def natural_sort(text):
    """Split *text* into alternating text/number chunks usable as a sort key.

    Digit runs become ints so that e.g. "file2" orders before "file10".
    """
    return [int(tok) if tok.isdigit() else tok
            for tok in re.split(r"(\d+)", text)]
#!/usr/bin/env python3
import unittest
import numpy as np
from panda import Panda
from panda.tests.safety import libpandasafety_py
import panda.tests.safety.common as common
from panda.tests.safety.common import CANPackerPanda, make_msg, \
MAX_WRONG_COUNTERS, UNSAFE_MODE
MAX_BRAKE = 255
class Btn:
    """Cruise-control button codes sent in SCM_BUTTONS.CRUISE_BUTTONS."""
    CANCEL = 2
    SET = 3
    RESUME = 4
# Honda hardware variants as returned by safety.get_honda_hw()
# (N = Nidec, BG = Bosch Giraffe, BH = Bosch Harness; matches the safety
# modes configured in the setUp methods below).
HONDA_N_HW = 0
HONDA_BG_HW = 1
HONDA_BH_HW = 2
class TestHondaSafety(common.PandaSafetyTest):
    """Base safety tests shared by all Honda safety models.

    Subclasses (Nidec / Bosch Giraffe / Bosch Harness) provide the CAN packer,
    the safety model and the bus layout in their setUp; the base class itself
    is skipped by setUpClass.
    """

    # Rolling 2-bit message counters, shared across tests on the class so
    # consecutive fixture messages carry consecutive counter values.
    cnt_speed = 0
    cnt_gas = 0
    cnt_button = 0

    # CAN bus index carrying powertrain messages (overridden per hardware).
    PT_BUS = 0

    @classmethod
    def setUpClass(cls):
        # The base class has no packer/safety model of its own - skip it.
        if cls.__name__ == "TestHondaSafety":
            cls.packer = None
            cls.safety = None
            raise unittest.SkipTest

    # override these inherited tests. honda doesn't use pcm enable
    def test_disable_control_allowed_from_cruise(self): pass
    def test_enable_control_allowed_from_cruise(self): pass
    def test_cruise_engaged_prev(self): pass

    def _speed_msg(self, speed):
        """Build an ENGINE_DATA message carrying *speed* with a valid counter."""
        values = {"XMISSION_SPEED": speed, "COUNTER": self.cnt_speed % 4}
        self.__class__.cnt_speed += 1
        return self.packer.make_can_msg_panda("ENGINE_DATA", self.PT_BUS, values)

    def _button_msg(self, buttons):
        """Build an SCM_BUTTONS message with the given cruise button code."""
        values = {"CRUISE_BUTTONS": buttons, "COUNTER": self.cnt_button % 4}
        self.__class__.cnt_button += 1
        return self.packer.make_can_msg_panda("SCM_BUTTONS", self.PT_BUS, values)

    def _brake_msg(self, brake):
        """Build a POWERTRAIN_DATA message with the user-brake state."""
        values = {"BRAKE_PRESSED": brake, "COUNTER": self.cnt_gas % 4}
        self.__class__.cnt_gas += 1
        return self.packer.make_can_msg_panda("POWERTRAIN_DATA", self.PT_BUS, values)

    def _gas_msg(self, gas):
        """Build a POWERTRAIN_DATA message with the user-gas state."""
        values = {"PEDAL_GAS": gas, "COUNTER": self.cnt_gas % 4}
        self.__class__.cnt_gas += 1
        return self.packer.make_can_msg_panda("POWERTRAIN_DATA", self.PT_BUS, values)

    def _send_brake_msg(self, brake):
        """Build an openpilot BRAKE_COMMAND (only Nidec actually brakes)."""
        values = {}
        if self.safety.get_honda_hw() == HONDA_N_HW:
            values = {"COMPUTER_BRAKE": brake}
        return self.packer.make_can_msg_panda("BRAKE_COMMAND", 0, values)

    def _send_steer_msg(self, steer):
        """Build an openpilot STEERING_CONTROL message with *steer* torque."""
        values = {"STEER_TORQUE": steer}
        return self.packer.make_can_msg_panda("STEERING_CONTROL", 0, values)

    def test_resume_button(self):
        # RESUME engages controls from the disengaged state.
        self.safety.set_controls_allowed(0)
        self._rx(self._button_msg(Btn.RESUME))
        self.assertTrue(self.safety.get_controls_allowed())

    def test_set_button(self):
        # SET engages controls from the disengaged state.
        self.safety.set_controls_allowed(0)
        self._rx(self._button_msg(Btn.SET))
        self.assertTrue(self.safety.get_controls_allowed())

    def test_cancel_button(self):
        # CANCEL disengages controls.
        self.safety.set_controls_allowed(1)
        self._rx(self._button_msg(Btn.CANCEL))
        self.assertFalse(self.safety.get_controls_allowed())

    def test_disengage_on_brake(self):
        # A user brake press must disengage controls.
        self.safety.set_controls_allowed(1)
        self._rx(self._brake_msg(1))
        self.assertFalse(self.safety.get_controls_allowed())

    def test_steer_safety_check(self):
        # With controls disallowed, only zero steer torque may be sent.
        self.safety.set_controls_allowed(0)
        self.assertTrue(self._tx(self._send_steer_msg(0x0000)))
        self.assertFalse(self._tx(self._send_steer_msg(0x1000)))

    def test_rx_hook(self):
        # TODO: move this test to common
        # checksum checks: a corrupted message must be rejected and disengage
        for msg in ["btn", "gas", "speed"]:
            self.safety.set_controls_allowed(1)
            # TODO: add this coverage back by re-running all tests with the acura dbc
            # to_push = self._button_msg(Btn.SET, 0x1A6)  # only in Honda_NIDEC
            if msg == "btn":
                to_push = self._button_msg(Btn.SET)
            if msg == "gas":
                to_push = self._gas_msg(0)
            if msg == "speed":
                to_push = self._speed_msg(0)
            self.assertTrue(self._rx(to_push))
            if msg != "btn":
                to_push[0].RDHR = 0  # invalidate checksum
                self.assertFalse(self._rx(to_push))
                self.assertFalse(self.safety.get_controls_allowed())

        # counter: too many wrong counter values must disengage
        # reset wrong_counters to zero by sending valid messages
        for i in range(MAX_WRONG_COUNTERS + 1):
            self.__class__.cnt_speed += 1
            self.__class__.cnt_gas += 1
            self.__class__.cnt_button += 1
            if i < MAX_WRONG_COUNTERS:
                self.safety.set_controls_allowed(1)
                self._rx(self._button_msg(Btn.SET))
                self._rx(self._speed_msg(0))
                self._rx(self._gas_msg(0))
            else:
                self.assertFalse(self._rx(self._button_msg(Btn.SET)))
                self.assertFalse(self._rx(self._speed_msg(0)))
                self.assertFalse(self._rx(self._gas_msg(0)))
                self.assertFalse(self.safety.get_controls_allowed())

        # restore counters for future tests with a couple of good messages
        for i in range(2):
            self.safety.set_controls_allowed(1)
            self._rx(self._button_msg(Btn.SET))
            self._rx(self._speed_msg(0))
            self._rx(self._gas_msg(0))
        self._rx(self._button_msg(Btn.SET))
        self.assertTrue(self.safety.get_controls_allowed())

    def test_tx_hook_on_pedal_pressed(self):
        # Actuation must be blocked while the user presses a pedal, except
        # gas in DISABLE_DISENGAGE_ON_GAS unsafe mode.
        for mode in [UNSAFE_MODE.DEFAULT, UNSAFE_MODE.DISABLE_DISENGAGE_ON_GAS]:
            for pedal in ['brake', 'gas']:
                self.safety.set_unsafe_mode(mode)
                allow_ctrl = False
                if pedal == 'brake':
                    # brake_pressed_prev and vehicle_moving
                    self._rx(self._speed_msg(100))
                    self._rx(self._brake_msg(1))
                elif pedal == 'gas':
                    # gas_pressed_prev
                    self._rx(self._gas_msg(1))
                    allow_ctrl = mode == UNSAFE_MODE.DISABLE_DISENGAGE_ON_GAS

                self.safety.set_controls_allowed(1)
                hw = self.safety.get_honda_hw()
                if hw == HONDA_N_HW:
                    self.safety.set_honda_fwd_brake(False)
                    self.assertEqual(allow_ctrl, self._tx(self._send_brake_msg(MAX_BRAKE)))
                self.assertEqual(allow_ctrl, self._tx(self._send_steer_msg(0x1000)))

                # reset status
                self.safety.set_controls_allowed(0)
                self.safety.set_unsafe_mode(UNSAFE_MODE.DEFAULT)
                self._tx(self._send_brake_msg(0))
                self._tx(self._send_steer_msg(0))
                if pedal == 'brake':
                    self._rx(self._speed_msg(0))
                    self._rx(self._brake_msg(0))
                elif pedal == 'gas':
                    self._rx(self._gas_msg(0))
class TestHondaNidecSafety(TestHondaSafety, common.InterceptorSafetyTest):
    """Safety tests for Honda Nidec hardware (includes the gas interceptor)."""

    TX_MSGS = [[0xE4, 0], [0x194, 0], [0x1FA, 0], [0x200, 0], [0x30C, 0], [0x33D, 0]]
    STANDSTILL_THRESHOLD = 0
    RELAY_MALFUNCTION_ADDR = 0xE4
    RELAY_MALFUNCTION_BUS = 0
    FWD_BLACKLISTED_ADDRS = {2: [0xE4, 0x194, 0x33D, 0x30C]}
    FWD_BUS_LOOKUP = {0: 2, 2: 0}

    INTERCEPTOR_THRESHOLD = 344

    def setUp(self):
        self.packer = CANPackerPanda("honda_civic_touring_2016_can_generated")
        self.safety = libpandasafety_py.libpandasafety
        self.safety.set_safety_hooks(Panda.SAFETY_HONDA_NIDEC, 0)
        self.safety.init_tests_honda()

    # Honda's interceptor uses different gas gains: the second channel
    # carries twice the commanded value.
    def _interceptor_msg(self, gas, addr):
        to_send = make_msg(0, addr, 6)
        gas2 = gas * 2
        to_send[0].RDLR = ((gas & 0xff) << 8) | ((gas & 0xff00) >> 8) | \
            ((gas2 & 0xff) << 24) | ((gas2 & 0xff00) << 8)
        return to_send

    def test_fwd_hook(self):
        # normal operation, not forwarding AEB
        self.FWD_BLACKLISTED_ADDRS[2].append(0x1FA)
        self.safety.set_honda_fwd_brake(False)
        super().test_fwd_hook()

        # TODO: test latching until AEB event is over?
        # forwarding AEB brake signal
        self.FWD_BLACKLISTED_ADDRS = {2: [0xE4, 0x194, 0x33D, 0x30C]}
        self.safety.set_honda_fwd_brake(True)
        super().test_fwd_hook()

    def test_brake_safety_check(self):
        # Brake commands allowed only when engaged, within [0, MAX_BRAKE],
        # and never while forwarding the stock AEB brake.
        for fwd_brake in [False, True]:
            self.safety.set_honda_fwd_brake(fwd_brake)
            for brake in np.arange(0, MAX_BRAKE + 10, 1):
                for controls_allowed in [True, False]:
                    self.safety.set_controls_allowed(controls_allowed)
                    if fwd_brake:
                        send = False  # block openpilot brake msg when fwd'ing stock msg
                    elif controls_allowed:
                        send = MAX_BRAKE >= brake >= 0
                    else:
                        send = brake == 0
                    self.assertEqual(send, self._tx(self._send_brake_msg(brake)))
        self.safety.set_honda_fwd_brake(False)

    def test_tx_hook_on_interceptor_pressed(self):
        # Actuation must be blocked while the interceptor reports a gas press,
        # except in DISABLE_DISENGAGE_ON_GAS unsafe mode.
        for mode in [UNSAFE_MODE.DEFAULT, UNSAFE_MODE.DISABLE_DISENGAGE_ON_GAS]:
            self.safety.set_unsafe_mode(mode)
            # gas_interceptor_prev > INTERCEPTOR_THRESHOLD
            self._rx(self._interceptor_msg(self.INTERCEPTOR_THRESHOLD+1, 0x201))
            self._rx(self._interceptor_msg(self.INTERCEPTOR_THRESHOLD+1, 0x201))
            allow_ctrl = mode == UNSAFE_MODE.DISABLE_DISENGAGE_ON_GAS

            self.safety.set_controls_allowed(1)
            self.safety.set_honda_fwd_brake(False)
            self.assertEqual(allow_ctrl, self._tx(self._send_brake_msg(MAX_BRAKE)))
            self.assertEqual(allow_ctrl, self._tx(self._interceptor_msg(self.INTERCEPTOR_THRESHOLD, 0x200)))
            self.assertEqual(allow_ctrl, self._tx(self._send_steer_msg(0x1000)))

            # reset status
            self.safety.set_controls_allowed(0)
            self.safety.set_unsafe_mode(UNSAFE_MODE.DEFAULT)
            self._tx(self._send_brake_msg(0))
            self._tx(self._send_steer_msg(0))
            self._tx(self._interceptor_msg(0, 0x200))
            self.safety.set_gas_interceptor_detected(False)
class TestHondaBoschHarnessSafety(TestHondaSafety):
    """Safety tests for Honda Bosch with the harness (powertrain on bus 1)."""

    TX_MSGS = [[0xE4, 0], [0xE5, 0], [0x296, 1], [0x33D, 0]]  # Bosch Harness
    STANDSTILL_THRESHOLD = 0
    RELAY_MALFUNCTION_ADDR = 0xE4
    RELAY_MALFUNCTION_BUS = 0
    FWD_BLACKLISTED_ADDRS = {2: [0xE4, 0xE5, 0x33D]}
    FWD_BUS_LOOKUP = {0: 2, 2: 0}

    PT_BUS = 1

    def setUp(self):
        self.packer = CANPackerPanda("honda_accord_s2t_2018_can_generated")
        self.safety = libpandasafety_py.libpandasafety
        self.safety.set_safety_hooks(Panda.SAFETY_HONDA_BOSCH_HARNESS, 0)
        self.safety.init_tests_honda()

    def _alt_brake_msg(self, brake):
        """Build the alternate brake message (0x1BE) used by some Bosch cars."""
        to_send = make_msg(0, 0x1BE)
        to_send[0].RDLR = 0x10 if brake else 0
        return to_send

    def test_spam_cancel_safety_check(self):
        # Only CANCEL may be spammed while disengaged.
        self.safety.set_controls_allowed(0)
        self.assertTrue(self._tx(self._button_msg(Btn.CANCEL)))
        self.assertFalse(self._tx(self._button_msg(Btn.RESUME)))
        self.assertFalse(self._tx(self._button_msg(Btn.SET)))
        # do not block resume if we are engaged already
        self.safety.set_controls_allowed(1)
        self.assertTrue(self._tx(self._button_msg(Btn.RESUME)))

    def test_alt_disengage_on_brake(self):
        # The alt brake message only disengages when alt-brake mode is set.
        self.safety.set_honda_alt_brake_msg(1)
        self.safety.set_controls_allowed(1)
        self._rx(self._alt_brake_msg(1))
        self.assertFalse(self.safety.get_controls_allowed())

        self.safety.set_honda_alt_brake_msg(0)
        self.safety.set_controls_allowed(1)
        self._rx(self._alt_brake_msg(1))
        self.assertTrue(self.safety.get_controls_allowed())
class TestHondaBoschGiraffeSafety(TestHondaBoschHarnessSafety):
    """Safety tests for Honda Bosch with the giraffe (buses swapped vs. harness)."""

    TX_MSGS = [[0xE4, 2], [0xE5, 2], [0x296, 0], [0x33D, 2]]  # Bosch Giraffe
    STANDSTILL_THRESHOLD = 0
    RELAY_MALFUNCTION_ADDR = 0xE4
    RELAY_MALFUNCTION_BUS = 2
    FWD_BLACKLISTED_ADDRS = {1: [0xE4, 0xE5, 0x33D]}
    FWD_BUS_LOOKUP = {1: 2, 2: 1}

    PT_BUS = 0

    def setUp(self):
        super().setUp()
        self.safety = libpandasafety_py.libpandasafety
        self.safety.set_safety_hooks(Panda.SAFETY_HONDA_BOSCH_GIRAFFE, 0)
        self.safety.init_tests_honda()

    def _send_steer_msg(self, steer):
        # Steering commands go out on bus 2 with the giraffe.
        values = {"STEER_TORQUE": steer}
        return self.packer.make_can_msg_panda("STEERING_CONTROL", 2, values)
if __name__ == "__main__":
unittest.main()
# -*- encoding=utf8 -*-
from .parser import Parser
from huey.djhuey import crontab, db_periodic_task # , db_task, periodic_task
from weather_parser.models import City, AirPort
from bs4 import BeautifulSoup
from LatLon import Latitude, Longitude
from cStringIO import StringIO
import re
import csv
import requests
s = Parser()
def _iter(qs, chunk_size=500):
    """Yield rows of queryset *qs* page by page to bound memory use.

    Python 2 code (print statements, xrange); the prints look like leftover
    debugging output.
    """
    from django.core.paginator import Paginator
    paginator = Paginator(qs, chunk_size)
    print 'iter', qs, paginator.count, paginator.num_pages
    for page in xrange(1, paginator.num_pages + 1):
        print 'page', page
        for row in paginator.page(page).object_list:
            yield row
# NOTE(review): crontab hours are 0-23; hour='24' looks invalid - confirm
# against the huey crontab implementation.
@db_periodic_task(crontab(hour='24'))
def scan_airport():
    """Download the OpenFlights airport list and upsert AirPort rows."""
    airport_url = 'https://raw.githubusercontent.com/jpatokal/openflights/master/data/airports.dat'
    content = requests.request('GET', airport_url).content.decode('utf-8', 'ignore')
    fields = "airport_id,name,city_name,country_name,iata,icao,latitude,longitude,altitude,timezone,dst".split(',')
    reader = csv.DictReader(StringIO(content.encode('utf-8')), fieldnames=fields)
    for info in reader:
        # Columns beyond the named fields are collected under key None - drop them.
        del info[None]
        AirPort.objects.get_or_create(**info)
# NOTE(review): crontab hours are 0-23; hour='24' looks invalid - confirm
# against the huey crontab implementation.
@db_periodic_task(crontab(hour='24'))
def update_city():
    """Scrape the biggest-cities listing and upsert City rows.

    Parses name, population, altitude, country and decimal lat/lon from the
    first 10 result pages; rows that fail to parse are skipped silently.
    """
    city_list = []
    for x in xrange(0, 10):
        url = 'http://www.tiptopglobe.com/biggest-cities-world?p=' + str(x)
        # The page uses the degree sign; normalize it to 'O' so the
        # coordinate regexes below can match.
        html = requests.request('GET', url).content.replace('\xb0', 'O')
        html = html.decode('utf-8', 'ignore').replace(u'</h2>', '')
        body = BeautifulSoup(html, "html.parser")
        city_list_from_web = body.select('tr')
        for city in city_list_from_web[1:]:  # skip the table header row
            try:
                name = city.select('td')[1].select('font')[0].text
                name = re.sub("\s*\(.*\)", "", name).strip()
                population = int(city.select('td')[2].text.replace(' ', ''))
                try:
                    altitude = int(city.select('td')[3].text.split(' ')[0])
                except:
                    altitude = None
                country = city.select('td')[4].select('font')[0].text
                latitude = city.select('td')[5].text
                latitude = re.search(r'(\d+)O(\d+)\'([\d\.]+)"', latitude).groups()
                latitude = float(Latitude(
                    degree=int(latitude[0]),
                    minute=int(latitude[1]),
                    second=float(latitude[2])))
                longitude = city.select('td')[6].text
                longitude = re.search(r'(\d+)O(\d+)\'([\d\.]+)"', longitude).groups()
                longitude = float(Longitude(
                    degree=int(longitude[0]),
                    minute=int(longitude[1]),
                    second=float(longitude[2])))
                city_list.append({
                    'name': name,
                    'population': population,
                    'altitude': altitude,
                    'country': country,
                    'latitude': latitude,
                    'longitude': longitude,
                })
            except:
                pass
    for city in city_list:
        # BUG FIX: get_or_create returns an (obj, created) tuple and already
        # persists new rows, so the previous `city.save()` on that tuple
        # raised AttributeError.
        City.objects.get_or_create(**city)
"""Imports"""
import webbrowser
class Movie():
    """Container for basic movie metadata and its trailer link."""

    def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube):  # NOQA
        """Store the title, storyline, poster image URL and trailer URL."""
        self.title, self.storyline = movie_title, movie_storyline
        self.poster_image_url = poster_image
        self.trailer_youtube_url = trailer_youtube

    def show_trailer(self):
        """Launch the trailer URL in the user's default web browser."""
        webbrowser.open(self.trailer_youtube_url)
from queue import Queue
def solution(progresses, speeds):
    """Return the number of features deployed in each release batch.

    Feature i needs ceil((100 - progresses[i]) / speeds[i]) more days.
    Features ship in order: a finished feature waits for every earlier one,
    and all features ready by the blocking feature's day ship together.

    Improvements over the original: no longer mutates the caller's
    `progresses` list, handles empty input, and uses plain lists instead of
    a thread-safe queue.Queue.
    """
    if not progresses:
        return []

    # Days remaining per feature, via integer ceiling division.
    days = [-(-(100 - p) // s) for p, s in zip(progresses, speeds)]

    answer = []
    batch_head = days[0]  # day count of the feature blocking the current batch
    batch_size = 0
    for d in days:
        if d <= batch_head:
            # Ready no later than the blocker: ships in the current batch.
            batch_size += 1
        else:
            answer.append(batch_size)
            batch_head = d
            batch_size = 1
    answer.append(batch_size)
    return answer
#!/usr/bin/env python3
class heap:
    """Array-backed binary min-heap operating on a caller-supplied list.

    The list `nums` is the heap storage: children of index i live at
    2*i + 1 and 2*i + 2. Comments translated to English; parent index now
    uses integer floor division instead of int(float_division).
    """

    @staticmethod
    def insert(nums, x):
        """Push *x* onto the heap in *nums*, restoring order by sifting up."""
        # Append at the end, then bubble up while smaller than the parent.
        nums.append(x)
        idx = len(nums) - 1
        while idx != 0:
            parent_idx = (idx - 1) // 2
            if nums[idx] < nums[parent_idx]:
                nums[idx], nums[parent_idx] = nums[parent_idx], nums[idx]
                idx = parent_idx
            else:
                break

    @staticmethod
    def delete(nums):
        """Pop and return the smallest element; *nums* must be non-empty."""
        # Swap the root with the last element, remove it, then sift the new
        # root down toward the smaller child until the heap order holds.
        nums[0], nums[-1] = nums[-1], nums[0]
        res = nums.pop(-1)
        lens, idx = len(nums), 0
        while True:
            temp = idx
            left = idx * 2 + 1
            right = idx * 2 + 2
            # Pick the smaller of the two children, if any is smaller.
            if left < lens and nums[idx] > nums[left]:
                idx = left
            if right < lens and nums[idx] > nums[right]:
                idx = right
            if idx == temp:
                break
            nums[idx], nums[temp] = nums[temp], nums[idx]
        return res
if __name__ == '__main__':
    arr = [6, 8, 9, 1, 3, 5, 4, 3, 2, 7]
    # Backing list used as the heap storage.
    result = []
    for a in arr:
        heap.insert(result, a)
    print(result)
    # Drain the heap back into arr - yields the values in ascending order.
    arr.clear()
    while result:
        arr.append(heap.delete(result))
    print(arr)
from drf_elasticsearch_dsl.tasks import searchIndexUpdateTask, searchIndexDeleteTask
from drf_elasticsearch_dsl.connection_handler import connection_handler
from django.db.models.signals import post_delete, post_save
class CelerySignalProcessor(object):
    """Dispatches Django model save/delete signals to Celery index tasks.

    On construction it connects handlers for every registered document's
    model; call teardown() to disconnect them again.
    """

    def __init__(self):
        self.setup()

    def handle_save(self, sender, instance, **kwargs):
        """Queue an index update, except for raw (fixture-loading) saves."""
        raw = kwargs.get('raw', False)
        if not raw:
            meta = instance._meta
            searchIndexUpdateTask.delay(
                meta.label, instance.pk)

    def handle_delete(self, sender, instance, **kwargs):
        """Queue an index delete, except for raw (fixture-loading) deletes."""
        raw = kwargs.get('raw', False)
        if not raw:
            meta = instance._meta
            # BUG FIX: previously read `meta.lable`, which raised
            # AttributeError on every non-raw delete.
            searchIndexDeleteTask.delay(
                meta.label, instance.pk)

    def setup(self):
        """Connect save/delete handlers for every registered document model."""
        for label, document in connection_handler.documents.items():
            model = document.get_model()
            post_save.connect(
                self.handle_save, sender=model
            )
            post_delete.connect(
                self.handle_delete, sender=model
            )

    def teardown(self):
        """Disconnect the handlers connected by setup()."""
        for label, document in connection_handler.documents.items():
            model = document.get_model()
            post_save.disconnect(
                self.handle_save, sender=model
            )
            post_delete.disconnect(
                self.handle_delete, sender=model
            )
| StarcoderdataPython |
241230 | <reponame>pipebio/api-examples
from typing import Optional
from library.models.sequence_document_kind import SequenceDocumentKind
class UploadSummary:
    """Lightweight summary of an upload, serializable for the PipeBio API.

    Only fields that were actually provided are included in the JSON payload.
    """

    id: int
    # Number of sequences in the upload, when known.
    sequence_count: Optional[int]
    # Kind of sequence document, when known.
    sequence_document_kind: Optional[SequenceDocumentKind]

    def __init__(self,
                 id: int,
                 sequence_count: Optional[int] = None,
                 sequence_document_kind: Optional[SequenceDocumentKind] = None):
        # Annotation fix: the optional parameters defaulted to None but were
        # annotated as non-optional types.
        self.id = id
        self.sequence_count = sequence_count
        self.sequence_document_kind = sequence_document_kind

    def __repr__(self) -> str:
        return 'UploadSummary({},count={},kind={})'.format(self.id, self.sequence_count, self.sequence_document_kind)

    def to_json(self) -> dict:
        """Return the API payload dict; None-valued fields are omitted."""
        data = {'visible': True}
        if self.sequence_count is not None:
            data['sequenceCount'] = self.sequence_count
        if self.sequence_document_kind is not None:
            # The enum's raw value is what the API expects.
            data['sequenceDocumentKind'] = self.sequence_document_kind.value
        return data
| StarcoderdataPython |
6524490 | from baselines.ddpg.memory import RingBuffer, array_min2d
import random
from collections import namedtuple
import numpy as np
import sortedcontainers
import tensorflow as tf
import math
from os import path, makedirs
class ESMemoryAdapter(object):
    """Adapter for the baselines DDPG code.

    overwrite options: 'FIFO', 'expl_xx' (stochastic exploration magnitude based with alpha = xx),
    'tde_xx' (stochastic TDE based with alpha = xx), 'resv' (Reservoir sampling)
    sample options: 'uniform', 'PER_xx' (TDE rank based with alpha = xx)
    """

    def __init__(self, limit, action_shape, observation_shape, overwrite_policy='FIFO',
                 sample_policy='uniform', batch_size=64, forgetting_factor=0.99):
        # Translate the compact overwrite-policy string into a settings table.
        ow = overwrite_policy.lower().strip()
        if 'fifo' in ow:
            ow_tab = {'type': 'FIFO'}
        elif 'expl' in ow:
            _, alpha = ow.split('_')
            ow_tab = {'type': 'rank based stochastic',
                      'metric': 'exploration_magnitude',
                      'proportional': False,  # lowest values have the highest chance of being
                      # overwritten
                      'alpha': float(alpha)}
        elif 'tde' in ow:
            _, alpha = ow.split('_')
            ow_tab = {'type': 'rank based stochastic',
                      'metric': 'tde',
                      'proportional': False,  # lowest values have the highest chance of being
                      # overwritten
                      'alpha': float(alpha)}
        elif 'resv' in ow or 'reservoir' in ow:
            ow_tab = {'type': 'Reservoir'}
        else:
            assert False, 'unknown overwrite policy: {:s}'.format(overwrite_policy)
        # Translate the sample-policy string likewise.
        sa = sample_policy.lower().strip()
        if 'uniform' in sa:
            sa_tab = {'type': 'uniform'}
        elif 'per' in sa:
            # Bug fix: this previously split `ow` (the overwrite string), so the
            # PER alpha came from the wrong argument and e.g. 'FIFO' overwrite
            # with 'PER_0.7' sampling raised ValueError.
            _, alpha = sa.split('_')
            sa_tab = {'type': 'rank based stochastic',
                      'metric': 'tde',
                      'proportional': True,  # Samples with high TDE have a higher chance of
                      # being sampled again
                      'alpha': float(alpha)}
        else:
            assert False, 'unknown sample policy: {:s}'.format(sample_policy)
        settings = {
            'buffer_size': limit,
            'forgetting_factor': forgetting_factor,
            'batch_size': batch_size,
            'reuse': 32,  # not used in the baselines version
            'experience_properties': {
                'observations': {
                    'state': {
                        'shape': observation_shape,
                        'dtype': np.float32,
                        'ttype': tf.float32,
                    },
                },
                'action': {
                    'shape': action_shape,
                    'dtype': np.float32,
                    'ttype': tf.float32,
                },
                'terminal': {
                    'shape': (1,),
                    'dtype': np.uint8,
                    'ttype': tf.float32,
                },
                'reward': {
                    'shape': (1,),
                    'dtype': np.float32,
                    'ttype': tf.float32,
                },
                'experience_meta_data': {
                    'tde': {
                        'shape': (1,),
                        'dtype': np.float16,
                        'default': np.inf,
                    },
                    'exploration_magnitude': {
                        'shape': (1,),
                        'dtype': np.float16,
                        'default': 0.0
                    },
                }
            },
            'buffer_properties': {
                'overwrite policy': ow_tab,
                'sample policy': sa_tab
            },
        }
        self.experience_selection_buffer = ExperienceBuffer(settings)
        self.limit = limit
        # Mirror RingBuffers kept for interface compatibility with the baselines
        # memory; sampling is served by the experience-selection buffer only.
        self.observations0 = RingBuffer(limit, shape=observation_shape)
        self.actions = RingBuffer(limit, shape=action_shape)
        self.rewards = RingBuffer(limit, shape=(1,))
        self.terminals1 = RingBuffer(limit, shape=(1,))
        self.observations1 = RingBuffer(limit, shape=observation_shape)

    def sample(self, batch_size):
        """Sample a training batch (baselines dict format) of `batch_size` experiences.

        Earlier revisions also drew an unused uniform batch from the RingBuffer
        mirrors via the deprecated ``np.random.random_integers`` and discarded
        it; that dead work (which crashes on modern NumPy) has been removed.
        """
        return self.experience_selection_buffer.get_batch_baselines(batch_size=batch_size)

    def append(self, obs0, action, reward, obs1, terminal1, training=True,
               experience_meta_data=None):
        """Store one transition; no-op when `training` is False."""
        if not training:
            return
        self.experience_selection_buffer.add_experience(observation={'state': obs0},
                                                        action=action,
                                                        next_observation={'state': obs1},
                                                        reward=reward,
                                                        terminal=terminal1,
                                                        experience_meta_data=experience_meta_data)
        self.observations0.append(obs0)
        self.actions.append(action)
        self.rewards.append(reward)
        self.observations1.append(obs1)
        self.terminals1.append(terminal1)

    @property
    def nb_entries(self):
        # Capacity of the selection buffer (its __len__ returns buffer_size).
        return len(self.experience_selection_buffer)
class ExperienceBuffer(object):
    """Class with methods for saving and replaying experiences.

    This class takes a description of the observations, actions, rewards and other signals that
    are relevant. It creates a series of placeholders. It further includes methods to store
    experiences, sample batches (return the placeholders and numpy arrays with values in a
    feed-dict) and store and restore to and from a file.
    """
    def __init__(self, properties):
        """Inits the buffer using the properties specified.
        Args:
            properties: a dictionary specifying the properties of the buffer to be made.
            Properties should have following structure:
            {
                'buffer_size': <int, required, number of experiences to store>
                'batch_size': <int, optional, can also be specified in
                    the functions that require it>
                'reuse': <int, required, number of times a sample will on average be used
                    if the get_available_batches method is used>
                'experience_properties': <dict, required, describes the properties of
                    the signals to be stored. Should have the following keys:>
                    {
                        'observations': <dict, required, sensor or state data signals>
                        'action': <dict, required, action signal properties>
                        'reward': <dict, required, reward signal properties>
                        'terminal': <dict, required, properties of signal that
                            indicates whether the experience was the last in the episode>
                        'experience_meta_data': <dict, optional, meta data signals used for
                            learning (such as TDE)>
                    }
                    observations and learn data are dicts where the keys are names of
                    signals and the values are dicts of the same form as action,
                    reward and terminal:
                    {
                        'shape': <tuple, required, dimensions of signal (e.g. (2,) )>
                        'dtype': <numpy dtype, required, numpy data type>
                        'ttype': <tensorflow dtype, required, tensorflow data type>
                    }
                'load_replay_buffer': <string, optional, file path of a saved experience
                    buffer, from which experiences are loaded into this one>
                'buffer_properties': <dict, required, describes the overwrite and sample
                    strategies, see overwrite and sample policy classes at the bottom of
                    this file for options. Default options:
                    'overwrite': {
                        'type': 'FIFO'
                    },
                    'sample': {
                        'type': 'uniform'
                    },
                'scale_rewards': <float, optional, scale rewards by this factor when
                    replaying, to limit (increase) gradients while keeping original rewards
                    for bookkeeping.
                'forgetting_factor': <float, required, gamma in [0,1)>
            }
        """
        assert properties is not None
        self._properties = properties
        self._buffer = self._create_buffer()
        self._meta_data_change_listeners = self._create_meta_data_change_listeners()
        """_buffer contains the numpy data of the signals (experience tuples)"""
        self._buffer_metadata = self._create_buffer_metadata()
        """_buffer_metadata contains meta data about the buffer, such as the last write index,
        the number of new experiences and the indices of unused experiences """
        self._experience_and_episode_metadata = self._create_experience_and_episode_metadata()
        """_experience_metadata contains automatically collected meta data about the
        experiences in the buffer, such the episode they were from and the return from the
        experience to the final experience in the episode. """
        self._placeholders = self._create_placeholders()
        self.overwrite_policy = self._create_overwrite_policy()
        self.sample_policy = self._create_sample_policy()
        self._optionally_load_buffer()

    def get_two_timestep_tensor_placeholders(self):
        """Get a dict with references to the placeholders by time-step (current, next)."""
        timestep_tensors = {'current_timestep': {}, 'next_timestep': {}}
        ct = timestep_tensors['current_timestep']
        nt = timestep_tensors['next_timestep']
        for name in self._placeholders['observations']:
            ct[name] = self._placeholders['observations'][name]
            nt[name] = self._placeholders['observations_post'][name]
        ct['action'] = self._placeholders['action']
        nt['reward'] = self._placeholders['reward']
        nt['terminal'] = self._placeholders['terminal']
        return timestep_tensors

    def add_experience(self, observation, action, next_observation, reward, terminal,
                       experience_meta_data=None):
        """Add a new experience to the buffer.
        Args:
            observation: current time-step observation dict with numpy arrays for the sensor signals
            action: current time-step action numpy array, float or int
            next_observation: next time-step observation dict with numpy arrays for the sensor
            signals
            reward: float or int of the (next time-step) reward
            terminal: bool: True is the experience is the last of an episode. False otherwise
            experience_meta_data: optional dict with (part of)
        """
        write_index = self.overwrite_policy.next_index()
        if self._experience_and_episode_metadata['current_episode_finished']:
            self._start_episode()
        # Episode reward bookkeeping happens regardless of whether the
        # experience is actually stored (write_index may be None under
        # reservoir overwriting); _finish_episode later skips None indices.
        self._experience_and_episode_metadata['last_episode_rewards']['rewards'].append(
            self.Seq_ep_rew(buffer_index=write_index, reward=reward))
        if write_index is not None:
            for modality in self._buffer['observations']:
                self._buffer['observations'][modality][write_index] = observation[modality]
            for modality in self._buffer['observations_post']:
                self._buffer['observations_post'][modality][write_index] = next_observation[
                    modality]
            self._buffer['action'][write_index] = action
            self._buffer['reward'][write_index] = reward
            self._buffer['terminal'][write_index] = terminal
            for cat in self._buffer['experience_meta_data']:
                # Listeners are notified before (pre=True) and after the value changes.
                self._call_meta_data_change_listeners(indices=write_index, category=cat, pre=True)
                if experience_meta_data is not None and cat in experience_meta_data:
                    self._buffer['experience_meta_data'][cat][write_index] = experience_meta_data[cat]
                else:
                    self._buffer['experience_meta_data'][cat][write_index] = \
                        self._properties['experience_properties']['experience_meta_data'][cat]['default']
                self._call_meta_data_change_listeners(indices=write_index, category=cat)
            self._buffer_metadata['unused_experience_idcs'].add(write_index)
            self._buffer_metadata['fresh_experience_count'] += 1
        if terminal:
            self._finish_episode()

    def nr_available_batch_updates(self, batch_size=None):
        """Number of batch updates available given batch_size, fresh experiences, reuse
        Args:
            batch_size: int, optional, use this batch size instead of the one given during
            initialization
        Returns: int, number of batch updates available. Note that get_batch gives no warning
        when more batches are requested.
        """
        batch_size = batch_size or self._properties['batch_size']
        reuse = self._properties['reuse']
        fresh = min(self._buffer_metadata['fresh_experience_count'], len(self))
        return math.floor(fresh * reuse / batch_size)

    def get_batch_baselines(self, batch_size):
        # Batch in the dict format expected by the baselines DDPG trainer
        # (numpy arrays rather than a TF feed_dict).
        indcs = self.sample_policy.sample_indices(batch_size, only_new=False)
        return {
            'obs0': array_min2d(self._buffer['observations']['state'][indcs]),
            'obs1': array_min2d(self._buffer['observations_post']['state'][indcs]),
            'rewards': array_min2d(self._buffer['reward'][indcs]),
            'actions': array_min2d(self._buffer['action'][indcs]),
            'terminals1': array_min2d(self._buffer['terminal'][indcs]),
            'indices': indcs
        }

    def get_batch(self, batch_size=None, **kwargs):
        """Get a tuple: (training batch feed_dict, the buffer indices of the experiences)
        Args:
            batch_size: int, optional: use a different batch size than given in the init properties
            **kwargs: give additional named arguments, options include:
                only_new_experiences: boolean, only return experiences that have not been
                returned before
                dont_count_as_use: boolean, do not count the returned experiences as used
                indices: list, return the experiences with the given indices
        Returns: a tuple: (feed_dict (placeholders and the numpy contents), list: indices of the
        returned experiences.
        """
        only_new = kwargs.get('only_new_experiences', False)
        dont_count_as_use = kwargs.get('dont_count_as_use', False)
        batch_size = batch_size or self._properties['batch_size']
        # Only serve a batch once strictly more experiences than batch_size are stored.
        if batch_size < self._buffer_metadata['last_write_index'] + 1:
            if 'indices' in kwargs:
                indcs = kwargs['indices']
            else:
                indcs = self.sample_policy.sample_indices(batch_size, only_new)
                if indcs is None:
                    return None, None
            if not dont_count_as_use:
                self._buffer_metadata['unused_experience_idcs'].difference_update(indcs)
                # Fresh count decreases fractionally so that each experience is
                # (on average) reused 'reuse' times before counting as consumed.
                self._buffer_metadata['fresh_experience_count'] -= (batch_size / float(
                    self._properties['reuse']))
                self._buffer_metadata['fresh_experience_count'] = max(
                    self._buffer_metadata['fresh_experience_count'], 0)
            feed_dict = {}
            for exp_comp in 'observations observations_post action reward terminal'.split():
                self._feed_data(feed_dict=feed_dict, exp_cmp=exp_comp,
                                indcs=indcs, place_holders=self._placeholders,
                                buffer=self._buffer, properties=self._properties[
                                    'experience_properties'])
            if self._properties.get('scale_rewards'):
                feed_dict[self._placeholders['reward']] = feed_dict[self._placeholders[
                    'reward']] * self._properties.get('scale_rewards')
            return feed_dict, indcs
        else:
            return None, None

    def get_indices_for_n_batches(self, number_of_batches, batch_size=None):
        """Predetermine the buffer indices for sampling a number of batches.
        The buffer indices are returned and can be given to get_batch() to get those specific
        experience
        Args:
            number_of_batches: int, required, number of batches to return indices for
            batch_size: int, optional, the number of experiences per batch. If not specified the
            given during initialization is used.
        Returns: numpy array of batches * batch_size with the indices
        """
        batch_size = batch_size or self._properties['batch_size']
        if number_of_batches > 0:
            indices = np.empty((number_of_batches, batch_size), dtype=np.int32)
            # NOTE(review): filling an int32 array with np.nan does not produce
            # NaNs (the value is cast to an integer), so unfilled rows are not
            # reliably detectable downstream -- confirm intent.
            indices.fill(np.nan)
            for bi in range(number_of_batches):
                idcs = self.sample_policy.sample_indices(batch_size)
                if idcs is not None:
                    indices[bi] = idcs
            return indices

    def update_experience_meta_data(self, indices, data):
        """Update the metadata (learn data) for the experiences of the given indices.
        Args:
            indices: list, buffer indices of the experiences for which the data is provided. Note
            that get_batch gives the indices of the experiences in the batch
            data: dict, containing (some of) the fields specified in learn data during init and
            the values of those fields corresponding to the experiences with the provided indices.
        """
        for cat in data:
            self._call_meta_data_change_listeners(category=cat, indices=indices, pre=True)
            self._buffer['experience_meta_data'][cat][indices] = data[cat]
            self._call_meta_data_change_listeners(category=cat, indices=indices)

    def feed_dict_from_observation(self, observation):
        """Return a feed dict with the internal placeholders and the given observation
        Args:
            observation: observation dict with numpy observation (no batch dimension)
        Returns: the feed dict, observations are expanded to batch dimension 1
        """
        feed_dict = {}
        meta_data = self._properties['experience_properties']['observations']
        for mod in observation:
            mod_meta_data = meta_data[mod]
            data = np.expand_dims(observation[mod], axis=0)
            feed_dict[
                self._placeholders['observations'][mod]] = \
                ExperienceBuffer.optionally_normalize(data, mod_meta_data)
        return feed_dict

    @staticmethod
    def optionally_normalize(data, meta_data):
        # Optional per-signal normalization: divide by 'norm_dev' and/or shift
        # by 'norm_add' when those keys are present in the signal description.
        if 'norm_dev' in meta_data:
            data = data.astype(np.float32)
            data /= meta_data['norm_dev']
        if 'norm_add' in meta_data:
            data += meta_data['norm_add']
        return data

    def save_to_disk(self, file_path):
        """Saves the contents of the buffer (experiences only) to a specified directory.
        Args:
            file_path: directory path, file name buffer.npz is appended by the function.
        """
        file_path = path.expanduser(file_path)
        makedirs(file_path, exist_ok=True)
        filename = file_path + 'buffer.npz'
        flat_buffer = self._flatten_dict(self._buffer)
        for key, npar in flat_buffer.items():
            # NOTE(review): the slice end 'last_write_index' excludes the most
            # recently written experience (an inclusive index) -- possible
            # off-by-one, confirm against load path.
            flat_buffer[key] = npar[0:self._buffer_metadata['last_write_index']]
        np.savez_compressed(filename, **flat_buffer)

    def load_buffer_from_disk(self, file_path):
        """Loads the experiences from a previously saved buffer into this one.
        Caution: this function assumes the current buffer is empty and overwrites it. Only
        experiences and learn data are loaded, no metadata.
        Args:
            file_path: directory in which a file 'buffer.npz' is saved.
        """
        bufferfile_name = path.expanduser(file_path) + 'buffer.npz'
        try:
            with np.load(bufferfile_name) as external_flat_buffer:
                added_experiences = self._process_flat_buffer_file(external_flat_buffer)
                self._buffer_metadata['last_write_index'] = added_experiences - 1
            print("Loaded {:d} experiences from {:s}".format(added_experiences, bufferfile_name))
        except IOError:
            print('Could not load: {:s}'.format(bufferfile_name))

    def all_fresh(self):
        """Mark all experiences in the buffer as unused for training. """
        self._buffer_metadata['fresh_experience_count'] = self._buffer_metadata['last_write_index']

    def discard_memory(self):
        """Discard all experiences to start with an empty buffer"""
        # Only the bookkeeping is reset; the numpy storage keeps its old values.
        self._buffer_metadata['last_write_index'] = -1
        self._buffer_metadata['unused_experience_idcs'] = set()

    def add_experience_meta_data_update_listener(self, experience_meta_data_category, listener):
        """Add an event listener that is called with indices for which the metadata has changed."""
        assert experience_meta_data_category in self._buffer['experience_meta_data'], \
            'no metadata for {:s}'.format(experience_meta_data_category)
        self._meta_data_change_listeners[experience_meta_data_category].append(listener)

    def get_report(self):
        """Get a report of the buffer data for a tb summary"""
        report = {'experiences': self._buffer_metadata['last_write_index'] + 1}
        for exp_data in self._buffer['experience_meta_data']:
            # NaN/inf entries (e.g. the 'tde' default of inf) are filtered out.
            x = self._buffer['experience_meta_data'][exp_data][
                0:self._buffer_metadata['last_write_index'], 0]
            x = x[~np.isnan(x)]
            x = x[~np.isinf(x)]
            report[exp_data] = x
        return report

    def __len__(self):
        # Capacity of the buffer, NOT the number of stored experiences.
        return self._properties['buffer_size']

    def _create_meta_data_change_listeners(self):
        # One listener list per experience-meta-data category.
        return {name: [] for name in self._buffer['experience_meta_data']}

    def _call_meta_data_change_listeners(self, category, indices, pre=False):
        # pre=True means "about to change", pre=False means "has changed".
        for callback_function in self._meta_data_change_listeners[category]:
            callback_function(indices, pre)

    @property
    def fresh_experiences(self):
        """The number of experiences not yet trained with (keeping in mind batch size and reuse)"""
        return self._buffer_metadata['fresh_experience_count']

    @property
    def last_episode_mean_return(self):
        """Returns the mean return over the states visited in the last episode.
        This function can only be called between episodes; after an experience has been added
        with terminal = True, but before the first experience of the next episode is added.
        Returns: The mean return over the states visited in the last episode
        Throws: assertion error when an episode has not just finished
        """
        assert self._experience_and_episode_metadata['current_episode_finished'], \
            'last_episode_mean_return can only be called after an episode has just terminated; ' \
            'after ' \
            'an experience has been added with terminal = True and before the first experience' \
            ' of the next episode is added.'
        return self._experience_and_episode_metadata['last_episode_mean_return']

    @property
    def last_episode_initial_state_return(self):
        """Returns the return of the first state visited in the last episode.
        This function can only be called between episodes; after an experience has been added
        with terminal = True, but before the first experience of the next episode is added.
        Returns: The return of the first state visited in the last episode
        Throws: assertion error when an episode has not just finished
        """
        assert self._experience_and_episode_metadata['current_episode_finished'], \
            'last_episode_initial_state_return can only be called after an episode has just ' \
            'terminated; after ' \
            'an experience has been added with terminal = True and before the first experience' \
            ' of the next episode is added.'
        return self._experience_and_episode_metadata['last_episode_initial_return']

    def _create_buffer(self):
        """ Create the numpy nd-arrays for the experiences and their meta data.
        Returns:
            A dict of the same structure as 'experience_properties' with the initialized numpy
            tensors
        """
        exp_prop = self._properties['experience_properties']
        # here the s a s' r t experience is saved each time-step because of experience replay
        # research.
        # More memory efficient would be to save s a r t per timestep and ensure timesteps are not
        # orphaned (at least 2 subsequent)
        assert all(name in exp_prop for name in ['observations', 'action', 'reward'])
        exp_prop['observations_post'] = exp_prop['observations']
        return self._create_variable_buffer(exp_prop)

    def _create_variable_buffer(self, variable_description):
        """Recursively build parts of the experience buffer from the dict definition.
        Args:
            variable_description: either a signal description dict of the following structure:
            {
                'shape': <tuple, required, dimensions of signal (e.g. (2,) )>
                'dtype': <numpy dtype, required, numpy data type>
                'ttype': <tensorflow dtype, required, tensorflow data type>
            }
            or a (multi level) dict containing signal descriptions as values.
        Returns:
            numpy nd-array for a signal description, (multi level) dict of numpy arrays for a
            (multi level) dict of descriptions
        """
        if 'shape' in variable_description and 'dtype' in variable_description:
            # Leaf: allocate [buffer_size, *signal_shape] storage.
            shape = [self._properties['buffer_size']]
            shape.extend(list(variable_description['shape']))
            return np.empty(shape=shape, dtype=variable_description['dtype'])
        else:
            returndict = {}
            for var_props in variable_description:
                assert isinstance(variable_description[var_props], dict), 'bad experience replay ' \
                                                                          'settings'
                returndict[var_props] = self._create_variable_buffer(
                    variable_description[var_props])
            return returndict

    @staticmethod
    def _create_buffer_metadata():
        """Create a dict with metadata specific to the operation of the buffer.
        Returns: the metadatadict
        """
        metadata_dict = {
            'last_write_index': -1,
            'fresh_experience_count': 0,
            'unused_experience_idcs': set(),
        }
        return metadata_dict

    def _create_experience_and_episode_metadata(self):
        """Create a dict with metadata specific to experiences and episodes.
        Returns: the metadatadict
        """
        # Per-experience (reward, buffer slot) pair used for return computation.
        self.Seq_ep_rew = namedtuple('rewardseq', ['reward', 'buffer_index'])
        metadata_dict = {
            'experience_episodes': np.zeros(self._properties['buffer_size'], dtype=np.int32),
            'experience_returns': np.zeros(self._properties['buffer_size'], dtype=np.float32),
            'last_episode_mean_return': None,
            'last_episode_initial_return': None,
            'last_episode_rewards': {'episode': 0, 'rewards': []},
            'current_episode_index': 0,
            'current_episode_finished': False
        }
        return metadata_dict

    def _create_placeholders(self):
        """Create the internal set of tensorflow placeholders to feed experiences to."""
        prop = self._properties['experience_properties']
        with tf.variable_scope('placeholders'):
            return {
                'observations': self._create_placeholder_set(prop['observations'], timestep=0),
                'observations_post': self._create_placeholder_set(
                    prop['observations'], timestep=1),
                'action': self._create_placeholder_set(prop['action'], timestep=0, name='action'),
                'reward': self._create_placeholder_set(prop['reward'], timestep=1, name='reward'),
                'terminal': self._create_placeholder_set(prop['terminal'], timestep=1,
                                                         name='terminal')
            }

    def _create_placeholder_set(self, param, **kwargs):
        """Recursively create a (dict of) tf placeholders from a (dict of) signal description(s).
        Args:
            param: a (dict of) signal description(s) (see init)
        Returns: a (dict of) placeholders with the specified type and shape (+ -1 batch dimension)
        """
        if 'shape' in param:
            shape = [None]
            shape.extend(list(param['shape']))
            full_name = '{:s}_time_{:d}'.format(kwargs['name'], kwargs['timestep'])
            return tf.placeholder(shape=shape, dtype=param['ttype'], name=full_name)
        else:
            return {name: self._create_placeholder_set(param[name], name=name, **kwargs) for name in
                    param}

    def _create_overwrite_policy(self):
        """Init the overwrite policy which determines the next buffer index to be (over)written to.
        Returns: The overwrite policy object
        """
        policy_prop = self._properties['buffer_properties']['overwrite policy']
        if policy_prop['type'] == 'FIFO':
            return FifoOverwritePolicy(self)
        elif policy_prop['type'] == 'rank based stochastic':
            return StochasticRankBasedOverwritePolicy(
                experience_buffer=self,
                metric=policy_prop['metric'],
                highest_values_highest_priority=policy_prop['proportional'],
                alpha=policy_prop['alpha']
            )
        elif policy_prop['type'] == 'Reservoir':
            return ReservoirOverwritePolicy(self)
        else:
            assert False, 'unknown overwrite policy'

    def _create_sample_policy(self):
        """Create the sample policy instance based on the settings dict provided to init.
        Returns: the sample policy instance, which determines how to sample from the buffer."""
        policy_prop = self._properties['buffer_properties']['sample policy']
        if policy_prop['type'] == 'uniform':
            return UniformSamplePolicy(self)
        elif policy_prop['type'] == 'rank based stochastic':
            return RankBasedPrioritizedSamplePolicy(
                self, metric=policy_prop['metric'],
                highest_values_highest_priority=policy_prop['proportional'],
                alpha=policy_prop['alpha'])
        else:
            assert False, 'unknown sample policy'

    def _feed_data(self, feed_dict, exp_cmp, indcs, place_holders, buffer, properties):
        """Internal recursive function to fill part of a feed_dict with placeholders and data.
        Args:
            feed_dict: the (partially filled) feed_dict
            exp_cmp: key of the dict to be filled (the value of which is
            either another dict with signals or a signal)
            indcs: the experience indices to be used for the batch
            place_holders: dict with (dict of) placeholders containing at least exp_cmp
            buffer: buffer dict containing at least exp_cmp as key
        """
        if isinstance(buffer[exp_cmp], dict):
            for sub_cmp in buffer[exp_cmp]:
                self._feed_data(feed_dict, sub_cmp, indcs, buffer=buffer[exp_cmp],
                                place_holders=place_holders[exp_cmp],
                                properties=properties[exp_cmp])
        else:
            result_data = buffer[exp_cmp][indcs]
            feed_dict[place_holders[exp_cmp]] = \
                ExperienceBuffer.optionally_normalize(result_data, properties[exp_cmp])

    def _start_episode(self):
        """Start experience metadata administration for a new episode.
        This function is called when a new experience is added after the last episode finished.
        """
        self._experience_and_episode_metadata['last_episode_mean_return'] = None
        self._experience_and_episode_metadata['last_episode_initial_return'] = None
        self._experience_and_episode_metadata['current_episode_finished'] = False
        self._experience_and_episode_metadata['current_episode_index'] += 1
        self._experience_and_episode_metadata['last_episode_rewards']['episode'] = \
            self._experience_and_episode_metadata['current_episode_index']
        self._experience_and_episode_metadata['last_episode_rewards']['rewards'] = []

    def _finish_episode(self):
        """Update experience metdadata with the knowledge that the current episode just finished.
        This function is called by add_experience when terminal is True
        """
        self._experience_and_episode_metadata['current_episode_finished'] = True
        episode = self._experience_and_episode_metadata['current_episode_index']
        count, rollout_sum, ret = 0, 0, 0
        # Walk the episode backwards, accumulating the discounted return.
        for seq_rew in reversed(
                self._experience_and_episode_metadata['last_episode_rewards']['rewards']):
            ret = seq_rew.reward + self._properties['forgetting_factor'] * ret
            count, rollout_sum = count + 1, rollout_sum + ret
            idx = seq_rew.buffer_index
            # NOTE(review): 'experience_episodes' is initialized to zeros and is
            # never assigned an episode index anywhere in this class, while
            # 'current_episode_index' counts from 1 after the first episode --
            # so this condition seems to hold only during episode 0 and
            # 'experience_returns' appears to never be updated afterwards.
            # Confirm whether episode tagging of written slots was lost.
            if idx is not None and self._experience_and_episode_metadata['experience_episodes'][
                    idx] == episode:
                self._experience_and_episode_metadata['experience_returns'][idx] = ret
            self._experience_and_episode_metadata['last_episode_initial_return'] = ret
        self._experience_and_episode_metadata['last_episode_mean_return'] = rollout_sum / float(
            count)
        self._experience_and_episode_metadata['last_episode_rewards']['rewards'] = []

    def _optionally_load_buffer(self):
        """Load the contents of a saved buffer iff 'load_replay_buffer' is set in settings."""
        filepath = self._properties.get('load_replay_buffer')
        if filepath:
            self.load_buffer_from_disk(filepath)

    def _flatten_dict(self, dictionary, basename=''):
        """Recursive helper function that produces a one level dict from a dict.
        Args:
            dictionary: the dict to be flattened
            basename: concatenated name of the higher level keys, used to recreate the original
            structure.
        Returns: a one level dictionary in which the keys of different levels are joined by '/'
        """
        result = {}
        for key, val in dictionary.items():
            if isinstance(val, np.ndarray):
                result[basename + key] = val
            elif isinstance(val, dict):
                branch = self._flatten_dict(val, basename=basename + key + '/')
                result.update(branch)
            else:
                assert False, 'unexpected type: {:s}'.format(str(type(val)))
        return result

    def _process_flat_buffer_file(self, flat_external):
        """Load the contents from an external flat buffer file into the buffer.
        Args:
            flat_external: the flat buffer dict to be loaded
        Returns: the number of experiences that were loaded from the external buffer into the
        local buffer. This function does not mark the loaded experiences as new experiences,
        see all_fresh() to do so.
        """
        # NOTE(review): under Python 3, ``flat_external.items()`` (a view on an
        # NpzFile/dict) is not subscriptable, so ``items()[0][1]`` raises
        # TypeError -- this looks like unported Python 2 code; confirm and port
        # (e.g. next(iter(flat_external.values()))).
        added_experiences = min(len(self), len(flat_external.items()[0][1]))
        flat_self = self._flatten_dict(self._buffer)
        for key, val in flat_self.items():
            if key in flat_external:
                val[0:added_experiences - 1] = flat_external[key][0: added_experiences - 1]
            else:
                print("MISSING FROM EXTERNAL DATABASE BUFFER: {:s}".format(key))
        return added_experiences
# noinspection PyProtectedMember
class OverwritePolicy(object):
    """Abstract base class that picks the buffer slot for each new experience.

    While the buffer is still filling up, slots are handed out sequentially.
    Once it is full, the subclass hook ``_next_index`` decides which slot to
    reclaim.
    """

    def __init__(self, experience_buffer):
        """Bind the policy to the ExperienceBuffer whose slots it manages."""
        self.experience_buffer = experience_buffer
        self.index = -1

    def next_index(self):
        """Return the buffer index the next new experience should be written to."""
        meta = self.experience_buffer._buffer_metadata
        capacity = len(self.experience_buffer)
        if meta['last_write_index'] < capacity - 1:
            # Buffer not yet full: append to the next free slot.
            meta['last_write_index'] += 1
            self.index = meta['last_write_index']
        else:
            meta['last_write_index'] = capacity - 1
            # Buffer full: let the subclass pick a victim slot.
            self._next_index()
        # Wrap any out-of-range subclass choice back to the start.
        if self.index is not None and self.index > capacity - 1:
            self.index = 0
        return self.index

    def _next_index(self):
        """Subclass hook invoked by next_index once the buffer is full.

        Never call this directly; always go through next_index().
        """
        raise NotImplementedError
class FifoOverwritePolicy(OverwritePolicy):
    """Overwrite policy that always reclaims the oldest slot (first-in first-out)."""

    def __init__(self, experience_buffer):
        """Attach FIFO overwriting to the given ExperienceBuffer."""
        super().__init__(experience_buffer)

    def _next_index(self):
        """Advance one slot; next_index() wraps the cursor back to 0 at the end.

        Never call this directly; always go through next_index().
        """
        self.index += 1
class ReservoirOverwritePolicy(OverwritePolicy):
    """Overwrite policy implementing reservoir sampling: every time step ever
    experienced has an equal chance of residing in the buffer at any time."""

    def __init__(self, experience_buffer):
        """Set up reservoir-sampling overwrites for the given ExperienceBuffer."""
        super().__init__(experience_buffer)
        self.idx_count = len(experience_buffer)

    def _next_index(self):
        """Keep the new experience with probability |buffer| / #seen; on a keep,
        it overwrites a uniformly random slot, otherwise the write is dropped.

        Do not call this method directly, always call next_index() instead.
        """
        self.idx_count += 1
        buffer_size = len(self.experience_buffer)
        keep_probability = buffer_size / self.idx_count
        if random.random() < keep_probability:
            self.index = random.randint(0, buffer_size - 1)
        else:
            # None signals the caller to discard the new experience.
            self.index = None
class StochasticRankBasedOverwritePolicy(OverwritePolicy):
    """Overwrite policy that, once the buffer is full, picks the victim slot
    stochastically according to a rank-based priority over an
    experience_meta_data metric."""

    def __init__(self, experience_buffer, metric, highest_values_highest_priority=True, alpha=1.2):
        super().__init__(experience_buffer)
        # Roughly one bin per 500 stored experiences, never fewer than 3.
        bin_count = max(3, int(len(self.experience_buffer) / 500))
        self.sampler = OrderedDatabaseIndicesSampler(
            experience_buffer=experience_buffer,
            metric=metric,
            bins=bin_count,
            alpha=alpha,
            lowest_value_lowest_index=not highest_values_highest_priority,
        )

    def _next_index(self):
        """Draw the overwrite target from the rank-based sampler.

        Do not call this method directly, always call next_index() instead.
        """
        self.index = self.sampler.sample_one()
# noinspection PyMethodMayBeStatic,PyUnusedLocal
class SamplePolicy(object):
    """Base class for strategies that choose buffer indices for training batches.

    Defines only the shared sampling machinery; concrete policies subclass it.
    """

    def __init__(self, experience_buffer):
        """Remember the buffer to draw samples from.

        Args:
            experience_buffer: the ExperienceBuffer to be sampled from
        """
        self.experience_buffer = experience_buffer

    def sample_indices(self, batch_size, only_new):
        """Return the buffer indices for one training batch.

        Args:
            batch_size: int, required, number of experiences in the batch
            only_new: boolean, only sample from previously unsampled experiences.
        """
        raise NotImplementedError

    def _default_sample(self, batch_size, only_new):
        metadata = self.experience_buffer._buffer_metadata
        if only_new:
            fresh = np.array(list(metadata['unused_experience_idcs']))
            if len(fresh) == 0:
                return None
            # Sample with replacement only when there are too few fresh indices
            # to fill the batch.
            allow_repeats = len(fresh) < batch_size
            return np.random.choice(fresh, batch_size, replace=allow_repeats)
        # Sample over [0, last_write_index); allow repeats when the written
        # region is still smaller than the batch.
        written = metadata['last_write_index']
        allow_repeats = written - 1 < batch_size
        return np.random.choice(written, batch_size, replace=allow_repeats)
# noinspection PyProtectedMember
class UniformSamplePolicy(SamplePolicy):
    """Sample policy drawing experiences uniformly at random from the buffer."""

    def __init__(self, experience_buffer):
        """Set up uniform random sampling over experience_buffer."""
        super().__init__(experience_buffer)

    def sample_indices(self, batch_size, only_new=False):
        """Return a batch of uniformly sampled buffer indices.

        Args:
            batch_size: int, required, number of buffer indices to return
            only_new: bool, only return previously unsampled experiences.
        Returns:
            list, experience buffer indices.
        """
        return self._default_sample(batch_size, only_new)
# noinspection PyProtectedMember
class RankBasedPrioritizedSamplePolicy(SamplePolicy):
    """Sample policy with rank-based prioritization over a metadata metric
    (one stratified sample per priority bin)."""

    def __init__(self, experience_buffer, metric, highest_values_highest_priority=True, alpha=0.7):
        super().__init__(experience_buffer)
        self.sampler = OrderedDatabaseIndicesSampler(
            experience_buffer=experience_buffer,
            metric=metric,
            bins=self.experience_buffer._properties['batch_size'],
            alpha=alpha,
            lowest_value_lowest_index=not highest_values_highest_priority,
        )

    def sample_indices(self, batch_size, only_new=False):
        # The sampler is binned for the buffer's configured batch size only.
        if batch_size:
            assert batch_size == self.experience_buffer._properties['batch_size']
        written = self.experience_buffer._buffer_metadata['last_write_index']
        if only_new or written - 1 <= batch_size:
            # Too little data for prioritized bins (or fresh-only requested):
            # fall back to the plain sampling strategy.
            return self._default_sample(batch_size, only_new)
        return self.sampler.sample_all()
class OrderedDatabaseIndicesSampler(object):
    """Keeps buffer indices sorted by a metadata metric and samples them with
    rank-based priorities (as in rank-based prioritized experience replay).

    Sorted positions are partitioned into bins of roughly equal cumulative
    probability under p(rank) ~ (1/rank)**alpha; sampling picks uniformly
    inside a bin.
    """
    def __init__(self, experience_buffer, metric, bins, alpha, lowest_value_lowest_index=True):
        # Negating the key flips the sort order so the highest metric values
        # get the lowest (highest-priority) ranks when requested.
        order_multiplier = 1 if lowest_value_lowest_index else -1
        self.experience_buffer = experience_buffer
        self.bins = bins  # requested bin count; effective count adapts to buffer fill
        self.bin_indices = [0, 0]  # bin boundary ranks; rebuilt lazily on demand
        self.alpha = alpha  # priority exponent; larger -> greedier sampling
        start_list = []
        self.ordered_indices = sortedcontainers.SortedListWithKey(start_list, key=lambda
            x: float(order_multiplier * self.experience_buffer._buffer['experience_meta_data'][metric][x][0]))
        # Keep the sorted list in sync whenever the metric changes in the buffer.
        experience_buffer.add_experience_meta_data_update_listener(metric, self.update)
    def update(self, indices, pre=False):
        """Since the ordered list is indexed based on the sorting,
        entries should be removed with their old keys, otherwise duplicate entries arise.

        Expected to be called with pre=True before the metric value changes
        (remove the stale entry) and pre=False afterwards (re-insert).
        """
        # NOTE(review): `type(indices) == int` does not match numpy integer
        # scalars, which would fall into the iteration branch and fail —
        # confirm callers only pass plain ints or iterables.
        if type(indices) == int:
            if pre:
                self.ordered_indices.discard(indices)
            else:
                self.ordered_indices.add(indices)
        else:
            for idx in indices:
                i = int(idx)
                if pre:
                    self.ordered_indices.discard(i)
                else:
                    self.ordered_indices.add(i)
    def __getitem__(self, item):
        # `item` is a rank (sorted position); returns the stored buffer index.
        return self.ordered_indices[item]
    def __len__(self):
        return len(self.ordered_indices)
    def sample_one(self):
        """Sample one buffer index from a uniformly chosen priority bin."""
        self._possibly_rebuild_bins()
        return self._sample_bin(np.random.randint(0, len(self.bin_indices) - 1))
    def sample_all(self):
        """Sample one buffer index from every bin (a rank-stratified batch)."""
        self._possibly_rebuild_bins()
        return [self._sample_bin(i) for i in range(len(self.bin_indices) - 1)]
    def _possibly_rebuild_bins(self):
        """Recompute bin boundaries when the bin count or buffer fill changed."""
        size = self.experience_buffer._buffer_metadata['last_write_index']
        if len(self.bin_indices) - 1 != self.bins or self.bin_indices[-1] != \
                size:
            # Rank-based priorities: p(rank) proportional to (1/rank)**alpha.
            sample_probabilities = (1 / np.arange(1, size + 1)) ** self.alpha
            sample_probabilities = sample_probabilities / sample_probabilities.sum()
            cum_prob = sample_probabilities.cumsum()
            self.bin_indices = [0]
            bins = min(self.bins, size)
            for i in range(bins - 1):
                # Each bin covers ~1/bins of the probability mass and must
                # contain at least one rank.
                self.bin_indices.append(max(
                    self.bin_indices[i] + 1,
                    np.argmax(cum_prob >= (i + 1) / (bins))))
            self.bin_indices.append(size)
    def _sample_bin(self, bin):
        """Return the buffer index at a uniformly random rank inside bin `bin`."""
        ordered_index = np.random.randint(self.bin_indices[bin], self.bin_indices[
            bin + 1])
        return self[ordered_index]
| StarcoderdataPython |
1872989 | import logging
from selvpcclient import base
from selvpcclient.util import resource_filter
from selvpcclient.exceptions.base import ClientException
# Module-level logger for subnet operations.
log = logging.getLogger(__name__)
class Subnet(base.Resource):
    """Represents a subnet."""
    def delete(self):
        """Delete current subnet from domain.

        Delegates to the owning :class:`SubnetManager` using this resource's id.
        """
        self.manager.delete(self.id)
class SubnetManager(base.Manager):
    """Manager class for manipulating subnet."""
    # API responses are deserialized into Subnet instances by the base Manager.
    resource_class = Subnet

    @resource_filter
    def list(self, detailed=False, return_raw=False):
        """Get list of all public subnets in current domain.

        :param bool detailed: Include info about servers. (optional)
        :param return_raw: flag to force returning raw JSON instead of
                           Python object of self.resource_class
        :rtype: list of :class:`Subnet`
        """
        return self._list('/subnets?detailed=' + str(detailed), 'subnets',
                          return_raw=return_raw)

    def add(self, project_id, subnets, return_raw=False):
        """Create public subnets for project.

        :param string project_id: Project id.
        :param dict subnets: Dict with key `subnets` and value as array
                             of items region, quantity and type::

                                 {
                                     "subnets": [
                                         {
                                             "region": "ru-1",
                                             "quantity": 4,
                                             "type": "ipv4",
                                             "prefix_length": 29
                                         }
                                     ]
                                 }
        :param return_raw: flag to force returning raw JSON instead of
                           Python object of self.resource_class
        :rtype: list of :class:`Subnet`
        """
        url = '/subnets/projects/{}'.format(project_id)
        return self._list(url, 'subnets', body=subnets, return_raw=return_raw)

    def show(self, subnet_id, return_raw=False):
        """Show detailed subnet information.

        :param string subnet_id: Subnet id.
        :param return_raw: flag to force returning raw JSON instead of
                           Python object of self.resource_class
        :rtype: :class:`Subnet`
        """
        return self._get('/subnets/{}'.format(subnet_id), 'subnet',
                         return_raw=return_raw)

    def delete(self, subnet_id):
        """Delete subnet from domain."""
        self._delete('/subnets/{}'.format(subnet_id))

    def delete_many(self, subnet_ids, raise_if_not_found=True):
        """Delete few subnets from domain.

        :param list subnet_ids: Subnet id's list
        :param bool raise_if_not_found: Raise exception if object won't found
        """
        for subnet_id in subnet_ids:
            try:
                self.delete(subnet_id)
                log.info("Subnet %s has been deleted", subnet_id)
            except ClientException as err:
                # Best-effort bulk delete: failures are logged and skipped
                # unless the caller asked for strict behaviour.
                if raise_if_not_found:
                    raise err
                log.error("%s %s", err, subnet_id)
| StarcoderdataPython |
1649024 | # This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Class to test the backend calibrations."""
from qiskit.test import QiskitTestCase
from qiskit.test.mock import FakeArmonk
from qiskit_experiments.calibration_management import BackendCalibrations
class TestBackendCalibrations(QiskitTestCase):
    """Tests covering the functionality of BackendCalibrations."""

    def test_run_options(self):
        """Verify that the measurement and qubit frequencies reported by a
        mock backend can be read back through the calibrations object."""
        calibrations = BackendCalibrations(FakeArmonk())
        measurement_freqs = calibrations.get_meas_frequencies()
        qubit_freqs = calibrations.get_qubit_frequencies()
        self.assertEqual(measurement_freqs, [6993370669.000001])
        self.assertEqual(qubit_freqs, [4971852852.405576])
12852279 | <gh_stars>0
# Copying <NAME>'s solution https://github.com/hollygrimm/cs294-homework/blob/master/hw1/bc.py
# Copy and pasting and merging it into a copy of my behavior_cloner.py code.
import argparse
import pickle
import os
import sys
import tensorflow.compat.v1 as tf
import numpy as np
from sklearn.model_selection import train_test_split
import mlflow.tensorflow
import gym
from gym import wrappers
from tqdm import tqdm
#Imports copied from hollygrimm's solution
import logging
from hollygrimm_model import Model
# NOTE(review): autolog does not appear to work with the way the imported
# Model class builds its TensorFlow graph — kept for whatever metrics it does
# capture; confirm whether it can be removed.
mlflow.tensorflow.autolog()
def config_logging(log_file):
    """Configure the root logger to write DEBUG records to *log_file*.

    Any pre-existing file at *log_file* is removed first so every run starts
    with a fresh log. Returns the (root) logger.
    """
    if os.path.exists(log_file):
        os.remove(log_file)
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    file_handler = logging.FileHandler(log_file)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(logging.Formatter('%(asctime)s - %(message)s'))
    root_logger.addHandler(file_handler)
    return root_logger
def create_model(session, obs_samples, num_observations, num_actions, logger, optimizer,
                 learning_rate, restore, checkpoint_dir):
    """Build a Model and either restore its checkpointed weights or
    initialize all TensorFlow variables from scratch."""
    model = Model(obs_samples, num_observations, num_actions, checkpoint_dir, logger,
                  optimizer, learning_rate)
    if restore:
        model.load(session)
        return model
    logger.info("Created model with fresh parameters")
    session.run(tf.global_variables_initializer())
    return model
def bc(expert_data_filename, env_name, restore, results_dir, max_timesteps=None,
       optimizer='adam', num_epochs=100, learning_rate=.001, batch_size=32, keep_prob=1):
    """Train a behavior-cloning policy on expert rollouts and evaluate it.

    Loads pickled expert (observation, action) pairs, fits a supervised model
    for `num_epochs`, checkpoints on validation-loss improvement (inside
    `validate`), then records 10 evaluation episodes and logs mean/std reward.
    Returns (mean_reward, std_reward) over the final 10 episodes.
    """
    # Reset TF env
    tf.reset_default_graph()
    # Create a gym env.
    env = gym.make(env_name)
    max_steps = max_timesteps or env.spec.max_episode_steps
    with open(expert_data_filename, 'rb') as f:
        # NOTE(review): pickle on an external file — trusted input assumed.
        data = pickle.loads(f.read())
    obs = np.stack(data['observations'], axis=0)
    actions = np.squeeze(np.stack(data['actions'], axis=0))
    x_train, x_test, y_train, y_test = train_test_split(obs, actions, test_size=0.2)
    num_samples = len(x_train)
    min_val_loss = sys.maxsize
    with tf.Session() as session:
        model = create_model(session, x_train, x_train.shape[1], y_train.shape[1], logger,
                             optimizer, learning_rate, restore, results_dir)
        file_writer = tf.summary.FileWriter(results_dir, session.graph)
        #file_writer = tf.summary.FileWriter(results_dir, session.graph)
        for epoch in tqdm(range(num_epochs)):
            # Shuffle the training set each epoch.
            perm = np.random.permutation(x_train.shape[0])
            obs_samples = x_train[perm]
            action_samples = y_train[perm]
            loss = 0.
            for k in range(0, obs_samples.shape[0], batch_size):
                batch_loss, training_scalar = model.update(session, obs_samples[k:k + batch_size],
                                                           action_samples[k:k + batch_size],
                                                           keep_prob)
                loss += batch_loss
            # Only the last mini-batch's summary is written per epoch.
            file_writer.add_summary(training_scalar, epoch)
            min_val_loss, validation_scalar = validate(model, logger, session, x_test, y_test,
                                                       epoch, batch_size, min_val_loss, results_dir)
            file_writer.add_summary(validation_scalar, epoch)
            # Test the updated model after each epoch of training the DNN.
            new_exp = model.test_run(session, env, max_steps)
            tqdm.write(
                "Epoch %3d; Loss %f; Reward %f; Steps %d" % (epoch, loss / num_samples,
                                                             new_exp['reward'], new_exp['steps']))
        # Write a video of the final gym test results.
        env = wrappers.Monitor(env, results_dir, force=True)
        results = []
        for _ in tqdm(range(10)):
            results.append(model.test_run(session, env, max_steps)['reward'])
        logger.info("Reward mean and std dev with behavior cloning: %f(%f)" % (np.mean(results),
                                                                               np.std(results)))
        mlflow.log_params({"reward_mean": np.mean(results), "reward_std": np.std(results)})
    return np.mean(results), np.std(results)
def validate(model, logger, session, x_test, y_test, num_epoch, batch_size, min_loss, checkpoint_dir):
    """Run one validation pass and checkpoint the model when the loss improves.

    Returns (best-so-far validation loss, the validation summary scalar).
    """
    losses = []
    loss, validation_scalar = model.validate(session, x_test, y_test)
    losses.append(loss)
    # With a single full-set validation call this is just `loss`; the list is
    # kept so per-batch validation can be reinstated easily.
    mean_loss = sum(losses) / len(losses)
    logger.info("Finished epoch %d, average validation loss = %f" % (num_epoch, mean_loss))
    if mean_loss < min_loss:  # Only save model if val loss dropped
        model.save(session)
        min_loss = mean_loss
    return min_loss, validation_scalar
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('expert_run_id', type=str)
parser.add_argument('--num_epochs', type=int, default=100)
parser.add_argument('--batch_size', type=int, default=32)
parser.add_argument("--restore", type=bool, default=False)
args = parser.parse_args()
for k, v in vars(args).items():
mlflow.log_param(k, v)
if not os.path.exists('results'):
os.makedirs('results')
log_file = os.path.join(os.getcwd(), 'results', 'train_out.log')
logger = config_logging(log_file)
#env_models = [('Ant-v1', 'data/Ant-v1_data_250_rollouts.pkl', 'experts/Ant-v1.pkl', 250),
# ('HalfCheetah-v1', 'data/HalfCheetah-v1_data_10_rollouts.pkl', 'experts/HalfCheetah-v1.pkl', 10),
# ('Hopper-v1', 'data/Hopper-v1_data_10_rollouts.pkl', 'experts/Hopper-v1.pkl', 10),
# ('Humanoid-v1', 'data/Humanoid-v1_data_250_rollouts.pkl', 'experts/Humanoid-v1.pkl', 250),
# ('Reacher-v1', 'data/Reacher-v1_data_250_rollouts.pkl', 'experts/Reacher-v1.pkl', 250),
# ('Walker2d-v1', 'data/Walker2d-v1_data_10_rollouts.pkl','experts/Walker2d-v1.pkl', 10)
# ]
#for env_name, rollout_data, expert_policy_file, num_rollouts in env_models :
# ===================================================
# read in dataset from expert policy rollouts.
mlflow_c = mlflow.tracking.MlflowClient()
expert_data_file_base = mlflow_c.download_artifacts(args.expert_run_id, "")
expert_data_file_rel_path = mlflow_c.list_artifacts(args.expert_run_id, "expert_data_file")[
0].path
expert_data_filename = expert_data_file_base + "/" + expert_data_file_rel_path
print("opening {0}".format(expert_data_filename))
env_name = mlflow_c.get_run(args.expert_run_id).data.params["envname"]
bc_results_dir = os.path.join(os.getcwd(), 'results', env_name, 'bc')
bc_reward_mean, bc_reward_std = bc(expert_data_filename, env_name, args.restore, bc_results_dir,
batch_size=args.batch_size, num_epochs=args.num_epochs)
logger.info('Behavior Cloning mean & std rewards: %f(%f))' %
(bc_reward_mean, bc_reward_std))
print("logging 'results' directory to mlflow.")
mlflow.log_artifacts('results')
# Commenting out dagger for now.
#da_results_dir = os.path.join(os.getcwd(), 'results', env_name, 'da')
#if not os.path.exists(da_results_dir):
# os.makedirs(da_results_dir)
#_,_, da_mean,da_std = dagger(rollout_data, expert_policy_file, env_name, args.restore, da_results_dir, num_rollouts)
#results.append((env_name, ex_mean, ex_std, bc_mean, bc_std, da_mean, da_std))
#for env_name, ex_mean, ex_std, bc_mean, bc_std, da_mean, da_std in results :
# logger.info('Env: %s, Expert: %f(%f), Behavior Cloning: %f(%f), Dagger: %f(%f)'%
# (env_name, ex_mean, ex_std, bc_mean, bc_std, da_mean, da_std))
| StarcoderdataPython |
8088183 | <gh_stars>0
from discord.ext import commands
def can_mute(**perms):
    """Command check: allow only members with the mute_members guild permission.

    **perms is accepted for signature compatibility but unused.
    """
    def predicate(ctx):
        # guild_permissions.mute_members is already a bool; return it directly
        # instead of the redundant if/else True/False dance.
        return ctx.author.guild_permissions.mute_members
    return commands.check(predicate)
def can_kick(**perms):
    """Command check: allow only members with the kick_members guild permission.

    **perms is accepted for signature compatibility but unused.
    """
    def predicate(ctx):
        # The permission attribute is already a bool.
        return ctx.author.guild_permissions.kick_members
    return commands.check(predicate)
def can_ban(**perms):
    """Command check: allow only members with the ban_members guild permission.

    **perms is accepted for signature compatibility but unused.
    """
    def predicate(ctx):
        # The permission attribute is already a bool.
        return ctx.author.guild_permissions.ban_members
    return commands.check(predicate)
def can_managemsg(**perms):
    """Command check: allow only members with the manage_messages permission.

    **perms is accepted for signature compatibility but unused.
    """
    def predicate(ctx):
        # The permission attribute is already a bool.
        return ctx.author.guild_permissions.manage_messages
    return commands.check(predicate)
def can_manageguild(**perms):
    """Command check: allow only members with the manage_guild permission.

    **perms is accepted for signature compatibility but unused.
    """
    def predicate(ctx):
        # The permission attribute is already a bool.
        return ctx.author.guild_permissions.manage_guild
    return commands.check(predicate)
def is_admin(**perms):
    """Command check: allow only members with the administrator permission.

    **perms is accepted for signature compatibility but unused.
    """
    def predicate(ctx):
        # The permission attribute is already a bool.
        return ctx.author.guild_permissions.administrator
    return commands.check(predicate)
3248713 | from __future__ import print_function
import os
import sys
import shutil
import tempfile
import pytest
from gcpm.cli import cli
# Preserve the process's original argv so each test can restore it after
# simulating a CLI invocation.
__ORIG_ARGV__ = sys.argv
def test_show_config():
    """Smoke-test `gcpm show-config` with the fixture configuration."""
    sys.argv = ["gcpm", "show-config", "--config", "./tests/data/gcpm.yml"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert True
def test_help():
    """Smoke-test the `gcpm help` sub-command."""
    sys.argv = ["gcpm", "help"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert True
def test_version():
    """Smoke-test the `gcpm version` sub-command."""
    sys.argv = ["gcpm", "version"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert True
@pytest.mark.skip
def test_install():
    """Smoke-test `gcpm install` (skipped: modifies the host system)."""
    sys.argv = ["gcpm", "install"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert True
@pytest.mark.skip
def test_uninstall():
    """Smoke-test `gcpm uninstall` (skipped: modifies the host system)."""
    sys.argv = ["gcpm", "uninstall"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert True
def test_run(default_gcpm):
    """Run one `gcpm run` cycle in test mode and clean up the created instance."""
    sys.argv = ["gcpm", "run", "--config", "./tests/data/gcpm.yml",
                "--test", "True", "--oneshot", "True"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert default_gcpm.get_gce().delete_instance("gcp-test-wn-1core-000002")
@pytest.mark.skip
def test_service(default_gcpm):
    """Run one `gcpm service` cycle (skipped) and clean up the created instance."""
    sys.argv = ["gcpm", "service", "--test", "True", "--oneshot", "True"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert default_gcpm.get_gcpm().get_gce().delete_instance("gcp-test-wn-1core-000002") if False else \
        default_gcpm.get_gce().delete_instance("gcp-test-wn-1core-000002")
def test_set_pool_password(default_gcpm):
    """Upload an (empty) pool password file via the CLI and clean up the bucket."""
    directory = tempfile.mkdtemp()
    filename = directory + "/pool_password"
    with open(filename, "a"):
        os.utime(filename, None)
    sys.argv = ["gcpm", "set-pool-password", filename,
                "--config", "./tests/data/gcpm.yml"]
    try:
        cli()
    finally:
        # Restore argv even if cli() raises, so later tests are not polluted.
        sys.argv = __ORIG_ARGV__
    assert True
    assert default_gcpm.get_gcs().delete_file("pool_password") == ""
    assert default_gcpm.get_gcs().delete_bucket() is None
    shutil.rmtree(directory)
| StarcoderdataPython |
272635 | <reponame>vuhcl/cs110_final_project
import math, mmh3
import numpy as np
class QuotientFilter:
    """Approximate-membership quotient filter backed by a numpy object array.

    Each slot stores three metadata bits (is_occupied, is_continuation,
    is_shifted) plus the remainder. Supports insert and query; deletion is
    not implemented.

    BUGFIX: the original code concatenated `range` objects with `+`, which is
    a Python 2 idiom and raises TypeError on Python 3. All wrap-around scans
    now materialize the ranges as lists, preserving the exact scan order.
    """
    # num_stored (n): the QF must be able to store this many elements
    # while maintaining the false positive rate.
    # error_rate (f): the theoretically expected probability of
    # returning false positives, default is 1%.
    # alpha: load factor, default is None, where we will use n and f
    # to calculate the quotient bit size (q) and remainder bit size (r).
    def __init__(self, num_stored, alpha=None, error_rate=0.01):
        """
        Initialize the QF, calculate the parameters and raise error
        if needed. Then, create a QF with a corresponding size.
        """
        if not (0 < error_rate < 1):
            raise ValueError("Error_Rate must be between 0 and 1.")
        if num_stored <= 0:
            raise ValueError("Number of elements stored must be > 0.")
        self.r = int(-math.log(error_rate, 2))
        if alpha is None:
            self.m = int(-num_stored / (math.log(1 - error_rate) * 2 ** self.r))
            self.q = int(math.log(self.m, 2))
        else:
            if not (0 < alpha <= 1):
                raise ValueError("Load factor must be between 0 and 1.")
            self.m = int(num_stored / alpha)
            self.q = int(math.ceil(math.log(self.m, 2)))
        if self.q + self.r > 64:
            raise ValueError("Fingerprint size must be 64bits or less.")
        # Create the filter, the three bits are is_occupied, is_continuation,
        # and is_shifted in order. The last element is to store the remainder
        self.array = np.array([[False, False, False, None] for _ in range(self.m)])

    def get_elem(self, elem):
        """Get the quotient and remainder of an element using a hash function"""
        quotient = mmh3.hash(elem) // (2 ** self.r) % self.m
        remainder = mmh3.hash(elem) % (2 ** self.r)
        return quotient, remainder

    def is_empty(self, index):
        """Return a boolean value stating whether the slot is empty"""
        return not any(self.array[index][:3])

    def is_run_start(self, index):
        """Return a boolean value stating whether the slot is the start of a run"""
        return not self.array[index][1] and (self.array[index][0] or self.array[index][2])

    def is_cluster_start(self, index):
        """Return a boolean value stating whether the slot is the start of a cluster"""
        # Actually not used in this implementation, but will be needed
        # when we expand the implementation to support deletion
        return self.array[index][0] and not any(self.array[index][1:3])

    def find_run_start(self, index):
        """Find the index of the start of the run containing the input index"""
        running_count = 0
        # Scan left and count the number of runs until encounter
        # (ranges are materialized so the wrap-around concatenation works on
        # Python 3; the visiting order is unchanged).
        for i in list(range(index, -1, -1)) + list(range(index, self.m))[::-1]:
            if not self.is_empty(i) and not self.array[i][2]:
                break
            if self.array[i][0]:
                running_count += 1
        # Scan right and countdown every time a new run starts until running_count == 0
        for j in list(range(i, self.m)) + list(range(i)):
            if not self.array[j][1]:
                running_count -= 1
            if running_count == 0:
                break
        return j

    def query(self, elem):
        """Perform a lookup operation"""
        quotient, remainder = self.get_elem(elem)
        # If is_occupied is False, element is not in QF
        if not self.array[quotient][0]:
            return False
        # Else, find the start of the run that should containing the element
        start = self.find_run_start(quotient)
        # Scan the run to see if any slot contain the remainder
        for index in list(range(start, self.m)) + list(range(start)):
            if remainder == self.array[index][3]:
                return True
            if not self.array[index][2] and index != start:
                return False
        return False

    def insert(self, elem):
        """
        Follow the same path as lookup until we are sure the element
        is not in the QF, then find the slot to insert the element,
        push back the remainders in any slots in the cluster at or
        after the insert slot and update the bits.
        """
        quotient, remainder = self.get_elem(elem)
        # If the canonical slot is not empty, insert into the slot
        if self.is_empty(quotient):
            self.array[quotient][3] = remainder
            self.array[quotient][0] = True
            return
        # If not is_occupied, set is_occupied
        if not self.array[quotient][0]:
            self.array[quotient][0] = True
        # Scan the run to see if the element has been inserted
        start = self.find_run_start(quotient)
        if self.array[quotient][0]:
            for slot in list(range(start, self.m)) + list(range(start)):
                if remainder < self.array[slot][3]:
                    break
                elif not self.array[slot][2] and slot != quotient:
                    break
                elif not self.array[slot][1] and slot != quotient:
                    break
        # If the slot does not contain a value, insert into the slot
        # and update bits
        if self.array[slot][3] is None:
            self.array[slot][3] = remainder
            if slot != quotient:
                self.array[slot][2] = True
            if not self.is_run_start(slot):
                self.array[slot][1] = True
            return
        # Else, switch the value and update bits
        self.array[slot][3], remainder = remainder, self.array[slot][3]
        if slot != quotient:
            self.array[slot][2] = True
        if not self.is_run_start(slot):
            self.array[slot][1] = True
        # Then push back the remainders in the cluster
        for index in list(range(slot + 1, self.m)) + list(range(slot)):
            if self.array[index][3] is None:
                self.array[index][3] = remainder
                if not self.array[index][2]:
                    self.array[index][2] = True
                if self.is_run_start(index - 1):
                    self.array[index][1] = True
                return
            self.array[index][3], remainder = remainder, self.array[index][3]
            if index == self.find_run_start(index):
                self.array[index][1] = True
            if not self.array[index][2]:
                self.array[index][2] = True
6584427 | from UdonPie import UnityEngine
from UdonPie.Undefined import *
class ParticleSystemShapeTextureChannel:
    """Auto-generated UdonPie stub mirroring
    UnityEngine.ParticleSystemShapeTextureChannel; it only carries type
    information for the Udon tooling."""
    def __new__(cls, arg1=None):
        '''
        :returns: ParticleSystemShapeTextureChannel
        :rtype: UnityEngine.ParticleSystemShapeTextureChannel
        '''
        # NOTE(review): stub body — implicitly returns None, so no instance is
        # ever constructed here; presumably replaced by the Udon runtime.
        pass
1790109 | from django.contrib import admin
from .models import Category, Section, Topic, Message
from backend.utils.admin import all_fields
class CategoryAdmin(admin.ModelAdmin):
    """Админка категорий"""
    # Show the title and primary key columns in the changelist.
    list_display = ('title', 'id')
class SectionAdmin(admin.ModelAdmin):
    """Админка разделов"""
    list_display = ("id", "title", "category", "created", 'modified')
    list_display_links = ("title", )
    # Auto-fill the slug from the title while typing in the admin form.
    prepopulated_fields = {"slug": ("title",)}
class TopicAdmin(admin.ModelAdmin):
    """Админка тем"""
    list_display = ("id", "title", "user", "modified", 'moderated', 'deleted', 'private', "created")
    list_display_links = ("title", )
    # Moderation flags are editable directly from the changelist.
    list_editable = ('moderated', 'deleted', 'private')
class MessageAdmin(admin.ModelAdmin):
    """Админка сообщений"""
    list_display = ("id", "user", "topic", 'moderated', 'deleted', "created")
    list_display_links = ("user", )
# class TopicAdmin(admin.ModelAdmin):
# """Админка топиков"""
# list_display = all_fields(Topic)
# list_editable = ('moderated', 'deleted', 'private')
# Register the forum models with the Django admin site; Category uses the
# default ModelAdmin, the rest use the customized admin classes above.
admin.site.register(Category)
admin.site.register(Section, SectionAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(Message, MessageAdmin)
3511761 | <filename>test_project/settings.py
"""
Django settings for test_project project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
import logging
import django.utils.log
import logging.handlers
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Hosts/domains this Django site may serve.
ALLOWED_HOSTS = [
    'likeyiyy.com',
    '127.0.0.1',
    '172.16.58.3'
]
INSTALLED_APPS = [
'mirrors.apps.MirrorsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'test_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'frontend', 'build'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'test_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
import json

# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# Deployment-specific settings (currently only DATABASES) come from an
# external JSON file selected via the MIRRORS_CONFIG environment variable.
config_path = os.environ.get('MIRRORS_CONFIG')
if not config_path:
    config_path = '/opt/web/config-mirror.json'
# Use a context manager so the config file handle is closed deterministically
# (the original `json.loads(open(path).read())` leaked the file object).
with open(config_path) as config_file:
    json_config = json.load(config_file)
DATABASES = json_config['DATABASES']
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    # Built frontend assets are served as static files.
    os.path.join(BASE_DIR, 'frontend', 'build', 'static'),
]
# Custom project setting consumed by application code (not a Django setting).
CLIENT_CONFIG = {
    'CLIENT_CODE': 'business'
}
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format': '%(asctime)s [%(threadName)s:%(thread)d] [%(name)s:%(lineno)d] [%(module)s:%(funcName)s] [%(levelname)s]- %(message)s'}  # log line format
    },
    'filters': {
    },
    'handlers': {
        'default': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/opt/log/mirrors/all.log',  # log output file
            'maxBytes': 1024*1024*10,  # maximum size per file
            'backupCount': 10,  # number of rotated backups to keep
            'formatter': 'standard',  # which formatter to use
        },
        'error': {
            'level': 'ERROR',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/opt/log/mirrors/error.log',
            'maxBytes': 1024*1024*10,
            'backupCount': 10,
            'formatter': 'standard',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'standard'
        }
    },
    'loggers': {
        'django': {
            'handlers': ['default', 'console', 'error'],
            'level': 'DEBUG',
            'propagate': False
        }
    }
}
# Module-level logger bound to the 'django' configuration defined above.
logger = logging.getLogger('django')
8075943 | import json
import pytest
from GSuiteSecurityAlertCenter import MESSAGES, GSuiteClient, DemistoException
from unittest.mock import patch
def get_data_from_file(filepath):
    """Read and return the entire contents of the given file as text.

    :param filepath: absolute or relative path of file
    """
    with open(filepath) as file_handle:
        contents = file_handle.read()
    return contents
# Service-account JSON fixture used to construct test clients.
TEST_JSON = get_data_from_file('test_data/service_account_json.json')
# Dotted path of the HTTP method patched by mocker-based tests.
MOCKER_HTTP_METHOD = 'GSuiteApiModule.GSuiteClient.http_request'
# Default integration configuration parameters shared by tests.
PARAMS = {
    'user_service_account_json': TEST_JSON,
    'admin_email': '<EMAIL>'
}
@pytest.fixture
def gsuite_client():
    # Builds a GSuiteClient from the fixture service-account JSON; TLS
    # verification and proxying are disabled for tests.
    headers = {
        'Content-Type': 'application/json'
    }
    return GSuiteClient(GSuiteClient.safe_load_non_strict_json(TEST_JSON), verify=False, proxy=False, headers=headers)
def test_test_function(mocker, gsuite_client):
    """
    Scenario: Call to test-module should return 'ok' if API call succeeds.

    Given:
    - client object
    When:
    - Calling test function.
    Then:
    - Ensure 'ok' should be return.
    """
    from GSuiteSecurityAlertCenter import test_module, GSuiteClient, service_account
    # Patch auth/HTTP so no real Google API traffic happens.
    mocker.patch.object(GSuiteClient, 'set_authorized_http')
    mocker.patch.object(GSuiteClient, 'http_request')
    mocker.patch.object(service_account.Credentials, 'refresh')
    # A truthy token marks the credentials as successfully refreshed.
    gsuite_client.credentials.token = True
    assert test_module(gsuite_client, {}, {}) == 'ok'
def test_test_function_error(mocker, gsuite_client):
    """
    Scenario: Call to test-module should return error message.

    Given:
    - client object
    When:
    - Calling test function.
    Then:
    - Ensure message should be as expected.
    """
    from GSuiteSecurityAlertCenter import test_module, GSuiteClient, service_account
    mocker.patch.object(GSuiteClient, 'set_authorized_http')
    mocker.patch.object(GSuiteClient, 'http_request')
    mocker.patch.object(service_account.Credentials, 'refresh')
    # A missing token simulates a failed credential refresh.
    gsuite_client.credentials.token = None
    with pytest.raises(DemistoException, match=MESSAGES['TEST_CONNECTIVITY_FAILED_ERROR']):
        test_module(gsuite_client, {}, {})
def test_validate_params_for_fetch_incidents_error():
    """
    Scenario: Invalid fetch-incidents configuration is rejected.

    Given:
    - A non-numeric max_fetch value.
    When:
    - Calling validate_params_for_fetch_incidents.
    Then:
    - Ensure a ValueError with the max-incident message is raised.
    """
    from GSuiteSecurityAlertCenter import validate_params_for_fetch_incidents

    invalid_params = {
        'isFetch': True,
        'max_fetch': 'abc',
        'admin_email': 'hello',
    }
    with pytest.raises(ValueError, match=MESSAGES['MAX_INCIDENT_ERROR']):
        validate_params_for_fetch_incidents(invalid_params, {})
def test_prepare_args_for_invalid_args():
    """
    Tests validate_params_for_list_alerts.

    A negative page_size must raise, and once page_size is removed the
    single-quoted filter values must be normalized to double quotes.
    """
    from GSuiteSecurityAlertCenter import validate_params_for_list_alerts

    arguments = {
        'page_size': -1,
        'filter': "createTime >= '2020-10-28T20:43:34.381Z' AND type='Suspicious login'"
    }
    with pytest.raises(Exception, match=MESSAGES['INTEGER_ERROR'].format('page_size')):
        validate_params_for_list_alerts(arguments)

    del arguments['page_size']
    validated = validate_params_for_list_alerts(arguments)
    assert validated['filter'] == 'createTime >= "2020-10-28T20:43:34.381Z" AND type="Suspicious login"'
def test_create_custom_context_for_batch_command():
    """
    Tests create_custom_context_for_batch_command function.

    Should split a batch API response into custom context entries for the
    successful and the failed alert IDs.
    """
    from GSuiteSecurityAlertCenter import create_custom_context_for_batch_command
    input_data = {
        "successAlertIds": [
            "dummy_alertId1"
        ],
        "failedAlertStatus": {
            "dummy_alertId2": {
                "code": 5,
                "message": "NOT_FOUND"
            }
        }
    }
    # BUG FIX: the original had a stray trailing comma after this list
    # (turning it into a 1-tuple) and then asserted
    # `assert expected_data_success, expected_data_failed == output_data`,
    # which always passed because the first operand is truthy and the
    # comparison was parsed as the assert *message*. The test verified
    # nothing.
    expected_data_success = [
        {
            "id": "dummy_alertId1"
        }
    ]
    expected_data_failed = [
        {
            "id": "dummy_alertId2",
            "code": 5,
            "message": "NOT_FOUND"
        }
    ]
    output_data = create_custom_context_for_batch_command(input_data)
    # NOTE(review): assumes the function returns a (success, failed) pair,
    # matching the tuple comparison the original author intended.
    assert output_data == (expected_data_success, expected_data_failed)
def test_prepare_hr_for_batch_command():
    """
    Tests prepare_hr_for_batch_command function.

    The human-readable table should list the successful and the failed
    alert IDs together with their statuses.
    """
    from GSuiteSecurityAlertCenter import prepare_hr_for_batch_command

    batch_response = {
        "successAlertIds": [
            "dummy_alertId1"
        ],
        "failedAlertStatus": {
            "dummy_alertId2": {
                "code": 5,
                "message": "NOT_FOUND"
            }
        }
    }
    expected_hr = ("### Delete Alerts\n"
                   "|Alert ID|Status|\n|---|---|"
                   "\n| dummy_alertId1 | Success |\n| dummy_alertId2 | Fail (NOT_FOUND) |\n")

    assert prepare_hr_for_batch_command(batch_response, 'Delete Alerts') == expected_hr
@patch(MOCKER_HTTP_METHOD)
def test_gsac_list_alerts_command_success(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-list-alerts runs successfully.

    Given:
    - Command args.
    When:
    - Running the gsac-list-alerts command.
    Then:
    - Ensure raw_response, outputs and readable_output match the fixtures.
    """
    from GSuiteSecurityAlertCenter import gsac_list_alerts_command

    with open('test_data/list_alert_response.json') as resp_file:
        api_response = json.load(resp_file)
    with open('test_data/list_alert_context.json') as ctx_file:
        expected_context = json.load(ctx_file)
    with open('test_data/list_alert.md') as hr_file:
        expected_readable = hr_file.read()
    mocker_http_request.return_value = api_response

    command_result = gsac_list_alerts_command(gsuite_client, {'admin_email': '<EMAIL>'})

    assert command_result.readable_output == expected_readable
    assert command_result.outputs == expected_context
    assert command_result.raw_response == api_response
@patch(MOCKER_HTTP_METHOD)
def test_gsac_list_alerts_command_with_empty_response(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-list-alerts handles an empty API response.

    Given:
    - Command args.
    When:
    - Running the gsac-list-alerts command against an empty response.
    Then:
    - Ensure the no-records message is returned.
    """
    from GSuiteSecurityAlertCenter import gsac_list_alerts_command

    mocker_http_request.return_value = {}
    command_result = gsac_list_alerts_command(gsuite_client, {'admin_email': '<EMAIL>'})

    assert command_result.readable_output == MESSAGES['NO_RECORDS_FOUND'].format('alert(s)')
@patch(MOCKER_HTTP_METHOD)
def test_gsac_list_alerts_command_wrong_argument(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-list-alerts fails on an invalid argument.

    Given:
    - Command args with a bad page token.
    When:
    - Running the gsac-list-alerts command.
    Then:
    - Ensure the underlying exception propagates.
    """
    from GSuiteSecurityAlertCenter import gsac_list_alerts_command

    error_message = "message"
    mocker_http_request.side_effect = Exception(error_message)

    with pytest.raises(Exception, match=error_message):
        gsac_list_alerts_command(gsuite_client, {'page_token': '1', 'admin_email': '<EMAIL>'})
@patch(MOCKER_HTTP_METHOD)
def test_gsac_get_alert_command_success(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-get-alert runs successfully.

    Given:
    - Command args.
    When:
    - Running the gsac-get-alert command.
    Then:
    - Ensure raw_response, outputs and readable_output match the fixtures.
    """
    from GSuiteSecurityAlertCenter import gsac_get_alert_command

    with open('test_data/get_alert_response.json') as resp_file:
        api_response = json.load(resp_file)
    with open('test_data/get_alert_context.json') as ctx_file:
        expected_context = json.load(ctx_file)
    with open('test_data/get_alert.md') as hr_file:
        expected_readable = hr_file.read()
    mocker_http_request.return_value = api_response

    command_result = gsac_get_alert_command(gsuite_client, {'alert_id': 'demoId'})

    assert command_result.readable_output == expected_readable
    assert command_result.outputs == expected_context
    assert command_result.raw_response == api_response
@patch(MOCKER_HTTP_METHOD)
def test_gsac_get_alert_command_with_empty_response(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-get-alert handles an empty API response.

    Given:
    - Command args.
    When:
    - Running the gsac-get-alert command against an empty response.
    Then:
    - Ensure the no-records message is returned.
    """
    from GSuiteSecurityAlertCenter import gsac_get_alert_command

    mocker_http_request.return_value = {}
    command_result = gsac_get_alert_command(gsuite_client, {'alert_id': 'demoId'})

    assert command_result.readable_output == MESSAGES['NO_RECORDS_FOUND'].format('alert')
def test_gsac_get_alert_command_wrong_argument(gsuite_client):
    """
    Scenario: gsac-get-alert fails on an invalid argument.

    Given:
    - Command args with an unknown alert id and no HTTP mock.
    When:
    - Running the gsac-get-alert command.
    Then:
    - Ensure an exception is raised.
    """
    from GSuiteSecurityAlertCenter import gsac_get_alert_command

    with pytest.raises(Exception):
        gsac_get_alert_command(gsuite_client, {'alert_id': 'demo_id'})
@patch(MOCKER_HTTP_METHOD)
def test_gsac_create_alert_feedback_command_success(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-create-alert-feedback runs successfully.

    Given:
    - Command args.
    When:
    - Running the gsac-create-alert-feedback command.
    Then:
    - Ensure raw_response, outputs and readable_output match the fixtures.
    """
    from GSuiteSecurityAlertCenter import gsac_create_alert_feedback_command

    # Raw response and expected context share the same fixture file.
    with open('test_data/create_alert_feedback_response.json') as resp_file:
        api_response = json.load(resp_file)
    with open('test_data/create_alert_feedback_response.json') as ctx_file:
        expected_context = json.load(ctx_file)
    with open('test_data/create_alert_feedback.md') as hr_file:
        expected_readable = hr_file.read()
    mocker_http_request.return_value = api_response

    command_result = gsac_create_alert_feedback_command(
        gsuite_client, {'feedback_type': 'NOT_USEFUL', 'alert_id': 'dummy_alertId'})

    assert command_result.readable_output == expected_readable
    assert command_result.outputs == expected_context
    assert command_result.raw_response == api_response
@patch(MOCKER_HTTP_METHOD)
def test_gsac_create_alert_feedback_command_wrong_argument(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-create-alert-feedback fails on an invalid feedback type.

    Given:
    - Command args with an unsupported feedback_type.
    When:
    - Running the gsac-create-alert-feedback command.
    Then:
    - Ensure the invalid-feedback-type exception propagates.
    """
    from GSuiteSecurityAlertCenter import gsac_create_alert_feedback_command

    error_message = MESSAGES['INVALID_FEEDBACK_TYPE_ERROR']
    mocker_http_request.side_effect = Exception(error_message)

    with pytest.raises(Exception, match=error_message):
        gsac_create_alert_feedback_command(gsuite_client,
                                           {'feedback_type': 'dummy', 'alert_id': 'dummy alertId'})
@patch(MOCKER_HTTP_METHOD)
def test_gsac_batch_delete_alerts_command_success(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-batch-delete-alerts runs successfully.

    Given:
    - Command args with multiple alert ids.
    When:
    - Running the gsac-batch-delete-alerts command.
    Then:
    - Ensure raw_response, outputs and readable_output match the fixtures.
    """
    from GSuiteSecurityAlertCenter import gsac_batch_delete_alerts_command

    with open('test_data/batch_delete_alerts_raw_response.json') as resp_file:
        api_response = json.load(resp_file)
    with open('test_data/batch_delete_alerts_context.json') as ctx_file:
        expected_context = json.load(ctx_file)
    with open('test_data/batch_delete_alerts.md') as hr_file:
        expected_readable = hr_file.read()
    mocker_http_request.return_value = api_response

    command_result = gsac_batch_delete_alerts_command(
        gsuite_client, {'alert_id': 'dummy_alertId1,dummy_alertId2'})

    assert command_result.readable_output == expected_readable
    assert command_result.outputs == expected_context
    assert command_result.raw_response == api_response
@patch(MOCKER_HTTP_METHOD)
def test_gsac_batch_recover_alerts_command_success(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-batch-recover-alerts runs successfully.

    Given:
    - Command args with multiple alert ids.
    When:
    - Running the gsac-batch-recover-alerts command.
    Then:
    - Ensure raw_response, outputs and readable_output match the fixtures.
    """
    from GSuiteSecurityAlertCenter import gsac_batch_recover_alerts_command

    with open('test_data/batch_recover_alerts_raw_response.json') as resp_file:
        api_response = json.load(resp_file)
    with open('test_data/batch_recover_alerts_context.json') as ctx_file:
        expected_context = json.load(ctx_file)
    with open('test_data/batch_recover_alerts.md') as hr_file:
        expected_readable = hr_file.read()
    mocker_http_request.return_value = api_response

    command_result = gsac_batch_recover_alerts_command(
        gsuite_client, {'alert_id': 'dummy_alertId1,dummy_alertId2'})

    assert command_result.readable_output == expected_readable
    assert command_result.outputs == expected_context
    assert command_result.raw_response == api_response
@patch(MOCKER_HTTP_METHOD)
def test_gsac_list_alert_feedback_command_success(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-list-alert-feedback runs successfully.

    Given:
    - Command args.
    When:
    - Running the gsac-list-alert-feedback command.
    Then:
    - Ensure raw_response, outputs and readable_output match the fixtures.
    """
    from GSuiteSecurityAlertCenter import gsac_list_alert_feedback_command

    with open('test_data/list_alert_feedback_response.json') as resp_file:
        api_response = json.load(resp_file)
    with open('test_data/list_alert_feedback_context.json') as ctx_file:
        expected_context = json.load(ctx_file)
    with open('test_data/list_alert_feedback.md') as hr_file:
        expected_readable = hr_file.read()
    mocker_http_request.return_value = api_response

    command_result = gsac_list_alert_feedback_command(gsuite_client, {'alert_id': 'dummy_alertId_1'})

    assert command_result.readable_output == expected_readable
    assert command_result.outputs == expected_context
    assert command_result.raw_response == api_response
@patch(MOCKER_HTTP_METHOD)
def test_gsac_list_alert_feedback_command_with_empty_response(mocker_http_request, gsuite_client):
    """
    Scenario: gsac-list-alert-feedback handles an empty API response.

    Given:
    - Command args.
    When:
    - Running the gsac-list-alert-feedback command against an empty response.
    Then:
    - Ensure the no-records message is returned.
    """
    from GSuiteSecurityAlertCenter import gsac_list_alert_feedback_command

    mocker_http_request.return_value = {}
    command_result = gsac_list_alert_feedback_command(gsuite_client, {'alert_id': 'demoId'})

    assert command_result.readable_output == MESSAGES['NO_RECORDS_FOUND'].format('feedback(s)')
def test_validate_params_for_fetch_incidents():
    """
    Scenario: Valid parameters provided for fetch-incidents.

    Given:
    - Configuration parameters with multiple alert types.
    When:
    - Calling validate_params_for_fetch_incidents with parameters.
    Then:
    - Ensure the generated filter ORs the alert types together.
    """
    from GSuiteSecurityAlertCenter import validate_params_for_fetch_incidents
    # Renamed from `input`/`filter` — the originals shadowed the builtins.
    fetch_params = {
        'alert_type': ['Suspicious login', 'User spam spike'],
        'first_fetch': '3 days',
        'max_fetch': '1'
    }
    response, _ = validate_params_for_fetch_incidents(fetch_params, {})
    filter_parts = response['filter'].split('AND')
    assert filter_parts[1] == ' (type="Suspicious login" OR type="User spam spike")'
def test_fetch_incidents(gsuite_client, mocker):
    """
    Scenario: fetch_incidents called with valid arguments.

    Given:
    - Configuration parameters.
    When:
    - Calling fetch_incidents with a mocked alert response.
    Then:
    - Ensure the produced incidents match the expected fixture.
    """
    from GSuiteSecurityAlertCenter import fetch_incidents

    params = {
        'filter': "type='Suspicious login'",
        'alert_type': 'Suspicious login',
        'first_fetch': '3 days',
        'max_fetch': '1',
        'admin_email': 'dummy'
    }
    with open('test_data/fetch_incidents_alert_response.json') as resp_file:
        alert_response = json.load(resp_file)
    with open('test_data/fetch_incidents_output.json') as out_file:
        expected_output = json.load(out_file)
    mocker.patch("demistomock.info", return_value=True)
    mocker.patch(MOCKER_HTTP_METHOD, return_value=alert_response)

    incidents, *_ = fetch_incidents(gsuite_client, {}, params)

    assert incidents == expected_output['incidents']
def test_main_fetch_incidents(mocker):
    """
    Given a working service integration,
    when fetch-incidents is invoked through main(),
    then demisto.incidents and demisto.setLastRun must receive the expected values.

    :param mocker: pytest-mock fixture.
    :return: None
    """
    from GSuiteSecurityAlertCenter import main, demisto

    with open('test_data/fetch_incidents_output.json') as out_file:
        expected_output = json.load(out_file)

    integration_params = {'user_service_account_json': TEST_JSON, 'max_incidents': 1,
                          'first_fetch': '10 minutes', 'isFetch': True, 'user_id': 'hellod'}
    mocker.patch.object(demisto, 'command', return_value='fetch-incidents')
    mocker.patch.object(demisto, 'incidents')
    mocker.patch.object(demisto, 'setLastRun')
    mocker.patch("demistomock.info", return_value=True)
    mocker.patch.object(demisto, 'params', return_value=integration_params)
    mocker.patch('GSuiteSecurityAlertCenter.fetch_incidents',
                 return_value=(expected_output['incidents'], expected_output['last_fetch']))

    main()

    demisto.incidents.assert_called_once_with(expected_output['incidents'])
    demisto.setLastRun.assert_called_once_with(expected_output['last_fetch'])
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.