code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import rhinoscriptsyntax as rs
import Rhino.Geometry as rg
import System.Drawing as sd
import Rhino.RhinoDoc as rr
import scriptcontext as sc
# Point scriptcontext at the active Rhino document so rs.* calls operate on it.
sc.doc=rr.ActiveDoc
def createColoredPoint(x, y, z, r, g, b):
    """Add a point at (x, y, z) and paint it with the RGB colour (r, g, b)."""
    point_id = rs.AddPoint(x, y, z)
    rs.ObjectColor(point_id, [r, g, b])
# FIX: `step` was referenced below but never defined, so the script crashed
# with a NameError before adding any point. 32 gives a 9x9x9 sample of the
# RGB cube; lower it for a denser cloud.
step = 32
rs.EnableRedraw(False)  # suppress per-point redraws while adding many points
for x in range(0, 256, step):
    for y in range(0, 256, step):
        for z in range(0, 256, step):
            # the point's coordinates double as its RGB colour
            createColoredPoint(x, y, z, x, y, z)
rs.Redraw()
|
[
"rhinoscriptsyntax.Redraw",
"rhinoscriptsyntax.ObjectColor",
"rhinoscriptsyntax.AddPoint",
"rhinoscriptsyntax.EnableRedraw"
] |
[((309, 331), 'rhinoscriptsyntax.EnableRedraw', 'rs.EnableRedraw', (['(False)'], {}), '(False)\n', (324, 331), True, 'import rhinoscriptsyntax as rs\n'), ((483, 494), 'rhinoscriptsyntax.Redraw', 'rs.Redraw', ([], {}), '()\n', (492, 494), True, 'import rhinoscriptsyntax as rs\n'), ((247, 267), 'rhinoscriptsyntax.AddPoint', 'rs.AddPoint', (['x', 'y', 'z'], {}), '(x, y, z)\n', (258, 267), True, 'import rhinoscriptsyntax as rs\n'), ((271, 303), 'rhinoscriptsyntax.ObjectColor', 'rs.ObjectColor', (['pt', 'currentColor'], {}), '(pt, currentColor)\n', (285, 303), True, 'import rhinoscriptsyntax as rs\n')]
|
from flask import Flask, render_template
# Create the WSGI application object for this module.
app = Flask(__name__)
@app.route('/')
def index():
    """Render the landing page."""
    template = 'home.html'
    return render_template(template)
@app.route('/puppy/<name>')
def pup_name(name):
    """Render the puppy page for the puppy called *name* (from the URL)."""
    template = 'puppy.html'
    return render_template(template, name=name)
if __name__ == "__main__":
app.run(debug=True)
|
[
"flask.Flask",
"flask.render_template"
] |
[((49, 64), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (54, 64), False, 'from flask import Flask, render_template\n'), ((106, 134), 'flask.render_template', 'render_template', (['"""home.html"""'], {}), "('home.html')\n", (121, 134), False, 'from flask import Flask, render_template\n'), ((196, 236), 'flask.render_template', 'render_template', (['"""puppy.html"""'], {'name': 'name'}), "('puppy.html', name=name)\n", (211, 236), False, 'from flask import Flask, render_template\n')]
|
import numpy as np
import tensorflow as tf
from tools.tf_tools import binary_entropy, repeat_axis
class EntropyTest(tf.test.TestCase):
    """Unit tests for tools.tf_tools.binary_entropy."""

    def test_binary_entropy_logits(self):
        """Zero logits (p=0.5) give maximal entropy; saturated logits give ~0."""
        h_max = binary_entropy(logits=[0., 0.])  # i.e. sigmoid(logits) = 0.5
        h_min = binary_entropy(logits=[100., -100.])
        with self.test_session():
            self.assertAllEqual(h_max.eval(), [1., 1.])
            self.assertAllClose(h_min.eval(), [0., 0.])

    def test_binary_entropy_probs(self):
        """Same checks, feeding explicit probabilities instead of logits."""
        h_max = binary_entropy(probs=tf.constant([0.5, 0.5]))
        h_min = binary_entropy(probs=tf.constant([0., 1.]))
        with self.test_session():
            self.assertAllEqual(h_max.eval(), [1., 1.])
            self.assertAllEqual(h_min.eval(), [0., 0.])
class RepeatsTest(tf.test.TestCase):
    """Checks tools.tf_tools.repeat_axis against numpy.repeat."""

    def test_repeat_axis(self):
        source = np.random.rand(10, 10)
        expected = np.repeat(source, repeats=5, axis=1)
        actual = repeat_axis(tf.constant(source), axis=1, repeats=5)
        with self.test_session():
            self.assertAllEqual(expected, actual.eval())
# Run the TensorFlow test runner when executed directly.
if __name__ == '__main__':
    tf.test.main()
|
[
"tensorflow.test.main",
"numpy.random.rand",
"tensorflow.constant",
"tools.tf_tools.binary_entropy",
"numpy.repeat"
] |
[((1072, 1086), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (1084, 1086), True, 'import tensorflow as tf\n'), ((193, 226), 'tools.tf_tools.binary_entropy', 'binary_entropy', ([], {'logits': '[0.0, 0.0]'}), '(logits=[0.0, 0.0])\n', (207, 226), False, 'from tools.tf_tools import binary_entropy, repeat_axis\n'), ((268, 306), 'tools.tf_tools.binary_entropy', 'binary_entropy', ([], {'logits': '[100.0, -100.0]'}), '(logits=[100.0, -100.0])\n', (282, 306), False, 'from tools.tf_tools import binary_entropy, repeat_axis\n'), ((828, 850), 'numpy.random.rand', 'np.random.rand', (['(10)', '(10)'], {}), '(10, 10)\n', (842, 850), True, 'import numpy as np\n'), ((865, 896), 'numpy.repeat', 'np.repeat', (['x'], {'repeats': '(5)', 'axis': '(1)'}), '(x, repeats=5, axis=1)\n', (874, 896), True, 'import numpy as np\n'), ((922, 936), 'tensorflow.constant', 'tf.constant', (['x'], {}), '(x)\n', (933, 936), True, 'import tensorflow as tf\n'), ((522, 545), 'tensorflow.constant', 'tf.constant', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (533, 545), True, 'import tensorflow as tf\n'), ((581, 604), 'tensorflow.constant', 'tf.constant', (['[0.0, 1.0]'], {}), '([0.0, 1.0])\n', (592, 604), True, 'import tensorflow as tf\n')]
|
"""Serializers for Certificate API"""
import django_countries
from dj_rest_auth.serializers import UserDetailsSerializer
from django.contrib.auth import password_validation
from django_countries.serializers import CountryFieldMixin
from rest_framework import serializers
from certificate_engine.types import CertificateTypes
from x509_pki.models import Certificate, DistinguishedName, KeyStore
# Country choices from django-countries.
# NOTE(review): `countries` appears unused in this module — confirm before removing.
countries = django_countries.Countries()
class DistinguishedNameSerializer(CountryFieldMixin, serializers.ModelSerializer):
    """Serializer for the subject (distinguished name) fields of a certificate."""

    class Meta:
        model = DistinguishedName
        fields = (
            "commonName",
            "countryName",
            "stateOrProvinceName",
            "localityName",
            "organizationName",
            "organizationalUnitName",
            "emailAddress",
            "subjectAltNames",
        )
class CertificateSerializer(serializers.ModelSerializer):
    """Serializer for creating and reading Certificate objects.

    All passphrase fields are write-only; the owner is always taken from the
    authenticated request user, never from the payload.
    """

    # Nested subject (distinguished name) of the certificate.
    dn = DistinguishedNameSerializer()
    passphrase_issuer = serializers.CharField(max_length=200, required=False, allow_null=True, allow_blank=True)
    passphrase_out = serializers.CharField(max_length=200, required=False, allow_null=True, allow_blank=True)
    passphrase_out_confirmation = serializers.CharField(
        max_length=200, required=False, allow_null=True, allow_blank=True
    )
    # Hidden: filled from request.user, not client input.
    owner = serializers.HiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        fields = (
            "id",
            "name",
            "owner",
            "parent",
            "type",
            "dn",
            "created_at",
            "expires_at",
            "revoked_at",
            "days_valid",
            "expired",
            "revoked",
            "crl_distribution_url",
            "ocsp_distribution_host",
            "passphrase_issuer",
            "passphrase_out",
            "passphrase_out_confirmation",
        )
        model = Certificate
        extra_kwargs = {
            "passphrase_out": {"write_only": True},
            "passphrase_out_confirmation": {"write_only": True},
            "passphrase_issuer": {"write_only": True},
        }

    def validate_passphrase_out(self, passphrase_out):
        """Run Django's password validators on the new key passphrase.

        Returns the passphrase unchanged, or None when it is empty/blank.
        """
        if passphrase_out:
            password_validation.validate_password(passphrase_out, self.instance)
            return passphrase_out
        return None

    def validate_passphrase_issuer(self, passphrase_issuer):
        """Check the issuer passphrase against the parent certificate.

        Requires a ``parent`` in the submitted data; raises ValidationError if
        it is missing or if the passphrase does not unlock the parent's key.
        """
        if passphrase_issuer:
            if not self.initial_data.get("parent"):
                raise serializers.ValidationError(
                    "You should provide a parent certificate if you provide an issuer passphrase"
                )
            parent = Certificate.objects.get(pk=self.initial_data.get("parent"))
            try:
                # NOTE(review): KeyStore.DoesNotExist is presumably raised by
                # is_passphrase_valid when the parent has no stored key.
                if not parent.is_passphrase_valid(passphrase_issuer):
                    raise serializers.ValidationError("Passphrase incorrect. Not allowed " "to revoke your certificate")
            except KeyStore.DoesNotExist:
                raise serializers.ValidationError("Certificate has no cert, something went " "wrong during generation")
            return passphrase_issuer
        return None

    def validate_passphrase_out_confirmation(self, passphrase_out_confirmation):
        """Verify the confirmation matches ``passphrase_out`` and is valid."""
        if passphrase_out_confirmation:
            passphrase_out = self.initial_data.get("passphrase_out")
            if passphrase_out and passphrase_out_confirmation and passphrase_out != passphrase_out_confirmation:
                raise serializers.ValidationError("The two passphrase fields didn't match.")
            password_validation.validate_password(passphrase_out_confirmation, self.instance)
            return passphrase_out_confirmation
        return None

    def validate(self, data):
        """Object-level validation: reject duplicate (name, owner, type)."""
        name = data.get("name")
        if not name:
            # Fall back to the subject's commonName when no name was given.
            name = str(data.get("dn").get("commonName"))
        cert_type = data.get("type")
        owner = data.get("owner")
        if Certificate.objects.filter(name=name, owner=owner, type=cert_type).count() > 0:
            raise serializers.ValidationError(f"{dict(Certificate.TYPES)[cert_type]} " f'"{name}" already exists.')
        return data

    def create(self, validated_data):
        """Create the DistinguishedName first, then the Certificate owning it."""
        dn_data = validated_data.pop("dn")
        dn = DistinguishedName.objects.create(**dn_data)
        certificate = Certificate.objects.create(dn=dn, **validated_data)
        return certificate
class CertificateRevokeSerializer(serializers.ModelSerializer):
    """Serializer for revoking a certificate; needs the issuer passphrase."""

    passphrase_issuer = serializers.CharField(max_length=200, required=True)

    class Meta:
        model = Certificate
        fields = ("passphrase_issuer",)
        extra_kwargs = {"passphrase_issuer": {"write_only": True}}

    def validate_passphrase_issuer(self, passphrase_issuer):
        """Check the passphrase against the issuing certificate's key store.

        A root certificate is its own issuer; any other certificate is
        checked against its parent.
        """
        if not passphrase_issuer:
            return None
        is_root = self.instance.type == CertificateTypes.ROOT
        revoke_issuer = self.instance if is_root else self.instance.parent
        try:
            passphrase_ok = revoke_issuer.is_passphrase_valid(passphrase_issuer)
        except KeyStore.DoesNotExist:
            raise serializers.ValidationError("Certificate has no cert, something went wrong during generation")
        if not passphrase_ok:
            raise serializers.ValidationError("Passphrase incorrect. Not allowed to revoke your certificate")
        return passphrase_issuer
class CertificateCRLSerializer(serializers.ModelSerializer):
    """Serializer for (re)generating a certificate's CRL file."""

    passphrase_issuer = serializers.CharField(max_length=200, required=True)

    class Meta:
        model = Certificate
        fields = ("passphrase_issuer",)
        extra_kwargs = {"passphrase_issuer": {"write_only": True}}

    def validate_passphrase_issuer(self, passphrase_issuer):
        """Validate the issuer passphrase by trying it on the instance."""
        if not passphrase_issuer:
            return None
        self.instance.passphrase_issuer = passphrase_issuer
        if not self.instance.is_passphrase_valid():
            raise serializers.ValidationError("Passphrase issuer incorrect. No permission to create CRL File")
        return passphrase_issuer

    def update(self, instance, validated_data):
        """Regenerate the CRL using the validated issuer passphrase."""
        instance.passphrase_issuer = validated_data["passphrase_issuer"]
        instance.generate_crl()
        return instance
class UserSerializer(UserDetailsSerializer):
    """dj-rest-auth user serializer restricted to a fixed set of fields."""

    class Meta(UserDetailsSerializer.Meta):
        fields = ("username", "email", "first_name", "last_name")
        # Username is shown but can never be changed through this API.
        read_only_fields = ("username",)
|
[
"x509_pki.models.DistinguishedName.objects.create",
"django_countries.Countries",
"django.contrib.auth.password_validation.validate_password",
"rest_framework.serializers.CharField",
"rest_framework.serializers.CurrentUserDefault",
"x509_pki.models.Certificate.objects.filter",
"x509_pki.models.Certificate.objects.create",
"rest_framework.serializers.ValidationError"
] |
[((409, 437), 'django_countries.Countries', 'django_countries.Countries', ([], {}), '()\n', (435, 437), False, 'import django_countries\n'), ((970, 1062), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(200)', 'required': '(False)', 'allow_null': '(True)', 'allow_blank': '(True)'}), '(max_length=200, required=False, allow_null=True,\n allow_blank=True)\n', (991, 1062), False, 'from rest_framework import serializers\n'), ((1080, 1172), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(200)', 'required': '(False)', 'allow_null': '(True)', 'allow_blank': '(True)'}), '(max_length=200, required=False, allow_null=True,\n allow_blank=True)\n', (1101, 1172), False, 'from rest_framework import serializers\n'), ((1203, 1295), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(200)', 'required': '(False)', 'allow_null': '(True)', 'allow_blank': '(True)'}), '(max_length=200, required=False, allow_null=True,\n allow_blank=True)\n', (1224, 1295), False, 'from rest_framework import serializers\n'), ((4481, 4533), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(200)', 'required': '(True)'}), '(max_length=200, required=True)\n', (4502, 4533), False, 'from rest_framework import serializers\n'), ((5476, 5528), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(200)', 'required': '(True)'}), '(max_length=200, required=True)\n', (5497, 5528), False, 'from rest_framework import serializers\n'), ((4246, 4289), 'x509_pki.models.DistinguishedName.objects.create', 'DistinguishedName.objects.create', ([], {}), '(**dn_data)\n', (4278, 4289), False, 'from x509_pki.models import Certificate, DistinguishedName, KeyStore\n'), ((4312, 4363), 'x509_pki.models.Certificate.objects.create', 'Certificate.objects.create', ([], {'dn': 'dn'}), '(dn=dn, **validated_data)\n', (4338, 4363), False, 'from x509_pki.models 
import Certificate, DistinguishedName, KeyStore\n'), ((1351, 1383), 'rest_framework.serializers.CurrentUserDefault', 'serializers.CurrentUserDefault', ([], {}), '()\n', (1381, 1383), False, 'from rest_framework import serializers\n'), ((2210, 2278), 'django.contrib.auth.password_validation.validate_password', 'password_validation.validate_password', (['passphrase_out', 'self.instance'], {}), '(passphrase_out, self.instance)\n', (2247, 2278), False, 'from django.contrib.auth import password_validation\n'), ((3561, 3647), 'django.contrib.auth.password_validation.validate_password', 'password_validation.validate_password', (['passphrase_out_confirmation', 'self.instance'], {}), '(passphrase_out_confirmation, self.\n instance)\n', (3598, 3647), False, 'from django.contrib.auth import password_validation\n'), ((2499, 2615), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""You should provide a parent certificate if you provide an issuer passphrase"""'], {}), "(\n 'You should provide a parent certificate if you provide an issuer passphrase'\n )\n", (2526, 2615), False, 'from rest_framework import serializers\n'), ((3478, 3548), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""The two passphrase fields didn\'t match."""'], {}), '("The two passphrase fields didn\'t match.")\n', (3505, 3548), False, 'from rest_framework import serializers\n'), ((5915, 6012), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Passphrase issuer incorrect. No permission to create CRL File"""'], {}), "(\n 'Passphrase issuer incorrect. No permission to create CRL File')\n", (5942, 6012), False, 'from rest_framework import serializers\n'), ((2838, 2934), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Passphrase incorrect. Not allowed to revoke your certificate"""'], {}), "(\n 'Passphrase incorrect. 
Not allowed to revoke your certificate')\n", (2865, 2934), False, 'from rest_framework import serializers\n'), ((2997, 3096), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Certificate has no cert, something went wrong during generation"""'], {}), "(\n 'Certificate has no cert, something went wrong during generation')\n", (3024, 3096), False, 'from rest_framework import serializers\n'), ((3934, 4000), 'x509_pki.models.Certificate.objects.filter', 'Certificate.objects.filter', ([], {'name': 'name', 'owner': 'owner', 'type': 'cert_type'}), '(name=name, owner=owner, type=cert_type)\n', (3960, 4000), False, 'from x509_pki.models import Certificate, DistinguishedName, KeyStore\n'), ((5075, 5171), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Passphrase incorrect. Not allowed to revoke your certificate"""'], {}), "(\n 'Passphrase incorrect. Not allowed to revoke your certificate')\n", (5102, 5171), False, 'from rest_framework import serializers\n'), ((5234, 5333), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Certificate has no cert, something went wrong during generation"""'], {}), "(\n 'Certificate has no cert, something went wrong during generation')\n", (5261, 5333), False, 'from rest_framework import serializers\n')]
|
# -*- coding: utf-8 -*-
"""fileio module."""
import pandas as pd # create_conf_file
import csv # write_conf_header, create_conf_file
import json # create_conf_file
import os # read_c3d_file
import btk # C3D class
import bmch # C3D class
import numpy as np # C3D class
def write_conf_header(metadata_path):
    """Create and write header in the csv configuration files.

    :param metadata_path: path to the metadata folder
    :type metadata_path: str

    Example::

        result = write_conf_header('/home/romain/Downloads/irsst/metadata/')
    """
    # one csv per configuration table, each with its own column set
    headers = {
        'emg': ['labels', 'publication_name'],
        'markers': ['labels'],
        'force': ['labels'],
        'participants': ['pseudo', 'process', 'laterality', 'group', 'mass', 'height', 'date'],
        'trials': ['folder', 'emg', 'markers', 'force']
    }
    for name in ('emg', 'markers', 'force', 'participants', 'trials'):
        csv_path = '{}{}.csv'.format(metadata_path, name)
        with open(csv_path, 'w') as out:
            csv.DictWriter(out, fieldnames=headers[name]).writeheader()
def create_conf_file(metadata_path):
    """Create a json conf file based on the csv conf files.

    :param metadata_path: path to the metadata folder
    :type metadata_path: str

    Example::

        result = write_conf_header('/home/romain/Downloads/irsst/metadata/')
    """
    names = ('emg', 'markers', 'force', 'participants', 'trials')
    # read each csv file into a DataFrame, then fold them into one json dict
    merged = {}
    for name in names:
        table = pd.read_csv('{}{}.csv'.format(metadata_path, name))
        merged[name] = json.loads(table.to_json())
    # export the merged configuration
    json_path = '{}config.json'.format(metadata_path)
    with open(json_path, 'w') as json_data:
        json_data.write(json.dumps(merged, indent=4))
def load_conf_file(metadata_path):
    """Load the json configuration file create with the function `create_conf_file`.

    :param metadata_path: path to the metadata folder
    :type metadata_path: str

    Example::

        result = load_conf_file('/home/romain/Downloads/irsst/metadata/')
    """
    config_path = '{}config.json'.format(metadata_path)
    with open(config_path, 'r') as handle:
        return json.load(handle)
def save_conf_file(metadata_path, json_file):
    """Write *json_file* back to ``config.json`` in the metadata folder.

    :param metadata_path: path to the metadata folder
    :type metadata_path: str
    :param json_file: configuration dict to serialize
    """
    target = '{}config.json'.format(metadata_path)
    with open(target, 'w') as handle:
        handle.write(json.dumps(json_file, indent=4))
class C3D:
    """C3D class read c3d files and return data.

    :param data_folders: dict with path to the data folder(s) as key and type
        (*markers and/or emg and/or force*) as value
    :type data_folders: dict

    Example::

        data_folders = {'/home/romain/Downloads/irsst/inputs/DapO/mvc/': ['emg'],
                        '/home/romain/Downloads/irsst/inputs/DapO/score/': ['markers']}
        c3d = load_conf_file(data_folders)
        c3d.read_data()
    """

    def __init__(self, data_folders, conf_file):
        """Constructor for C3D.

        :param data_folders: dict mapping folder path -> kinds to extract
        :param conf_file: configuration dict (see `load_conf_file`)
        """
        print('import c3d files from:')
        self.folders = data_folders
        self.conf_file = conf_file
        # channel-assignment choices accumulated per processed file
        self.assign = []

    def read_data(self):
        # todo complete return docstring
        """Read data from `self.folders`: every .c3d file in each folder.

        :return
        """
        for ifolder, kind in self.folders.items():
            print('\t{}'.format(ifolder))
            c3d_files = [f for f in os.listdir(ifolder) if f.endswith('.c3d')]
            for ifile in c3d_files:
                print('\t\t{}'.format(ifile))
                file = os.path.join(ifolder, ifile)
                metadata, markers, analogs = self._open_file(file, kind)
                # FIX: the original ended with the bare name `save_assign`,
                # which raised NameError; the assignment bookkeeping is done
                # in _attribute_channels via self.assign.

    def _open_file(self, file, kind):
        """Open c3d acquisition (*private function*).

        :param file: path to the c3d file
        :type file: str
        :param kind: type (*markers and/or emg and/or force*)
        :type kind: list
        """
        reader = btk.btkAcquisitionFileReader()
        reader.SetFilename(file)
        reader.Update()
        acq = reader.GetOutput()
        metadata = {'first_frame': acq.GetFirstFrame(), 'last_frame': acq.GetLastFrame()}
        data = {}
        for i in ['markers', 'force', 'emg']:
            if i in kind:
                # FIX: was `i is 'markers'` — identity comparison on a string
                # literal is implementation-dependent; use equality.
                if i == 'markers':
                    metadata.update({'point_rate': acq.GetPointFrequency(), 'point_used': acq.GetPointNumber()})
                    data_temp = self._iterate(acq=acq, kind='markers')
                    n = metadata['last_frame']
                else:
                    # force and emg both live on the analog channels
                    metadata.update({'analog_rate': acq.GetAnalogFrequency(), 'analog_used': acq.GetAnalogNumber()})
                    data_temp = self._iterate(acq=acq, kind='analogs')
                    n = (metadata['last_frame'] * metadata['analog_rate']) / acq.GetPointFrequency()
                data[i] = self._attribute_channels(data_temp, kind=i, frames=n)
            else:
                data[i] = None
        # FIX: the original returned None while read_data unpacks three values.
        # NOTE(review): 'force' data is computed but not part of this tuple —
        # confirm the intended return shape with the caller.
        return metadata, data['markers'], data['emg']

    def _attribute_channels(self, data_temp, kind, frames):
        """Map recorded channels onto the target labels from the conf file
        (*private function*); returns a (frames, n_targets) array.

        NOTE(review): the hard-coded `gui` list below looks like a leftover
        from a debugging session; the interactive GUI at the end is still run
        but its result is only recorded in self.assign.
        """
        fields = list(data_temp.keys())
        targets = list(self.conf_file[kind]['labels'].values())
        gui = ['Delt_ant.EMG1',
               'Delt_med.EMG2',
               'Delt_post.EMG3',
               'Biceps.EMG4',
               'Triceps.EMG5',
               'Trap_sup.EMG6',
               'Pec.IM EMG12',
               'Supra.EMG9',
               'Infra.EMG10']
        output = np.zeros((int(frames), len(targets)))
        for i, iassign in enumerate(gui):
            output[:, i] = np.squeeze(data_temp[iassign])
        # check if all target are in fields
        # check if all previous assign are in fields
        # GUI
        gui = bmch.util.GuiC3D(targets, fields)
        self.assign.append(gui.assign)
        return output

    @staticmethod
    def _iterate(acq, kind='markers'):
        """Iterate through a btkCollection object (*private function*) and return data as dict.

        :param acq: btkAcquisition object
        :type acq: btk.btkAcquisition
        :param kind: type of the data (*markers or analogs*)
        :type kind: str
        """
        out = {}
        if kind == 'markers':
            iterator = btk.Iterate(acq.GetPoints())
        elif kind == 'analogs':
            iterator = btk.Iterate(acq.GetAnalogs())
        else:
            iterator = []
        for it in iterator:
            data_temp = it.GetValues()
            if data_temp.any():
                # only keep channels with at least one non-zero sample
                out.update({it.GetLabel(): data_temp})
        return out
|
[
"json.load",
"json.dumps",
"bmch.util.GuiC3D",
"btk.btkAcquisitionFileReader",
"numpy.squeeze",
"os.path.join",
"os.listdir",
"csv.DictWriter"
] |
[((2284, 2304), 'json.load', 'json.load', (['json_data'], {}), '(json_data)\n', (2293, 2304), False, 'import json\n'), ((4011, 4041), 'btk.btkAcquisitionFileReader', 'btk.btkAcquisitionFileReader', ([], {}), '()\n', (4039, 4041), False, 'import btk\n'), ((5891, 5924), 'bmch.util.GuiC3D', 'bmch.util.GuiC3D', (['targets', 'fields'], {}), '(targets, fields)\n', (5907, 5924), False, 'import bmch\n'), ((1022, 1068), 'csv.DictWriter', 'csv.DictWriter', (['out'], {'fieldnames': 'headers[ifile]'}), '(out, fieldnames=headers[ifile])\n', (1036, 1068), False, 'import csv\n'), ((1839, 1870), 'json.dumps', 'json.dumps', (['json_file'], {'indent': '(4)'}), '(json_file, indent=4)\n', (1849, 1870), False, 'import json\n'), ((2475, 2506), 'json.dumps', 'json.dumps', (['json_file'], {'indent': '(4)'}), '(json_file, indent=4)\n', (2485, 2506), False, 'import json\n'), ((5697, 5727), 'numpy.squeeze', 'np.squeeze', (['data_temp[iassign]'], {}), '(data_temp[iassign])\n', (5707, 5727), True, 'import numpy as np\n'), ((3615, 3643), 'os.path.join', 'os.path.join', (['ifolder', 'ifile'], {}), '(ifolder, ifile)\n', (3627, 3643), False, 'import os\n'), ((3467, 3486), 'os.listdir', 'os.listdir', (['ifolder'], {}), '(ifolder)\n', (3477, 3486), False, 'import os\n')]
|
import pandas as pd
def combine_reciprocal_hits(keep_df, other_df):
    """Fold rows of *other_df* whose index is absent from *keep_df* into it.

    A row is only added when its 'B_id' hit is not already present in
    keep_df['B_id']; added rows are padded with None in the other columns.
    Mutates and returns *keep_df*.
    """
    missed = set(other_df.index.values) - set(keep_df.index.values)
    for sample in missed:
        hit = other_df.loc[sample, 'B_id']
        # membership re-checked each pass: rows added above count too
        if hit not in keep_df['B_id'].values:
            padding = [None] * (keep_df.shape[1] - 1)
            keep_df.loc[sample] = [hit] + padding
    return keep_df
def combine_single_hits(keep_df, other_df):
    """Add one row per blast subject of *other_df* not yet in keep_df['B_id'].

    For each new subject the query with the best bitscore becomes the row's
    first value; other columns are padded with None. Mutates and returns
    *keep_df*.
    """
    new_subjects = set(other_df['subject'].unique()) - set(keep_df['B_id'].values)
    for subject in new_subjects:
        mask = other_df['subject'] == subject
        best_query = other_df['bitscore'][mask].idxmax()
        keep_df.loc[subject] = [best_query] + [None] * (keep_df.shape[1] - 1)
    return keep_df
def add_uniprot_annotations(sample_df, uniprot):
    """Append 'UniProt.ID' / 'UniProt.Name' columns parsed from *uniprot*.

    The 'subject' column is expected in pipe-delimited form
    (e.g. ``sp|P12345|NAME_HUMAN``); the second and third fields are used.
    Returns the outer concat of *sample_df* and the new columns.
    """
    gene_df = pd.DataFrame(index=uniprot.index.values,
                           columns=["UniProt.ID", "UniProt.Name"],
                           dtype=str)
    for idx in uniprot.index.values:
        prot_id, prot_name = uniprot.loc[idx, 'subject'].split('|')[1:]
        if not (isinstance(prot_id, str) and isinstance(prot_name, str)):
            continue
        gene_df.loc[idx, 'UniProt.ID'] = prot_id
        gene_df.loc[idx, 'UniProt.Name'] = prot_name
    return pd.concat([sample_df, gene_df], axis=1, join='outer', sort=False)
def add_interpro_annotations(sample_df, interpro_file):
    """Append 'IPR.IDs' / 'IPR.Desc' columns parsed from an InterPro file.

    Each tab-separated line is ``evm_id <skip> "IPR; desc" ...``; the ids and
    descriptions of one gene are joined with ';'. Returns the outer concat of
    *sample_df* and the parsed table.
    """
    records = {'evm': [], 'IPR.IDs': [], 'IPR.Desc': []}
    with open(interpro_file, 'r') as handle:
        for raw in handle:
            fields = raw.strip().split('\t')
            ids, descs = [], []
            for entry in fields[2:]:
                ipr_id, ipr_desc = entry.split(';')
                ids.append(ipr_id.strip())
                descs.append(ipr_desc.strip())
            records['evm'].append(fields[0])
            records['IPR.IDs'].append(';'.join(ids))
            records['IPR.Desc'].append(';'.join(descs))
    ipr = pd.DataFrame(records)
    ipr.set_index('evm', inplace=True)
    return pd.concat([sample_df, ipr], axis=1, join='outer', sort=False)
def add_kegg_annotations(sample_df, kegg_file):
    """Append a 'KEGG.IDs' column parsed from a KEGG blast file.

    Uses column 0 (gene id) and column 4 (KEGG id) of each tab-separated
    line. Returns the outer concat of *sample_df* and the parsed table.
    """
    records = {'evm': [], 'KEGG.IDs': []}
    with open(kegg_file, 'r') as handle:
        for raw in handle:
            fields = raw.strip().split('\t')
            records['evm'].append(fields[0])
            records['KEGG.IDs'].append(fields[4])
    kegg = pd.DataFrame(records)
    kegg.set_index('evm', inplace=True)
    return pd.concat([sample_df, kegg], axis=1, join='outer', sort=False)
def add_ncbi_annotations(sample_df, ncbi):
    """Append an 'NCBI.ID' column parsed from the *ncbi* blast table.

    The 'subject' column is pipe-delimited; the second-to-last field is the
    accession. Returns the outer concat of *sample_df* and the new column.

    FIX: the result frame was previously indexed by the module-global
    ``uniprot`` table instead of the ``ncbi`` argument, which broke the
    function outside the __main__ script and could mis-align rows.
    """
    gene_df = pd.DataFrame(index=ncbi.index.values,
                           columns=["NCBI.ID"], dtype=str)
    for idx in ncbi.index.values:
        gene_df.loc[idx, 'NCBI.ID'] = ncbi.loc[idx, 'subject'].split('|')[-2]
    return pd.concat([sample_df, gene_df], axis=1, join='outer', sort=False)
def add_trembl_annotations(sample_df, tremble):
    """Append a 'TrEMBL.ID' column parsed from the *tremble* blast table.

    The 'subject' column is pipe-delimited (``tr|ID|NAME``); the second field
    is the TrEMBL accession. Returns the outer concat of *sample_df* and the
    new column.

    FIX: the original body ignored the ``tremble`` argument and read the
    module-globals ``uniprot`` (for the index) and ``ncbi`` (for the values),
    so the TrEMBL column was filled from the wrong table.
    """
    gene_df = pd.DataFrame(index=tremble.index.values,
                           columns=["TrEMBL.ID"], dtype=str)
    for idx in tremble.index.values:
        gene_df.loc[idx, 'TrEMBL.ID'] = tremble.loc[idx, 'subject'].split('|')[1]
    return pd.concat([sample_df, gene_df], axis=1, join='outer', sort=False)
if __name__ == "__main__":
blast_columns = ['subject', 'perc.id', 'length', 'mismatch', 'gapopen',
'qstart', 'qend', 'sstart', 'send', 'evalue', 'bitscore']
protein_models = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/ProteinModels_SPU_BestHits_peptide.txt",
sep='\t', index_col=0)
transcripts_pep = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/SPU_BestHits_peptide.txt",
sep='\t', index_col=0)
transcripts_nuc = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/SPU_BestHits.txt",
sep='\t', index_col=0)
homologues = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/best_spu_aligns.blastn",
sep='\t', header=None, index_col=0,
names=blast_columns)
uniprot = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.SwissProt.blast",
sep='\t', header=None, index_col=0,
names=blast_columns)
interpro_file = "/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.ipr"
kegg_file = "/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.KEGG.blast"
ncbi = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.nr.blast",
sep='\t', header=None, index_col=0, names=blast_columns)
trembl = pd.read_csv("/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.TrEMBL.blast",
sep='\t', header=None, index_col=0,
names=blast_columns)
annotations = combine_reciprocal_hits(pd.DataFrame(protein_models['B_id']),
pd.DataFrame(transcripts_pep['B_id']))
annotations = combine_reciprocal_hits(annotations,
pd.DataFrame(transcripts_nuc))
annotations = combine_single_hits(annotations, homologues)
annotations.columns.values[0] = 'SPU'
annotations = add_uniprot_annotations(annotations, uniprot)
annotations = add_interpro_annotations(annotations, interpro_file)
annotations = add_kegg_annotations(annotations, kegg_file)
annotations = add_ncbi_annotations(annotations, ncbi)
annotations = add_trembl_annotations(annotations, trembl)
annotations.to_csv('/home/dakota/SequenceData/evm_annotations.csv')
|
[
"pandas.DataFrame",
"pandas.read_csv",
"pandas.concat"
] |
[((940, 1035), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'uniprot.index.values', 'columns': "['UniProt.ID', 'UniProt.Name']", 'dtype': 'str'}), "(index=uniprot.index.values, columns=['UniProt.ID',\n 'UniProt.Name'], dtype=str)\n", (952, 1035), True, 'import pandas as pd\n'), ((1384, 1449), 'pandas.concat', 'pd.concat', (['[sample_df, gene_df]'], {'axis': '(1)', 'join': '"""outer"""', 'sort': '(False)'}), "([sample_df, gene_df], axis=1, join='outer', sort=False)\n", (1393, 1449), True, 'import pandas as pd\n'), ((2086, 2104), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (2098, 2104), True, 'import pandas as pd\n'), ((2155, 2216), 'pandas.concat', 'pd.concat', (['[sample_df, ipr]'], {'axis': '(1)', 'join': '"""outer"""', 'sort': '(False)'}), "([sample_df, ipr], axis=1, join='outer', sort=False)\n", (2164, 2216), True, 'import pandas as pd\n'), ((2521, 2539), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (2533, 2539), True, 'import pandas as pd\n'), ((2591, 2653), 'pandas.concat', 'pd.concat', (['[sample_df, kegg]'], {'axis': '(1)', 'join': '"""outer"""', 'sort': '(False)'}), "([sample_df, kegg], axis=1, join='outer', sort=False)\n", (2600, 2653), True, 'import pandas as pd\n'), ((2728, 2800), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'uniprot.index.values', 'columns': "['NCBI.ID']", 'dtype': 'str'}), "(index=uniprot.index.values, columns=['NCBI.ID'], dtype=str)\n", (2740, 2800), True, 'import pandas as pd\n'), ((2951, 3016), 'pandas.concat', 'pd.concat', (['[sample_df, gene_df]'], {'axis': '(1)', 'join': '"""outer"""', 'sort': '(False)'}), "([sample_df, gene_df], axis=1, join='outer', sort=False)\n", (2960, 3016), True, 'import pandas as pd\n'), ((3084, 3158), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'uniprot.index.values', 'columns': "['TrEMBL.ID']", 'dtype': 'str'}), "(index=uniprot.index.values, columns=['TrEMBL.ID'], dtype=str)\n", (3096, 3158), True, 'import pandas as pd\n'), ((3310, 3375), 
'pandas.concat', 'pd.concat', (['[sample_df, gene_df]'], {'axis': '(1)', 'join': '"""outer"""', 'sort': '(False)'}), "([sample_df, gene_df], axis=1, join='outer', sort=False)\n", (3319, 3375), True, 'import pandas as pd\n'), ((3580, 3710), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/ProteinModels_SPU_BestHits_peptide.txt"""'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(\n '/home/dakota/SequenceData/GenomeAnnotations/ProteinModels_SPU_BestHits_peptide.txt'\n , sep='\\t', index_col=0)\n", (3591, 3710), True, 'import pandas as pd\n'), ((3756, 3871), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/SPU_BestHits_peptide.txt"""'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(\n '/home/dakota/SequenceData/GenomeAnnotations/SPU_BestHits_peptide.txt',\n sep='\\t', index_col=0)\n", (3767, 3871), True, 'import pandas as pd\n'), ((3918, 4020), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/SPU_BestHits.txt"""'], {'sep': '"""\t"""', 'index_col': '(0)'}), "('/home/dakota/SequenceData/GenomeAnnotations/SPU_BestHits.txt',\n sep='\\t', index_col=0)\n", (3929, 4020), True, 'import pandas as pd\n'), ((4067, 4214), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/best_spu_aligns.blastn"""'], {'sep': '"""\t"""', 'header': 'None', 'index_col': '(0)', 'names': 'blast_columns'}), "(\n '/home/dakota/SequenceData/GenomeAnnotations/best_spu_aligns.blastn',\n sep='\\t', header=None, index_col=0, names=blast_columns)\n", (4078, 4214), True, 'import pandas as pd\n'), ((4278, 4511), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.SwissProt.blast"""'], {'sep': '"""\t"""', 'header': 'None', 'index_col': '(0)', 'names': 'blast_columns'}), "(\n '/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. 
gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.SwissProt.blast'\n , sep='\\t', header=None, index_col=0, names=blast_columns)\n", (4289, 4511), True, 'import pandas as pd\n'), ((4892, 5118), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.nr.blast"""'], {'sep': '"""\t"""', 'header': 'None', 'index_col': '(0)', 'names': 'blast_columns'}), "(\n '/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.nr.blast'\n , sep='\\t', header=None, index_col=0, names=blast_columns)\n", (4903, 5118), True, 'import pandas as pd\n'), ((5145, 5375), 'pandas.read_csv', 'pd.read_csv', (['"""/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.TrEMBL.blast"""'], {'sep': '"""\t"""', 'header': 'None', 'index_col': '(0)', 'names': 'blast_columns'}), "(\n '/home/dakota/SequenceData/GenomeAnnotations/Echinoderm_project/sea_urchin/5. gene_function_annotation/Lytechinus_variegatus_EVM_out_pep.TrEMBL.blast'\n , sep='\\t', header=None, index_col=0, names=blast_columns)\n", (5156, 5375), True, 'import pandas as pd\n'), ((5458, 5494), 'pandas.DataFrame', 'pd.DataFrame', (["protein_models['B_id']"], {}), "(protein_models['B_id'])\n", (5470, 5494), True, 'import pandas as pd\n'), ((5538, 5575), 'pandas.DataFrame', 'pd.DataFrame', (["transcripts_pep['B_id']"], {}), "(transcripts_pep['B_id'])\n", (5550, 5575), True, 'import pandas as pd\n'), ((5674, 5703), 'pandas.DataFrame', 'pd.DataFrame', (['transcripts_nuc'], {}), '(transcripts_nuc)\n', (5686, 5703), True, 'import pandas as pd\n')]
|
import argparse
import getpass
import json
import os
import subprocess
import sys
from axolpy import configuration, logging, solidity
from web3 import Web3
def init_arg_parser() -> argparse.ArgumentParser:
"""
Initialize argument parser.
:return: An argument parser for inputs.
:rtype: :class:`argparse.ArgumentParser`
"""
arg_parser = argparse.ArgumentParser(
description="Deploy smart contract to Ethereum.")
arg_parser.add_argument("-k", "--private-key")
arg_parser.add_argument("-c", "--contract-name", required=True)
arg_parser.add_argument("--solidity-compiler-version")
return arg_parser
arg_parser = init_arg_parser()
args = arg_parser.parse_args()
logger = logging.get_logger(name=os.path.basename(__file__))
logging.set_level(logging.INFO)
logging.show_milliseconds()
config = configuration.AxolpyConfigManager.get_context(name="blockchain")
base_path = config["main"]["base_path"]
private_key: str = args.private_key if args.private_key else getpass.getpass(
prompt="Private Key: ")
contract_name: str = args.contract_name
contract_filepath: str = f'{config["main"]["contracts.path"]}/{contract_name}.sol'
solidity_compiler_version: str = args.solidity_compiler_version \
if args.solidity_compiler_version \
else config["main"]["solidity.compiler.version"]
contract_content: str = None
logger.info(f"Reading contract file {contract_filepath}")
with open(contract_filepath, "r") as file:
contract_content = file.read()
compiled_sol = solidity.SolidityHelper.solcx_compile_standard(
source_name=f"{contract_name}.sol",
source_content=contract_content,
solidity_compiler_version=solidity_compiler_version)
with open(f'{config["main"]["distribution.path"]}/{contract_name}.json', "w") as file:
json.dump(compiled_sol, file)
# Get bytecode and abi from compiled solidity file
bytecode: str = compiled_sol["contracts"][f"{contract_name}.sol"][contract_name]["evm"]["bytecode"]["object"]
abi: str = compiled_sol["contracts"][f"{contract_name}.sol"][contract_name]["abi"]
# Connect to the provider
w3: Web3 = Web3(Web3.HTTPProvider(config["web3"]["http_provider"]))
chain_id: int = config["main"].getint("chain.id")
wallet_address: str = config["wallet"]["local.address.0"]
nonce = w3.eth.get_transaction_count(wallet_address)
w3contract = w3.eth.contract(abi=abi, bytecode=bytecode)
logger.info(f"Wallet address is {wallet_address}")
contract_txn = w3contract.constructor().buildTransaction(
{"chainId": chain_id, "from": wallet_address, "nonce": nonce})
# Sign the transaction and send it to the network
signed_contract_txn = w3.eth.account.sign_transaction(
contract_txn, private_key)
logger.info("Deploying contract ...")
tx_hash = w3.eth.send_raw_transaction(
transaction=signed_contract_txn.rawTransaction)
logger.info("Waiting for transaction receipt ...")
tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
logger.info(f"Contract is deployed to {tx_receipt.contractAddress}")
# Run the corresponding script we built for trial run
with subprocess.Popen([sys.executable,
f"{base_path}/bin/blockchain/call-partner-agreement.py",
"--contract-address",
tx_receipt.contractAddress],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT) as proc:
while True:
line = proc.stdout.readline()
if not line:
break
print(line.decode("utf-8").rstrip())
proc.stdout.close()
|
[
"json.dump",
"subprocess.Popen",
"argparse.ArgumentParser",
"axolpy.logging.show_milliseconds",
"axolpy.solidity.SolidityHelper.solcx_compile_standard",
"web3.Web3.HTTPProvider",
"getpass.getpass",
"os.path.basename",
"axolpy.configuration.AxolpyConfigManager.get_context",
"axolpy.logging.set_level"
] |
[((774, 805), 'axolpy.logging.set_level', 'logging.set_level', (['logging.INFO'], {}), '(logging.INFO)\n', (791, 805), False, 'from axolpy import configuration, logging, solidity\n'), ((806, 833), 'axolpy.logging.show_milliseconds', 'logging.show_milliseconds', ([], {}), '()\n', (831, 833), False, 'from axolpy import configuration, logging, solidity\n'), ((844, 908), 'axolpy.configuration.AxolpyConfigManager.get_context', 'configuration.AxolpyConfigManager.get_context', ([], {'name': '"""blockchain"""'}), "(name='blockchain')\n", (889, 908), False, 'from axolpy import configuration, logging, solidity\n'), ((1521, 1698), 'axolpy.solidity.SolidityHelper.solcx_compile_standard', 'solidity.SolidityHelper.solcx_compile_standard', ([], {'source_name': 'f"""{contract_name}.sol"""', 'source_content': 'contract_content', 'solidity_compiler_version': 'solidity_compiler_version'}), "(source_name=\n f'{contract_name}.sol', source_content=contract_content,\n solidity_compiler_version=solidity_compiler_version)\n", (1567, 1698), False, 'from axolpy import configuration, logging, solidity\n'), ((364, 437), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Deploy smart contract to Ethereum."""'}), "(description='Deploy smart contract to Ethereum.')\n", (387, 437), False, 'import argparse\n'), ((1011, 1050), 'getpass.getpass', 'getpass.getpass', ([], {'prompt': '"""Private Key: """'}), "(prompt='Private Key: ')\n", (1026, 1050), False, 'import getpass\n'), ((1795, 1824), 'json.dump', 'json.dump', (['compiled_sol', 'file'], {}), '(compiled_sol, file)\n', (1804, 1824), False, 'import json\n'), ((2113, 2163), 'web3.Web3.HTTPProvider', 'Web3.HTTPProvider', (["config['web3']['http_provider']"], {}), "(config['web3']['http_provider'])\n", (2130, 2163), False, 'from web3 import Web3\n'), ((3065, 3269), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, f'{base_path}/bin/blockchain/call-partner-agreement.py',\n '--contract-address', 
tx_receipt.contractAddress]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), "([sys.executable,\n f'{base_path}/bin/blockchain/call-partner-agreement.py',\n '--contract-address', tx_receipt.contractAddress], stdout=subprocess.\n PIPE, stderr=subprocess.STDOUT)\n", (3081, 3269), False, 'import subprocess\n'), ((746, 772), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (762, 772), False, 'import os\n')]
|
import torch
# Path or parameters for data
DATA_DIR = 'data'
SP_DIR = f'{DATA_DIR}/sp'
SRC_DIR = 'src'
TRG_DIR = 'trg'
SRC_RAW_DATA_NAME = 'raw_data.src'
TRG_RAW_DATA_NAME = 'raw_data.trg'
TRAIN_NAME = 'train.txt'
VALID_NAME = 'valid.txt'
TEST_NAME = 'test.txt'
# Parameters for sentencepiece tokenizer
pad_id = 0
sos_id = 1
eos_id = 2
unk_id = 3
src_model_prefix = 'src_sp'
trg_model_prefix = 'trg_sp'
sp_vocab_size = 16000
character_coverage = 1.0
model_type = 'unigram'
# Parameters for Transformer & training
device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
learning_rate = 1e-4
batch_size = 80
seq_len = 200
num_heads = 8
num_layers = 6
d_model = 512
d_ff = 2048
d_k = d_model // num_heads
drop_out_rate = 0.1
num_epochs = 10
beam_size = 8
ckpt_dir = 'saved_model'
|
[
"torch.cuda.is_available",
"torch.device"
] |
[((549, 574), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (572, 574), False, 'import torch\n'), ((525, 545), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (537, 545), False, 'import torch\n'), ((580, 599), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (592, 599), False, 'import torch\n')]
|
import numpy as np
import cv2
import pandas as pd
import face_recognition as fc
import time
import random as rd
import smtplib
import xlrd
fcc=0
v=cv2.VideoCapture(0)
fd=cv2.CascadeClassifier(r"C:\Users\HP\AppData\Local\Programs\Python\Python36\Lib\site-packages\cv2\data\haarcascade_frontalface_alt2.xml")
def cap():
ret,i=v.read()
j=cv2.cvtColor(i,cv2.COLOR_BGR2GRAY)
f=fd.detectMultiScale(j)
if len(f)==1:
for(x,y,w,h) in f:
image=i[y:y+h,x:x+w].copy()
fl=fc.face_locations(image)
fcl=fc.face_encodings(image,fl)
cv2.imshow('image',image)
k= cv2.waitKey(5)
return fcl
break
else:
print("Face not Detected")
def genotp():
ran=rd.random()
otp=ran*10000
return int(otp)
def enterdata():
name=input("Enter Name: ")
roll=input("Enter Roll No.: ")
number=int(input("Enter MObile Number: "))
email=input("Enter E-Mail: ")
print("Hold Still The Camera will initialize to detect your face in few seconds")
print("Name:",name,"\nRoll",roll,"\nNumber",number,"\nEmail",email)
time.sleep(2)
q=0
while(q!=1):
try:
fcc=cap()
if len(fcc) != 0:
print("Successfully Entered Data OTP is sent to your email")
q=1
return name,roll,number,email,fcc
except:
pass
def sendmail(email,otp):
server=smtplib.SMTP('smtp.gmail.com',587)
server.starttls()
server.login("Enter your mail","Enter your password")
msg="Subject: OTP is "+str(otp)+" \nWelcome to our Institute!\nTo Complete Registration Please Enter the following OTP:"+str(otp)+" \nThank you for enrolling with us."
server.sendmail("<NAME>",email,msg)
Data = pd.read_excel("Data.xlsx")
df = pd.DataFrame(Data)
Data1 = pd.read_excel("Attendance.xlsx")
df1 = pd.DataFrame(Data1)
name,roll,number,email,fcc = enterdata()
v.release()
otp=genotp()
sendmail(email,otp)
q=0
dataf=pd.DataFrame({"Name":[name],
"Roll":[roll],
"Number":[number],
"Email":[email],
"Encoding":list(fcc)})
dataf1=pd.DataFrame({'Name':[name],
'Email':[email]})
while(q!=1):
eotp=int(input("Enter OTP"))
if eotp==otp:
q=1
df=df.append(dataf,ignore_index=True,sort=False)
df.to_excel("Data.xlsx",index=False)
df1=df1.append(dataf1,ignore_index=True,sort=False)
df1.to_excel("Attendance.xlsx",index=False)
print("Success")
else :
print("Re-Enter OTP")
|
[
"pandas.DataFrame",
"smtplib.SMTP",
"cv2.cvtColor",
"cv2.waitKey",
"face_recognition.face_encodings",
"time.sleep",
"cv2.VideoCapture",
"pandas.read_excel",
"random.random",
"cv2.CascadeClassifier",
"face_recognition.face_locations",
"cv2.imshow"
] |
[((158, 177), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (174, 177), False, 'import cv2\n'), ((182, 339), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""C:\\\\Users\\\\HP\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python36\\\\Lib\\\\site-packages\\\\cv2\\\\data\\\\haarcascade_frontalface_alt2.xml"""'], {}), "(\n 'C:\\\\Users\\\\HP\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python36\\\\Lib\\\\site-packages\\\\cv2\\\\data\\\\haarcascade_frontalface_alt2.xml'\n )\n", (203, 339), False, 'import cv2\n'), ((1924, 1950), 'pandas.read_excel', 'pd.read_excel', (['"""Data.xlsx"""'], {}), "('Data.xlsx')\n", (1937, 1950), True, 'import pandas as pd\n'), ((1957, 1975), 'pandas.DataFrame', 'pd.DataFrame', (['Data'], {}), '(Data)\n', (1969, 1975), True, 'import pandas as pd\n'), ((1985, 2017), 'pandas.read_excel', 'pd.read_excel', (['"""Attendance.xlsx"""'], {}), "('Attendance.xlsx')\n", (1998, 2017), True, 'import pandas as pd\n'), ((2025, 2044), 'pandas.DataFrame', 'pd.DataFrame', (['Data1'], {}), '(Data1)\n', (2037, 2044), True, 'import pandas as pd\n'), ((2342, 2390), 'pandas.DataFrame', 'pd.DataFrame', (["{'Name': [name], 'Email': [email]}"], {}), "({'Name': [name], 'Email': [email]})\n", (2354, 2390), True, 'import pandas as pd\n'), ((366, 401), 'cv2.cvtColor', 'cv2.cvtColor', (['i', 'cv2.COLOR_BGR2GRAY'], {}), '(i, cv2.COLOR_BGR2GRAY)\n', (378, 401), False, 'import cv2\n'), ((853, 864), 'random.random', 'rd.random', ([], {}), '()\n', (862, 864), True, 'import random as rd\n'), ((1241, 1254), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1251, 1254), False, 'import time\n'), ((1579, 1614), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.gmail.com"""', '(587)'], {}), "('smtp.gmail.com', 587)\n", (1591, 1614), False, 'import smtplib\n'), ((555, 579), 'face_recognition.face_locations', 'fc.face_locations', (['image'], {}), '(image)\n', (572, 579), True, 'import face_recognition as fc\n'), ((601, 629), 'face_recognition.face_encodings', 
'fc.face_encodings', (['image', 'fl'], {}), '(image, fl)\n', (618, 629), True, 'import face_recognition as fc\n'), ((646, 672), 'cv2.imshow', 'cv2.imshow', (['"""image"""', 'image'], {}), "('image', image)\n", (656, 672), False, 'import cv2\n'), ((692, 706), 'cv2.waitKey', 'cv2.waitKey', (['(5)'], {}), '(5)\n', (703, 706), False, 'import cv2\n')]
|
import json
from urllib.parse import unquote
from django.http import HttpRequest
from django.template.response import TemplateResponse
class CookieConsentMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
return self.get_response(request)
def process_template_response(
self, request: HttpRequest, response: TemplateResponse
) -> TemplateResponse:
response.context_data["showGTM"] = False
cookie_policy = request.COOKIES.get("cookies_policy", None)
dont_show_cookie_notice = request.COOKIES.get("dontShowCookieNotice", None)
if cookie_policy:
decoder = json.JSONDecoder()
decoded = decoder.decode(unquote(cookie_policy))
response.context_data["showGTM"] = decoded["usage"] or False
if dont_show_cookie_notice:
if dont_show_cookie_notice == "true":
response.context_data["dontShowCookieNotice"] = True
return response
|
[
"urllib.parse.unquote",
"json.JSONDecoder"
] |
[((700, 718), 'json.JSONDecoder', 'json.JSONDecoder', ([], {}), '()\n', (716, 718), False, 'import json\n'), ((756, 778), 'urllib.parse.unquote', 'unquote', (['cookie_policy'], {}), '(cookie_policy)\n', (763, 778), False, 'from urllib.parse import unquote\n')]
|
# Email received from Leopold Mozart:
#
# From: "<NAME>" <<EMAIL>>
# Date: Thu, 1 Sep 2016 01:39:31 -0700
# Message-ID: <<EMAIL>>
# Subject: Re: my broken zip Re: sorry
# MIME-Version: 1.0
# Content-Type: text/plain; charset=UTF-8
# Content-Transfer-Encoding: 7bit
# Content-Disposition: inline
# Precedence: bulk
# X-Autoreply: yes
# Auto-Submitted: auto-replied
#
# Never mind that.
#
# Have you found my broken zip?
#
# md5: bbb8b499a0eef99b52c7f13f4e78c24b
#
# Can you believe what one mistake can lead to?
import md5
import zipfile
import StringIO
import Image
file = open('mybroken.zip')
src = file.read()
file.close()
for i in range(len(src)):
for j in range(256):
changed = src[:i] + chr(j) + src[i+1:]
if md5.md5(changed).hexdigest() == 'bbb8b499a0eef99b52c7f13f4e78c24b':
src = changed
break
z = zipfile.ZipFile(StringIO.StringIO(src))
Image.open(StringIO.StringIO(z.read('mybroken.gif'))).show()
|
[
"md5.md5",
"StringIO.StringIO"
] |
[((841, 863), 'StringIO.StringIO', 'StringIO.StringIO', (['src'], {}), '(src)\n', (858, 863), False, 'import StringIO\n'), ((726, 742), 'md5.md5', 'md5.md5', (['changed'], {}), '(changed)\n', (733, 742), False, 'import md5\n')]
|
import time
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5 import QtCore
from QUANTAXIS.QASU.save_tdx import (QA_SU_save_stock_day,
QA_SU_save_stock_week,
QA_SU_save_stock_month,
QA_SU_save_stock_year,
QA_SU_save_stock_xdxr,
QA_SU_save_stock_min,
QA_SU_save_index_day,
QA_SU_save_index_min,
QA_SU_save_etf_day,
QA_SU_save_etf_min,
QA_SU_save_stock_list,
QA_SU_save_stock_block,
QA_SU_save_stock_info,
QA_SU_save_stock_transaction,
QA_SU_save_option_day)
from QUANTAXIS.QAUtil import DATABASE
'''
https://martinfitzpatrick.name/article/multithreading-pyqt-applications-with-qthreadpool/
QThread
'''
class QA_GUI_Date_Fetch_Task(QThread):
#todo fix here 不会执行 __init__的 QThread 是一个很特别的对象。
#
#def __int__(self, qParentWidget):
# 初始化函数,默认
# super(QA_GUI_Date_Fetch_Task, self).__init__()
# self.qParentWidget = qParentWidget;
# abstract method, 线程工作的地方
def run(self):
pass
# 定义一个信号, 更新任务进度
trigger_new_log = pyqtSignal(str)
trigger_new_progress = pyqtSignal(int)
trigger_start_task_begin = pyqtSignal(str)
trigger_start_task_done = pyqtSignal(str)
#abstract method ?
def connectSignalSlot(self):
self.trigger_new_log.connect(self.updateLogTriggerHandler)
self.trigger_new_progress.connect(self.updateProgressTriggerHandler)
self.trigger_start_task_begin.connect(self.startTaskTriggerHandler)
self.trigger_start_task_done.connect(self.doneTaskTriggerHandler)
def setLoggingUIWidget(self, logDisplay):
self.logDisplay = logDisplay
def setProgressUIWidget(self, qProgressBar):
self.qProgressBar = qProgressBar
def setCheckboxUIWidget(self, qCheckBox):
self.qCheckBox = qCheckBox
#abstract method
def changeRunningTaskColor0(self, qColor=None):
palette = self.qCheckBox.palette()
if qColor == None:
palette.setColor(QPalette.Active, QPalette.WindowText, Qt.black)
else:
palette.setColor(QPalette.Active, QPalette.WindowText, qColor)
self.qCheckBox.setPalette(palette)
pass
#abstract method
def updateLogTriggerHandler(self):
pass
#abstract method
def updateProgressTriggerHandler(self):
pass
#abstract method
def startTaskTriggerHandler(self):
pass
#abstract method
def doneTaskTriggerHandler(self):
pass
class QA_GUI_DateFetch_SU_job01_stock_day(QA_GUI_Date_Fetch_Task):
# todo fix here 不会执行 __init__的 QThread 是一个很特别的对象。
#def __int__(self, qParentWidget):
#super(QA_GUI_DateFetch_SU_job01_stock_day, self).__init__()
#self.qCheckBox = qParentWidget.qCheckBoxJob01_save_stock_day
#self.qProgressBar = qParentWidget.qProgressJob01_save_stock_day;
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.black)
pass
def updateLogTriggerHandler(self, log):
#print("append task log emite triggered!", log);
#self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_day")
self.logDisplay.setRowCount(rowCount+1)
self.logDisplay.setItem(rowCount,0,newItem1)
self.logDisplay.setItem(rowCount,1,newItem2)
#self.logDisplay.scrollToBottom()
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
# thread is working here
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_day(client=DATABASE, ui_log=self.trigger_new_log, ui_progress= self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
###################################################################################################################\
class QA_GUI_DateFetch_SU_job01_stock_week(QA_GUI_Date_Fetch_Task):
# 🛠todo fix here 不会执行 __init__的 QThread 是一个很特别的对象。
# def __int__(self):
# # 初始化函数,默认
# super(QA_GUI_DateFetch_SU_job01_stock_week, self).__init__()
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.yellow)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_week")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
#self.logDisplay.scrollToBottom()
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_week(client=DATABASE, ui_log= self.trigger_new_log, ui_progress= self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################\
class QA_GUI_DateFetch_SU_job01_stock_month(QA_GUI_Date_Fetch_Task):
# todo fix here 不会执行 __init__的 QThread 是一个很特别的对象。
# def __int__(self):
# # 初始化函数,默认
# super(QA_GUI_DateFetch_SU_job01_stock_month, self).__init__()
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.blue)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_month")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
#self.logDisplay.scrollToBottom()
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_month(client=DATABASE, ui_log= self.trigger_new_log, ui_progress= self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################\
class QA_GUI_DateFetch_SU_job01_stock_year(QA_GUI_Date_Fetch_Task):
# todo fix here 不会执行 __init__的 QThread 是一个很特别的对象。
# def __int__(self):
# # # 初始化函数,默认
# super(QA_GUI_DateFetch_SU_job01_stock_year, self).__init__()
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.magenta)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_year")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
#self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_year(client=DATABASE, ui_log= self.trigger_new_log, ui_progress= self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################\
class QA_GUI_DateFetch_SU_job02_stock_xdxr(QA_GUI_Date_Fetch_Task):
# todo fix here 不会执行 __init__的 QThread 是一个很特别的对象。
# def _init_(self):
# # 初始化函数,默认
# super(QA_GUI_DateFetch_SU_job02_stock_xdxr, self).__init__()
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_xdxr")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
#self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_xdxr(client=DATABASE, ui_log= self.trigger_new_log, ui_progress= self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################\
class QA_GUI_DateFetch_SU_job03_stock_min(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_min")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
#self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_min(client=DATABASE, ui_log= self.trigger_new_log, ui_progress= self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################
class QA_GUI_DateFetch_SU_job04_index_day(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_index_day")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_index_day(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################
class QA_GUI_DateFetch_SU_job05_index_min(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_index_min")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_index_min(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################
class QA_GUI_DateFetch_SU_job06_etf_day(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_etf_day")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_etf_day(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################
class QA_GUI_DateFetch_SU_job07_etf_min(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_etf_min")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_etf_min(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
###################################################################################################################
class QA_GUI_DateFetch_SU_job08_stock_list(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_list")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_list(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
class QA_GUI_DateFetch_SU_job09_stock_block(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_list")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_block(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
class QA_GUI_DateFetch_SU_job10_stock_info(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_list")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_info(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
class QA_GUI_DateFetch_SU_job11_stock_transaction(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_transaction")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_stock_transaction(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
class QA_GUI_DateFetch_SU_job12_option_day(QA_GUI_Date_Fetch_Task):
def startTaskTriggerHandler(self, info_str):
self.changeRunningTaskColor0(QtCore.Qt.red)
pass
def doneTaskTriggerHandler(self, info_str):
#
self.changeRunningTaskColor0(QtCore.Qt.green)
pass
def updateProgressTriggerHandler(self, progress):
# print('update task progress ', progress);
self.qProgressBar.setValue(progress)
pass
def updateLogTriggerHandler(self, log):
# print("append task log emite triggered!", log);
# self.logDisplay.append(log)
if log and log.strip():
rowCount = self.logDisplay.rowCount()
newItem1 = QTableWidgetItem(log)
newItem2 = QTableWidgetItem("QA_SU_save_stock_transaction")
self.logDisplay.setRowCount(rowCount + 1)
self.logDisplay.setItem(rowCount, 0, newItem1)
self.logDisplay.setItem(rowCount, 1, newItem2)
# self.logDisplay.scrollToBottom()
pass
def run(self):
self.trigger_start_task_begin.emit("begin")
QA_SU_save_option_day(client=DATABASE, ui_log=self.trigger_new_log, ui_progress=self.trigger_new_progress)
self.trigger_start_task_done.emit("end")
pass
#通达信pytdx 会输出消息, 一同输出到gui界面只能够
class EmittingStream(QtCore.QObject):
textWritten = QtCore.pyqtSignal(str) # 定义一个发送str的信号
def write(self, text):
self.textWritten.emit(str(text))
class QA_GUI_Selected_TaskQueue(QThread):
# QThread 继承的不执行__init__
#def __int__(self, logDisplay):
# 奇怪的问题, 不执行 __init__
# 初始化函数,默认
# super().__init__()
#sfassda
#print("run here")
#exit(0)
#self.logDisplay = logDisplay
#sys.stderr.textWritten.connect(self.outputWrittenStderr)
# 下面将print 系统输出重定向到textEdit中
#sys.stdout = EmittingStream()
#sys.stderr = EmittingStream()
# 接收信号str的信号槽
'''
def outputWrittenStdout(self, text):
cursor = self.logDisplay.textCursor()
cursor.movePosition(QtGui.QTextCursor.End)
cursor.insertText(text)
self.logDisplay.setTextCursor(cursor)
self.logDisplay.ensureCursorVisible()
def outputWrittenStderr(self, text):
cursor = self.logDisplay.textCursor()
cursor.movePosition(QtGui.QTextCursor.End)
cursor.insertText(text)
self.logDisplay.setTextCursor(cursor)
self.logDisplay.ensureCursorVisible()
'''
# 定义一个信号,
trigger_all_task_start = pyqtSignal(str)
trigger_all_task_done = pyqtSignal(str)
#定义任务(每个是一个线程)
QA_GUI_Task_List = []
def run(self):
self.trigger_all_task_start.emit('all_task_start')
for iSubTask in self.QA_GUI_Task_List:
iSubTask.start()
# wait finish iSubTask
while (iSubTask.isRunning()):
time.sleep(1)
self.trigger_all_task_done.emit('all_task_done')
def putTask(self, subTask):
self.QA_GUI_Task_List.append(subTask)
def clearTask(self):
self.QA_GUI_Task_List.clear()
|
[
"PyQt5.QtCore.pyqtSignal",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_xdxr",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_etf_day",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_transaction",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_index_day",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_index_min",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_etf_min",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_min",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_week",
"time.sleep",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_day",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_option_day",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_list",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_info",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_block",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_year",
"QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_month"
] |
[((24627, 24649), 'PyQt5.QtCore.pyqtSignal', 'QtCore.pyqtSignal', (['str'], {}), '(str)\n', (24644, 24649), False, 'from PyQt5 import QtCore\n'), ((4445, 4554), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_day', 'QA_SU_save_stock_day', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (4465, 4554), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((6016, 6126), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_week', 'QA_SU_save_stock_week', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (6037, 6126), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((7603, 7714), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_month', 'QA_SU_save_stock_month', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (7625, 7714), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, 
QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((9199, 9309), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_year', 'QA_SU_save_stock_year', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (9220, 9309), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((10779, 10889), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_xdxr', 'QA_SU_save_stock_xdxr', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (10800, 10889), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((12184, 12293), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_min', 'QA_SU_save_stock_min', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (12204, 12293), False, 'from QUANTAXIS.QASU.save_tdx 
import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((13587, 13696), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_index_day', 'QA_SU_save_index_day', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (13607, 13696), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((14989, 15098), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_index_min', 'QA_SU_save_index_min', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (15009, 15098), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((16387, 16494), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_etf_day', 'QA_SU_save_etf_day', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n 
ui_progress=self.trigger_new_progress)\n', (16405, 16494), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((17782, 17889), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_etf_min', 'QA_SU_save_etf_min', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (17800, 17889), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((19183, 19293), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_list', 'QA_SU_save_stock_list', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (19204, 19293), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((20474, 20585), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_block', 'QA_SU_save_stock_block', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 
'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (20496, 20585), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((21763, 21873), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_info', 'QA_SU_save_stock_info', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (21784, 21873), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((23066, 23183), 'QUANTAXIS.QASU.save_tdx.QA_SU_save_stock_transaction', 'QA_SU_save_stock_transaction', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (23094, 23183), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((24370, 24480), 
'QUANTAXIS.QASU.save_tdx.QA_SU_save_option_day', 'QA_SU_save_option_day', ([], {'client': 'DATABASE', 'ui_log': 'self.trigger_new_log', 'ui_progress': 'self.trigger_new_progress'}), '(client=DATABASE, ui_log=self.trigger_new_log,\n ui_progress=self.trigger_new_progress)\n', (24391, 24480), False, 'from QUANTAXIS.QASU.save_tdx import QA_SU_save_stock_day, QA_SU_save_stock_week, QA_SU_save_stock_month, QA_SU_save_stock_year, QA_SU_save_stock_xdxr, QA_SU_save_stock_min, QA_SU_save_index_day, QA_SU_save_index_min, QA_SU_save_etf_day, QA_SU_save_etf_min, QA_SU_save_stock_list, QA_SU_save_stock_block, QA_SU_save_stock_info, QA_SU_save_stock_transaction, QA_SU_save_option_day\n'), ((26159, 26172), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (26169, 26172), False, 'import time\n')]
|
import base64
import json
import pickle
from sys import argv
from time import sleep
import websocket
map_function = None
socket: websocket.WebSocketApp = None
def set_map_function(code: str):
global map_function
map_function = pickle.loads(base64.b64decode(code))
def execute_map(data):
decoded_data = pickle.loads(base64.b64decode(data))
result = map_function(decoded_data)
encoded_result = base64.b64encode(pickle.dumps(result)).decode("utf-8")
socket.send(json.dumps({"type": "result", "value": encoded_result}))
def handle_message(ws, message):
message = json.loads(message)
{"function": set_map_function, "data": execute_map,}[
message["type"]
](message["value"])
def wait_and_start_websocket(*_):
sleep(1)
start_websocket()
def on_open(ws):
ws.send(json.dumps({"type": "ready", "value": None}))
def start_websocket():
global socket
socket = websocket.WebSocketApp(
websocket_url,
on_open=on_open,
on_message=handle_message,
on_close=wait_and_start_websocket,
)
socket.run_forever()
if __name__ == "__main__":
_, websocket_url = argv
start_websocket()
|
[
"websocket.WebSocketApp",
"json.loads",
"json.dumps",
"base64.b64decode",
"time.sleep",
"pickle.dumps"
] |
[((595, 614), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (605, 614), False, 'import json\n'), ((761, 769), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (766, 769), False, 'from time import sleep\n'), ((925, 1046), 'websocket.WebSocketApp', 'websocket.WebSocketApp', (['websocket_url'], {'on_open': 'on_open', 'on_message': 'handle_message', 'on_close': 'wait_and_start_websocket'}), '(websocket_url, on_open=on_open, on_message=\n handle_message, on_close=wait_and_start_websocket)\n', (947, 1046), False, 'import websocket\n'), ((252, 274), 'base64.b64decode', 'base64.b64decode', (['code'], {}), '(code)\n', (268, 274), False, 'import base64\n'), ((333, 355), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (349, 355), False, 'import base64\n'), ((489, 544), 'json.dumps', 'json.dumps', (["{'type': 'result', 'value': encoded_result}"], {}), "({'type': 'result', 'value': encoded_result})\n", (499, 544), False, 'import json\n'), ((823, 867), 'json.dumps', 'json.dumps', (["{'type': 'ready', 'value': None}"], {}), "({'type': 'ready', 'value': None})\n", (833, 867), False, 'import json\n'), ((435, 455), 'pickle.dumps', 'pickle.dumps', (['result'], {}), '(result)\n', (447, 455), False, 'import pickle\n')]
|
#
# This file is part of WinPexpect. WinPexpect is free software that is made
# available under the MIT license. Consult the file "LICENSE" that is
# distributed together with this file for the exact licensing terms.
#
# WinPexpect is copyright (c) 2008-2010 by the WinPexpect authors. See the
# file "AUTHORS" for a complete overview.
import sys
from setuptools import setup
if sys.version_info[0] == 3:
from lib2to3.fixes import fix_types
fix_types._TYPE_MAPPING['StringTypes'] = '(str,)'
setup(
name = 'winpexpect',
version = '1.6',
description = 'A version of pexpect that works under Windows.',
author = '<NAME>, <NAME>',
author_email = '<EMAIL>, <EMAIL>',
url = 'https://bitbucket.org/weyou/winpexpect',
license = 'MIT',
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: Microsoft :: Windows'],
package_dir = {'': 'lib'},
py_modules = ['pexpect', 'winpexpect', 'expectstub'],
test_suite = 'nose.collector',
install_requires = ['pywin32 >= 214'],
zip_safe = False,
use_2to3 = True
)
|
[
"setuptools.setup"
] |
[((503, 1145), 'setuptools.setup', 'setup', ([], {'name': '"""winpexpect"""', 'version': '"""1.6"""', 'description': '"""A version of pexpect that works under Windows."""', 'author': '"""<NAME>, <NAME>"""', 'author_email': '"""<EMAIL>, <EMAIL>"""', 'url': '"""https://bitbucket.org/weyou/winpexpect"""', 'license': '"""MIT"""', 'classifiers': "['Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python',\n 'Operating System :: Microsoft :: Windows']", 'package_dir': "{'': 'lib'}", 'py_modules': "['pexpect', 'winpexpect', 'expectstub']", 'test_suite': '"""nose.collector"""', 'install_requires': "['pywin32 >= 214']", 'zip_safe': '(False)', 'use_2to3': '(True)'}), "(name='winpexpect', version='1.6', description=\n 'A version of pexpect that works under Windows.', author=\n '<NAME>, <NAME>', author_email='<EMAIL>, <EMAIL>', url=\n 'https://bitbucket.org/weyou/winpexpect', license='MIT', classifiers=[\n 'Development Status :: 4 - Beta', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python',\n 'Operating System :: Microsoft :: Windows'], package_dir={'': 'lib'},\n py_modules=['pexpect', 'winpexpect', 'expectstub'], test_suite=\n 'nose.collector', install_requires=['pywin32 >= 214'], zip_safe=False,\n use_2to3=True)\n", (508, 1145), False, 'from setuptools import setup\n')]
|
import warnings
import numpy as np
import pandas as pd
from matplotlib import patches
from sklearn.cluster import DBSCAN
from sklearn.decomposition import PCA
from sklearn.metrics import confusion_matrix
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import OneHotEncoder, LabelEncoder
import matplotlib.pyplot as plt
import concurrent.futures
import time
from pylab import bone, pcolor, colorbar, plot, show, rcParams, savefig
from hyperopt import fmin, hp, tpe, Trials, STATUS_OK
warnings.filterwarnings("ignore")
dataset_train=pd.read_csv('kdd_train.csv')
dataset_test=pd.read_csv('kdd_test.csv')
col_names = ["duration","protocol_type","service","flag","src_bytes",
"dst_bytes","land","wrong_fragment","urgent","hot","num_failed_logins",
"logged_in","num_compromised","root_shell","su_attempted","num_root",
"num_file_creations","num_shells","num_access_files","num_outbound_cmds",
"is_host_login","is_guest_login","count","srv_count","serror_rate",
"srv_serror_rate","rerror_rate","srv_rerror_rate","same_srv_rate",
"diff_srv_rate","srv_diff_host_rate","dst_host_count","dst_host_srv_count",
"dst_host_same_srv_rate","dst_host_diff_srv_rate","dst_host_same_src_port_rate",
"dst_host_srv_diff_host_rate","dst_host_serror_rate","dst_host_srv_serror_rate",
"dst_host_rerror_rate","dst_host_srv_rerror_rate","label"]
print("Shape of Training Dataset:", dataset_train.shape)
print("Shape of Testing Dataset:", dataset_test.shape)
# # Assigning attribute name to dataset
dataset_train = pd.read_csv("kdd_train.csv", header=None, names = col_names)
dataset_test = pd.read_csv("kdd_test.csv", header=None, names = col_names)
# #label distribution of Training set and testing set
print('Label distribution Training set:')
print(dataset_train['label'].value_counts())
print()
print('Label distribution Test set:')
print(dataset_test['label'].value_counts())
# # colums that are categorical and not binary yet: protocol_type (column 2), service (column 3), flag (column 4).
# # explore categorical features
print('Training set:')
for col_name in dataset_train.columns:
if dataset_train[col_name].dtypes == 'object' :
unique_cat = len(dataset_train[col_name].unique())
print("Feature '{col_name}' has {unique_cat} categories".format(col_name=col_name, unique_cat=unique_cat))
# #see how distributed the feature service is, it is evenly distributed and therefore we need to make dummies for all.
# print()
print('Distribution of categories in service:')
print(dataset_train['service'].value_counts().sort_values(ascending=False).head())
# # Test set
print('Test set:')
for col_name in dataset_test.columns:
if dataset_test[col_name].dtypes == 'object' :
unique_cat = len(dataset_test[col_name].unique())
print("Feature '{col_name}' has {unique_cat} categories".format(col_name=col_name, unique_cat=unique_cat))
#categorical_columns=['protocol_type', 'service', 'flag']
# # insert code to get a list of categorical columns into a variable, categorical_columns
categorical_columns=['protocol_type', 'service', 'flag']
# # Get the categorical values into a 2D numpy array
dataset_train_categorical_values = dataset_train[categorical_columns]
dataset_test_categorical_values = dataset_test[categorical_columns]
dataset_train_categorical_values.head()
# # protocol type
unique_protocol=sorted(dataset_train.protocol_type.unique())
string1 = 'Protocol_type_'
unique_protocol2=[string1 + x for x in unique_protocol]
# # service
unique_service=sorted(dataset_train.service.unique())
string2 = 'service_'
unique_service2=[string2 + x for x in unique_service]
# # flag
unique_flag=sorted(dataset_train.flag.unique())
string3 = 'flag_'
unique_flag2=[string3 + x for x in unique_flag]
# # put together
dumcols=unique_protocol2 + unique_service2 + unique_flag2
print(dumcols)
# #do same for test set
unique_service_test=sorted(dataset_test.service.unique())
unique_service2_test=[string2 + x for x in unique_service_test]
testdumcols=unique_protocol2 + unique_service2_test + unique_flag2
# #Transform categorical features into numbers using LabelEncoder()
dataset_train_categorical_values_enc=dataset_train_categorical_values.apply(LabelEncoder().fit_transform)
print(dataset_train_categorical_values_enc.head())
# # test set
dataset_test_categorical_values_enc=dataset_test_categorical_values.apply(LabelEncoder().fit_transform)
# #One-Hot-Encoding¶
enc = OneHotEncoder()
dataset_train_categorical_values_encenc = enc.fit_transform(dataset_train_categorical_values_enc)
dataset_train_cat_data = pd.DataFrame(dataset_train_categorical_values_encenc.toarray(),columns=dumcols)
# # test set
dataset_test_categorical_values_encenc = enc.fit_transform(dataset_test_categorical_values_enc)
dataset_test_cat_data = pd.DataFrame(dataset_test_categorical_values_encenc.toarray(),columns=testdumcols)
dataset_train_cat_data.head()
trainservice=dataset_train['service'].tolist()
testservice= dataset_test['service'].tolist()
difference=list(set(trainservice) - set(testservice))
string = 'service_'
difference=[string + x for x in difference]
print(difference)
for col in difference:
dataset_test_cat_data[col] = 0
print(dataset_test_cat_data.shape)
# #Join encoded categorical dataframe with the non-categorical dataframe
newdf=dataset_train.join(dataset_train_cat_data)
newdf.drop('flag', axis=1, inplace=True)
newdf.drop('protocol_type', axis=1, inplace=True)
newdf.drop('service', axis=1, inplace=True)
# # test data
newdf_test=dataset_test.join(dataset_test_cat_data)
newdf_test.drop('flag', axis=1, inplace=True)
newdf_test.drop('protocol_type', axis=1, inplace=True)
newdf_test.drop('service', axis=1, inplace=True)
print(newdf.shape)
print(newdf_test.shape)
# # take label column
labeldf=newdf['label']
labeldf_test=newdf_test['label']
# # change the label column
newlabeldf=labeldf.replace({ 'neptune' : 1 ,'back': 1, 'land': 1, 'pod': 1, 'smurf': 1, 'teardrop': 1,'mailbomb': 1, 'apache2': 1, 'processtable': 1, 'udpstorm': 1, 'worm': 1,
'ipsweep' : 2,'nmap' : 2,'portsweep' : 2,'satan' : 2,'mscan' : 2,'saint' : 2
,'ftp_write': 3,'guess_passwd': 3,'imap': 3,'multihop': 3,'phf': 3,'spy': 3,'warezclient': 3,'warezmaster': 3,'sendmail': 3,'named': 3,'snmpgetattack': 3,'snmpguess': 3,'xlock': 3,'xsnoop': 3,'httptunnel': 3,
'buffer_overflow': 4,'loadmodule': 4,'perl': 4,'rootkit': 4,'ps': 4,'sqlattack': 4,'xterm': 4})
newlabeldf_test=labeldf_test.replace({ 'neptune' : 1 ,'back': 1, 'land': 1, 'pod': 1, 'smurf': 1, 'teardrop': 1,'mailbomb': 1, 'apache2': 1, 'processtable': 1, 'udpstorm': 1, 'worm': 1,
'ipsweep' : 2,'nmap' : 2,'portsweep' : 2,'satan' : 2,'mscan' : 2,'saint' : 2
,'ftp_write': 3,'guess_passwd': 3,'imap': 3,'multihop': 3,'phf': 3,'spy': 3,'warezclient': 3,'warezmaster': 3,'sendmail': 3,'named': 3,'snmpgetattack': 3,'snmpguess': 3,'xlock': 3,'xsnoop': 3,'httptunnel': 3,
'buffer_overflow': 4,'loadmodule': 4,'perl': 4,'rootkit': 4,'ps': 4,'sqlattack': 4,'xterm': 4})
# # put the new label column back
newdf['label'] = newlabeldf
newdf_test['label'] = newlabeldf_test
y_train= newdf['label']
y_test= newdf_test['label']
import csv
with open('newdataset/labeltrain.csv', 'a', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
#for row in rows:
spamwriter.writerows(map(lambda x: [x], y_train))
with open('newdataset/labeltest.csv', 'a', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
#for row in rows:
spamwriter.writerows(map(lambda x: [x], y_test))
X_train=newdf.drop('label',axis='columns')
X_train = X_train[1:] #take the data less the header row
for i in range(len(X_train)):
with open('newdataset/train.csv', 'a', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
#for row in rows:
spamwriter.writerow(X_train.iloc[i])
new_header = X_train.iloc[0]
X_train.columns = new_header
X_test=newdf_test.drop('label',axis='columns')
X_test = X_test[1:] #take the data less the header row
for i in range(len(X_test)):
with open('newdataset/test.csv', 'a', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
#for row in rows:
spamwriter.writerow(X_test.iloc[i])
X_test.columns = new_header
new_header = X_test.iloc[0]
X_test.columns = new_header
|
[
"csv.writer",
"warnings.filterwarnings",
"pandas.read_csv",
"sklearn.preprocessing.OneHotEncoder",
"sklearn.preprocessing.LabelEncoder"
] |
[((509, 542), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (532, 542), False, 'import warnings\n'), ((558, 586), 'pandas.read_csv', 'pd.read_csv', (['"""kdd_train.csv"""'], {}), "('kdd_train.csv')\n", (569, 586), True, 'import pandas as pd\n'), ((601, 628), 'pandas.read_csv', 'pd.read_csv', (['"""kdd_test.csv"""'], {}), "('kdd_test.csv')\n", (612, 628), True, 'import pandas as pd\n'), ((1565, 1623), 'pandas.read_csv', 'pd.read_csv', (['"""kdd_train.csv"""'], {'header': 'None', 'names': 'col_names'}), "('kdd_train.csv', header=None, names=col_names)\n", (1576, 1623), True, 'import pandas as pd\n'), ((1641, 1698), 'pandas.read_csv', 'pd.read_csv', (['"""kdd_test.csv"""'], {'header': 'None', 'names': 'col_names'}), "('kdd_test.csv', header=None, names=col_names)\n", (1652, 1698), True, 'import pandas as pd\n'), ((4473, 4488), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {}), '()\n', (4486, 4488), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder\n'), ((7435, 7469), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (7445, 7469), False, 'import csv\n'), ((7633, 7667), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (7643, 7667), False, 'import csv\n'), ((4246, 4260), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (4258, 4260), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder\n'), ((4414, 4428), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (4426, 4428), False, 'from sklearn.preprocessing import OneHotEncoder, LabelEncoder\n'), ((7960, 7994), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (7970, 7994), False, 'import csv\n'), ((8334, 8368), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (8344, 8368), False, 'import 
csv\n')]
|
#!/usr/bin/python3
import signal
import RPi.GPIO as GPIO
import logging
import coloredlogs
import sys
sys.path.append("..")
import argparse
import ruamel.yaml as YAML
import time
import threading
import asyncio
from neopixeldevice import NeopixelDevice, LED_PIN, LightMode, ws as ws_
from utils import *
from pn532 import Pn532
from kuzzle.kuzzle import KuzzleIOT
import namedtupled
yaml = YAML.YAML()
CONFIG_PATH = '../config'
log = logging.getLogger('MAIN')
UID = None
devices = {}
pn532 = None
neo = None
# @formatter: off
default_state = {
"mode": LightMode.COLOR_RAMP.value,
"ramp": [
(255, 0, 0),
(127, 127, 0),
(0, 255, 0),
(0, 127, 127),
(0, 0, 255),
(127, 0, 127),
(255, 127, 0),
(255, 255, 255),
]
}
# @formatter:on
GPIO.setmode(GPIO.BCM)
buttons = {
"button_0": "RELEASED",
"button_1": "RELEASED",
"button_2": "RELEASED",
"button_3": "RELEASED",
}
def init_hw_components(fw_config, hw_config):
    """Create every device driver and its Kuzzle IoT mirror, then connect them.

    Populates the module-level globals ``UID``, ``devices``, ``neo`` and
    ``pn532`` from the firmware and hardware configurations, and registers
    the board itself (with its attached device list) on Kuzzle.

    :param fw_config: firmware config namedtuple (kuzzle host/port, owner, version)
    :param hw_config: hardware config namedtuple (LED count, enabled sensors, ...)
    """
    global devices
    global pn532
    global pi
    global UID
    global neo
    kuzzle_cfg = fw_config.kuzzle
    dev_conn = ()  # connection coroutines, gathered and awaited together below
    UID = rpi_get_serial()
    log.info('Getting device base UID: %s', UID)
    log.info('Connecting to Kuzzle on {}:{}'.format(kuzzle_cfg.host, kuzzle_cfg.port))
    log.debug("Neopixel: led_count = {}".format(hw_config.rgb_light.led_count))
    # LED strip driver plus its Kuzzle mirror; the strip republishes its state
    # once the Kuzzle connection is up (on_kuzzle_connected callback).
    neo = NeopixelDevice(hw_config.rgb_light.led_count, LED_PIN, strip_type=ws_.WS2811_STRIP_GRB)
    devices["kuzzle_neo"] = KuzzleIOT(
        'rgb_light_{}'.format(UID),
        'neopixel-linear',
        host=kuzzle_cfg.host,
        port=kuzzle_cfg.port,
        owner=fw_config.device.owner,
        additional_info={'led_count': hw_config.rgb_light.led_count}
    )
    dev_conn += (devices["kuzzle_neo"].connect(neo.on_kuzzle_connected),)
    devices["kuzzle_rfid"] = KuzzleIOT(
        "NFC_" + UID,
        "RFID_reader",
        host=kuzzle_cfg.host,
        port=kuzzle_cfg.port,
        owner=fw_config.device.owner
    )
    dev_conn += (devices["kuzzle_rfid"].connect(None),)
    # Optional sensors: only mirrored on Kuzzle when enabled in the hw config.
    if hw_config.motion_sensor.enabled:
        devices["kuzzle_motion"] = KuzzleIOT(
            "motion_" + UID,
            "motion-sensor",
            host=kuzzle_cfg.host,
            port=kuzzle_cfg.port,
            owner=fw_config.device.owner
        )
        dev_conn += (devices["kuzzle_motion"].connect(None),)
    if hw_config.buttons.enabled:
        devices["kuzzle_buttons"] = KuzzleIOT(
            "buttons_{}".format(UID),
            "button",
            host=kuzzle_cfg.host,
            port=kuzzle_cfg.port,
            owner=fw_config.device.owner
        )
        dev_conn += (devices["kuzzle_buttons"].connect(None),)
    devices["kuzzle_light"] = KuzzleIOT(
        "light_lvl_{}".format(UID),
        "light_sensor",
        host=kuzzle_cfg.host,
        port=kuzzle_cfg.port,
        owner=fw_config.device.owner
    )
    dev_conn += (devices["kuzzle_light"].connect(None),)
    # Connect all devices concurrently and block until every one is up.
    asyncio.get_event_loop().run_until_complete(
        asyncio.gather(*dev_conn)
    )
    attached_devices = []
    for d in devices:
        attached_devices.append(devices[d].device_uid)
    # Register the board itself, advertising the UIDs of all attached devices.
    board = KuzzleIOT(
        UID,
        hw_config.type,
        host=kuzzle_cfg.host,
        port=kuzzle_cfg.port,
        owner=fw_config.device.owner,
        additional_info={
            "devices": attached_devices,
            "hw_version": hw_config.hw_version,
            "sw_version": fw_config.firmware.version
        }
    )
    asyncio.get_event_loop().run_until_complete(
        asyncio.gather(
            board.connect(None),
        )
    )
    log.debug('All KuzzleIoT instances are connected...')
    # Publish the default colour ramp and start the NFC reader driver.
    neo.state = default_state
    neo.publish_state()
    pn532 = Pn532('/dev/serial0', devices["kuzzle_rfid"].publish_state)
def logs_init():
    """Configure colored console logging (DEBUG level) for the main logger."""
    log_format = '[%(thread)X] - %(asctime)s - %(name)s - %(levelname)s - %(message)s'
    coloredlogs.install(
        logger=log,
        fmt=log_format,
        level=logging.DEBUG,
        stream=sys.stdout,
    )
class GpioHandler:
    """Dispatches GPIO edge events (buttons, motion sensor) to Kuzzle devices."""
    def __init__(self, hw_config):
        self.hw_config = hw_config
    def on_gpio_changed(self, gpio, level):
        """Publish the new state of whichever device owns *gpio*.

        :param gpio: BCM pin number whose level changed
        :param level: sampled level; buttons are pulled up, so 0 means PRESSED
        """
        if gpio in self.hw_config.buttons.gpios:
            buttons[
                'button_{}'.format(self.hw_config.buttons.gpios.index(gpio))] = 'PRESSED' if not level else 'RELEASED'
            log.debug('Buttons state: %s', buttons)
            devices["kuzzle_buttons"].publish_state(buttons)
        elif gpio == self.hw_config.motion_sensor.gpio:
            log.debug('Motion: %s', 'True' if level else 'False')
            devices["kuzzle_motion"].publish_state({'motion': True if level else False})
        else:
            log.warning('Unexpected GPIO: %d', gpio)
    def on_gpio_changed_up(self, channel):
        """RPi.GPIO event callback: debounce, then read and dispatch the level."""
        time.sleep(0.03)  # 30 ms sleep to make sure the GPIO state is stabilized before reading it
        self.on_gpio_changed(channel, GPIO.input(channel))
    def motion_sensor_install(self):
        """Configure the motion-sensor pin and register its edge callback."""
        # Fix: the original called GPIO.setup() twice on the same pin;
        # a single setup is sufficient.
        GPIO.setup(self.hw_config.motion_sensor.gpio, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.add_event_detect(self.hw_config.motion_sensor.gpio, GPIO.BOTH, callback=self.on_gpio_changed_up)
    def buttons_install(self):
        """Configure all button pins and register a debounced edge callback each."""
        GPIO.setup(self.hw_config.buttons.gpios, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        for gpio in self.hw_config.buttons.gpios:
            GPIO.add_event_detect(gpio, GPIO.BOTH, callback=self.on_gpio_changed_up, bouncetime=50)
def cleanup(hw_config):
    """Turn off the status LEDs and the LED strip, then release all GPIOs.

    :param hw_config: hardware config namedtuple (LED gpios and enable flags)
    """
    if hw_config.connection_led.enabled:
        GPIO.output(hw_config.connection_led.gpio, 0)
    if hw_config.power_led.enabled:
        GPIO.output(hw_config.power_led.gpio, 0)
    global neo
    # Switch the Neopixel strip off through its state setter.
    neo.state = {
        'on': False,
    }
    GPIO.cleanup()
def start_sensing_light(hw_config):
    """Poll the TEPT5700 light sensor once per second and publish lux readings.

    Daemon-thread entry point: loops forever; a KeyboardInterrupt exits quietly.

    :param hw_config: hardware config namedtuple (light_sensor.mcp_channel)
    """
    log.info("Starting light level sensing thread: reading in MCP channel {}".format(hw_config.light_sensor.mcp_channel))
    # Local import: the tept5700 dependency is only needed when this
    # thread actually runs.
    import tept5700
    tept = tept5700.Tept5700(5.2, 10000, mcp_channel=hw_config.light_sensor.mcp_channel)
    try:
        while 1:
            voltage, lux = tept.read_lux()
            devices["kuzzle_light"].publish_state({"level": lux})  # "{:.3f}".format(lux)})
            time.sleep(1)
    except KeyboardInterrupt as e:
        pass
class SignalHandler:
    """Handles SIGTERM: blinks the connection LED, cleans up and exits."""
    def __init__(self, hw_config):
        self.hw_config = hw_config
    def on_sigterm(self, sig_num, stack_frame):
        """SIGTERM handler: signal shutdown on the LED, then terminate cleanly."""
        log.debug("I'm dying!!!")
        led_gpio = self.hw_config.connection_led.gpio
        # Blink the connection LED (off/on twice, 0.5 s per step).
        for led_level in (0, 1, 0, 1):
            GPIO.output(led_gpio, led_level)
            time.sleep(0.5)
        GPIO.output(led_gpio, 0)
        log.info("service stopped")
        cleanup(self.hw_config)
        exit(0)
def startup():
    """Firmware entry point: load configs, connect to Kuzzle, start threads.

    Retries the Kuzzle server up to 50 times (5 s apart); on success it
    initialises all hardware components, installs GPIO callbacks, starts the
    NFC and light-sensor daemon threads and enters the asyncio event loop.
    Exits the process with -1 when Kuzzle stays unreachable.
    """
    logs_init()
    fw_config, hw_config = load_configs(CONFIG_PATH)
    # Wrap the plain dict configs as attribute-accessible namedtuples.
    fw_config = namedtupled.map(fw_config)
    hw_config = namedtupled.map(hw_config)
    kuzzle_config = fw_config.kuzzle
    sh = SignalHandler(hw_config)
    signal.signal(signal.SIGTERM, sh.on_sigterm)
    gpio_handler = GpioHandler(hw_config)
    if hw_config.power_led.enabled:
        GPIO.setup(hw_config.power_led.gpio, GPIO.OUT)
        GPIO.output(hw_config.power_led.gpio, 1)
    if hw_config.connection_led.gpio:
        GPIO.setup(hw_config.connection_led.gpio, GPIO.OUT)
        GPIO.output(hw_config.connection_led.gpio, 0)
    retry = 50
    while retry:
        khost = kuzzle_config.host
        kport = kuzzle_config.port
        res = KuzzleIOT.server_info(khost, kport)
        if res:
            # Server reachable: stop retrying and bring up the hardware.
            retry = 0
            log.debug('Connected to Kuzzle on http://{}:{}, version = {}'.format(
                khost,
                kport,
                res["serverInfo"]["kuzzle"]["version"])
            )
            init_hw_components(fw_config, hw_config)
            GPIO.output(hw_config.connection_led.gpio, 1)
            if hw_config.motion_sensor.enabled:
                gpio_handler.motion_sensor_install()
            if hw_config.buttons.enabled:
                gpio_handler.buttons_install()
            # NFC polling and light sensing run as daemon threads so they
            # die with the main process.
            pn532_thread = threading.Thread(target=pn532.start_polling, name="pn532_polling")
            pn532_thread.daemon = True
            pn532_thread.start()
            light_sensor_thread = threading.Thread(target=start_sensing_light, args=(hw_config,),
                                                  name="light_sensor")
            light_sensor_thread.daemon = True
            light_sensor_thread.start()
        else:
            log.warning("Unable to connect to Kuzzle...")
            retry -= 1
            if retry:
                log.info('Trying to reconnect in 5s, %d retries remaining', retry)
            else:
                log.critical('Impossible to connect to Kuzzle service...quitting')
                exit(-1)
            time.sleep(5)
    try:
        log.info("Entering event loop...")
        asyncio.get_event_loop().run_forever()
        log.info("Configuration changed, restarting firmware...")
    except KeyboardInterrupt as e:
        pass
    finally:
        # Always release GPIOs and switch LEDs off on the way out.
        cleanup(hw_config)
# Run the firmware when executed as a script.
if __name__ == '__main__':
    startup()
|
[
"pn532.Pn532",
"kuzzle.kuzzle.KuzzleIOT",
"kuzzle.kuzzle.KuzzleIOT.server_info",
"RPi.GPIO.output",
"sys.path.append",
"asyncio.gather",
"tept5700.Tept5700",
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"neopixeldevice.NeopixelDevice",
"ruamel.yaml.YAML",
"namedtupled.map",
"threading.Thread",
"RPi.GPIO.setmode",
"asyncio.get_event_loop",
"time.sleep",
"RPi.GPIO.input",
"signal.signal",
"coloredlogs.install",
"logging.getLogger",
"RPi.GPIO.add_event_detect"
] |
[((105, 126), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (120, 126), False, 'import sys\n'), ((395, 406), 'ruamel.yaml.YAML', 'YAML.YAML', ([], {}), '()\n', (404, 406), True, 'import ruamel.yaml as YAML\n'), ((439, 464), 'logging.getLogger', 'logging.getLogger', (['"""MAIN"""'], {}), "('MAIN')\n", (456, 464), False, 'import logging\n'), ((811, 833), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (823, 833), True, 'import RPi.GPIO as GPIO\n'), ((1420, 1512), 'neopixeldevice.NeopixelDevice', 'NeopixelDevice', (['hw_config.rgb_light.led_count', 'LED_PIN'], {'strip_type': 'ws_.WS2811_STRIP_GRB'}), '(hw_config.rgb_light.led_count, LED_PIN, strip_type=ws_.\n WS2811_STRIP_GRB)\n', (1434, 1512), False, 'from neopixeldevice import NeopixelDevice, LED_PIN, LightMode, ws as ws_\n'), ((1887, 2004), 'kuzzle.kuzzle.KuzzleIOT', 'KuzzleIOT', (["('NFC_' + UID)", '"""RFID_reader"""'], {'host': 'kuzzle_cfg.host', 'port': 'kuzzle_cfg.port', 'owner': 'fw_config.device.owner'}), "('NFC_' + UID, 'RFID_reader', host=kuzzle_cfg.host, port=\n kuzzle_cfg.port, owner=fw_config.device.owner)\n", (1896, 2004), False, 'from kuzzle.kuzzle import KuzzleIOT\n'), ((3221, 3462), 'kuzzle.kuzzle.KuzzleIOT', 'KuzzleIOT', (['UID', 'hw_config.type'], {'host': 'kuzzle_cfg.host', 'port': 'kuzzle_cfg.port', 'owner': 'fw_config.device.owner', 'additional_info': "{'devices': attached_devices, 'hw_version': hw_config.hw_version,\n 'sw_version': fw_config.firmware.version}"}), "(UID, hw_config.type, host=kuzzle_cfg.host, port=kuzzle_cfg.port,\n owner=fw_config.device.owner, additional_info={'devices':\n attached_devices, 'hw_version': hw_config.hw_version, 'sw_version':\n fw_config.firmware.version})\n", (3230, 3462), False, 'from kuzzle.kuzzle import KuzzleIOT\n'), ((3800, 3859), 'pn532.Pn532', 'Pn532', (['"""/dev/serial0"""', "devices['kuzzle_rfid'].publish_state"], {}), "('/dev/serial0', devices['kuzzle_rfid'].publish_state)\n", (3805, 3859), False, 'from 
pn532 import Pn532\n'), ((3883, 4038), 'coloredlogs.install', 'coloredlogs.install', ([], {'logger': 'log', 'fmt': '"""[%(thread)X] - %(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.DEBUG', 'stream': 'sys.stdout'}), "(logger=log, fmt=\n '[%(thread)X] - %(asctime)s - %(name)s - %(levelname)s - %(message)s',\n level=logging.DEBUG, stream=sys.stdout)\n", (3902, 4038), False, 'import coloredlogs\n'), ((5886, 5900), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (5898, 5900), True, 'import RPi.GPIO as GPIO\n'), ((6093, 6170), 'tept5700.Tept5700', 'tept5700.Tept5700', (['(5.2)', '(10000)'], {'mcp_channel': 'hw_config.light_sensor.mcp_channel'}), '(5.2, 10000, mcp_channel=hw_config.light_sensor.mcp_channel)\n', (6110, 6170), False, 'import tept5700\n'), ((7161, 7187), 'namedtupled.map', 'namedtupled.map', (['fw_config'], {}), '(fw_config)\n', (7176, 7187), False, 'import namedtupled\n'), ((7204, 7230), 'namedtupled.map', 'namedtupled.map', (['hw_config'], {}), '(hw_config)\n', (7219, 7230), False, 'import namedtupled\n'), ((7307, 7351), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'sh.on_sigterm'], {}), '(signal.SIGTERM, sh.on_sigterm)\n', (7320, 7351), False, 'import signal\n'), ((2178, 2300), 'kuzzle.kuzzle.KuzzleIOT', 'KuzzleIOT', (["('motion_' + UID)", '"""motion-sensor"""'], {'host': 'kuzzle_cfg.host', 'port': 'kuzzle_cfg.port', 'owner': 'fw_config.device.owner'}), "('motion_' + UID, 'motion-sensor', host=kuzzle_cfg.host, port=\n kuzzle_cfg.port, owner=fw_config.device.owner)\n", (2187, 2300), False, 'from kuzzle.kuzzle import KuzzleIOT\n'), ((3072, 3097), 'asyncio.gather', 'asyncio.gather', (['*dev_conn'], {}), '(*dev_conn)\n', (3086, 3097), False, 'import asyncio\n'), ((4870, 4886), 'time.sleep', 'time.sleep', (['(0.03)'], {}), '(0.03)\n', (4880, 4886), False, 'import time\n'), ((5067, 5152), 'RPi.GPIO.setup', 'GPIO.setup', (['self.hw_config.motion_sensor.gpio', 'GPIO.IN'], {'pull_up_down': 'GPIO.PUD_UP'}), 
'(self.hw_config.motion_sensor.gpio, GPIO.IN, pull_up_down=GPIO.PUD_UP\n )\n', (5077, 5152), True, 'import RPi.GPIO as GPIO\n'), ((5156, 5241), 'RPi.GPIO.setup', 'GPIO.setup', (['self.hw_config.motion_sensor.gpio', 'GPIO.IN'], {'pull_up_down': 'GPIO.PUD_UP'}), '(self.hw_config.motion_sensor.gpio, GPIO.IN, pull_up_down=GPIO.PUD_UP\n )\n', (5166, 5241), True, 'import RPi.GPIO as GPIO\n'), ((5245, 5350), 'RPi.GPIO.add_event_detect', 'GPIO.add_event_detect', (['self.hw_config.motion_sensor.gpio', 'GPIO.BOTH'], {'callback': 'self.on_gpio_changed_up'}), '(self.hw_config.motion_sensor.gpio, GPIO.BOTH,\n callback=self.on_gpio_changed_up)\n', (5266, 5350), True, 'import RPi.GPIO as GPIO\n'), ((5387, 5462), 'RPi.GPIO.setup', 'GPIO.setup', (['self.hw_config.buttons.gpios', 'GPIO.IN'], {'pull_up_down': 'GPIO.PUD_UP'}), '(self.hw_config.buttons.gpios, GPIO.IN, pull_up_down=GPIO.PUD_UP)\n', (5397, 5462), True, 'import RPi.GPIO as GPIO\n'), ((5688, 5733), 'RPi.GPIO.output', 'GPIO.output', (['hw_config.connection_led.gpio', '(0)'], {}), '(hw_config.connection_led.gpio, 0)\n', (5699, 5733), True, 'import RPi.GPIO as GPIO\n'), ((5779, 5819), 'RPi.GPIO.output', 'GPIO.output', (['hw_config.power_led.gpio', '(0)'], {}), '(hw_config.power_led.gpio, 0)\n', (5790, 5819), True, 'import RPi.GPIO as GPIO\n'), ((6590, 6640), 'RPi.GPIO.output', 'GPIO.output', (['self.hw_config.connection_led.gpio', '(0)'], {}), '(self.hw_config.connection_led.gpio, 0)\n', (6601, 6640), True, 'import RPi.GPIO as GPIO\n'), ((6649, 6664), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (6659, 6664), False, 'import time\n'), ((6673, 6723), 'RPi.GPIO.output', 'GPIO.output', (['self.hw_config.connection_led.gpio', '(1)'], {}), '(self.hw_config.connection_led.gpio, 1)\n', (6684, 6723), True, 'import RPi.GPIO as GPIO\n'), ((6732, 6747), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (6742, 6747), False, 'import time\n'), ((6756, 6806), 'RPi.GPIO.output', 'GPIO.output', 
(['self.hw_config.connection_led.gpio', '(0)'], {}), '(self.hw_config.connection_led.gpio, 0)\n', (6767, 6806), True, 'import RPi.GPIO as GPIO\n'), ((6815, 6830), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (6825, 6830), False, 'import time\n'), ((6839, 6889), 'RPi.GPIO.output', 'GPIO.output', (['self.hw_config.connection_led.gpio', '(1)'], {}), '(self.hw_config.connection_led.gpio, 1)\n', (6850, 6889), True, 'import RPi.GPIO as GPIO\n'), ((6898, 6913), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (6908, 6913), False, 'import time\n'), ((6922, 6972), 'RPi.GPIO.output', 'GPIO.output', (['self.hw_config.connection_led.gpio', '(0)'], {}), '(self.hw_config.connection_led.gpio, 0)\n', (6933, 6972), True, 'import RPi.GPIO as GPIO\n'), ((7441, 7487), 'RPi.GPIO.setup', 'GPIO.setup', (['hw_config.power_led.gpio', 'GPIO.OUT'], {}), '(hw_config.power_led.gpio, GPIO.OUT)\n', (7451, 7487), True, 'import RPi.GPIO as GPIO\n'), ((7496, 7536), 'RPi.GPIO.output', 'GPIO.output', (['hw_config.power_led.gpio', '(1)'], {}), '(hw_config.power_led.gpio, 1)\n', (7507, 7536), True, 'import RPi.GPIO as GPIO\n'), ((7584, 7635), 'RPi.GPIO.setup', 'GPIO.setup', (['hw_config.connection_led.gpio', 'GPIO.OUT'], {}), '(hw_config.connection_led.gpio, GPIO.OUT)\n', (7594, 7635), True, 'import RPi.GPIO as GPIO\n'), ((7644, 7689), 'RPi.GPIO.output', 'GPIO.output', (['hw_config.connection_led.gpio', '(0)'], {}), '(hw_config.connection_led.gpio, 0)\n', (7655, 7689), True, 'import RPi.GPIO as GPIO\n'), ((7807, 7842), 'kuzzle.kuzzle.KuzzleIOT.server_info', 'KuzzleIOT.server_info', (['khost', 'kport'], {}), '(khost, kport)\n', (7828, 7842), False, 'from kuzzle.kuzzle import KuzzleIOT\n'), ((3019, 3043), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (3041, 3043), False, 'import asyncio\n'), ((3556, 3580), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (3578, 3580), False, 'import asyncio\n'), ((5000, 5019), 'RPi.GPIO.input', 'GPIO.input', 
(['channel'], {}), '(channel)\n', (5010, 5019), True, 'import RPi.GPIO as GPIO\n'), ((5525, 5616), 'RPi.GPIO.add_event_detect', 'GPIO.add_event_detect', (['gpio', 'GPIO.BOTH'], {'callback': 'self.on_gpio_changed_up', 'bouncetime': '(50)'}), '(gpio, GPIO.BOTH, callback=self.on_gpio_changed_up,\n bouncetime=50)\n', (5546, 5616), True, 'import RPi.GPIO as GPIO\n'), ((6344, 6357), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (6354, 6357), False, 'import time\n'), ((8145, 8190), 'RPi.GPIO.output', 'GPIO.output', (['hw_config.connection_led.gpio', '(1)'], {}), '(hw_config.connection_led.gpio, 1)\n', (8156, 8190), True, 'import RPi.GPIO as GPIO\n'), ((8411, 8477), 'threading.Thread', 'threading.Thread', ([], {'target': 'pn532.start_polling', 'name': '"""pn532_polling"""'}), "(target=pn532.start_polling, name='pn532_polling')\n", (8427, 8477), False, 'import threading\n'), ((8585, 8674), 'threading.Thread', 'threading.Thread', ([], {'target': 'start_sensing_light', 'args': '(hw_config,)', 'name': '"""light_sensor"""'}), "(target=start_sensing_light, args=(hw_config,), name=\n 'light_sensor')\n", (8601, 8674), False, 'import threading\n'), ((9146, 9159), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (9156, 9159), False, 'import time\n'), ((9221, 9245), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (9243, 9245), False, 'import asyncio\n')]
|
#!/usr/bin/env python3
"""
App base.
- APP: flask app object
- DB: sqlalchemy database
- UTIL: utility methods
"""
import logging
from twitoff.app import make_app
# Build the Flask app and its database / cache handles in one call.
APP, DB, REDIS = make_app()
application = APP  # conventional module-level name for WSGI servers
LOG = logging.getLogger("twitoff")
# NOTE(review): imported *after* APP exists — presumably Routes registers
# views against the live app, so keep this import order.
from twitoff import Routes
from twitoff.service.util_service import UtilService
UTIL = UtilService()
logging.basicConfig(level=logging.DEBUG)
|
[
"logging.basicConfig",
"twitoff.service.util_service.UtilService",
"logging.getLogger",
"twitoff.app.make_app"
] |
[((212, 222), 'twitoff.app.make_app', 'make_app', ([], {}), '()\n', (220, 222), False, 'from twitoff.app import make_app\n'), ((247, 275), 'logging.getLogger', 'logging.getLogger', (['"""twitoff"""'], {}), "('twitoff')\n", (264, 275), False, 'import logging\n'), ((365, 378), 'twitoff.service.util_service.UtilService', 'UtilService', ([], {}), '()\n', (376, 378), False, 'from twitoff.service.util_service import UtilService\n'), ((380, 420), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (399, 420), False, 'import logging\n')]
|
# #################################################################
# Python codes PENN for caching
# Codes have been tested successfully on Python 3.6.0 with TensorFlow 1.14.0.
# #################################################################
import scipy.io as sio
import numpy as np
import runner
import math
import sys
K = 10  # number of files
num_H = 1000  # number of training samples, 10000 for K=10 and 20, 15000 for K=30
num_val = math.ceil(0.1 * num_H)  # number of validation samples
training_epochs = 3000  # number of training epochs
N_mont = 10  # number of Monte Carlo simulations
LR = 0.01  # initial learning rate
batch_size = min(num_H, 1000)  # batch size
# load data -- read the .mat file once instead of five separate times
mat_path = '../Data/Sup_WFpol_Nf' + str(K) + '.mat'
mat = sio.loadmat(mat_path)
Xtrain = mat['X_train']
Ytrain = mat['pol_tr']
X = mat['X_test']
Y = mat['pol_te']
pf_test = mat['pf_test']
num_tr = Xtrain.shape[2]  # number of training realisations
num_te = X.shape[2]  # number of test realisations
d_past = Xtrain.shape[1]  # history depth per file
layernum = [d_past * K, 10 * K, K]  # layer sizes: input, hidden, output
# Flatten the (d_past, K, samples) tensors into 2-D design matrices.
Xtrain = np.reshape(Xtrain, (d_past * K, num_tr))
X = np.reshape(X, (d_past * K, num_te))
# training
Ratio, Time = runner.run(Xtrain, Ytrain, X, Y, pf_test, num_H, num_val, N_mont,
                        training_epochs=training_epochs, LR=LR,
                        batch_size=batch_size, K=K, layernum=layernum)
# performance
Sort_Ratio = np.sort(Ratio)
print('The second worst ratio is: %f ' % Sort_Ratio[1])
print('Average time for each training is: %f s' % (np.mean(Time)))
|
[
"math.ceil",
"runner.run",
"numpy.sort",
"numpy.mean",
"numpy.reshape"
] |
[((538, 560), 'math.ceil', 'math.ceil', (['(0.1 * num_H)'], {}), '(0.1 * num_H)\n', (547, 560), False, 'import math\n'), ((1300, 1340), 'numpy.reshape', 'np.reshape', (['Xtrain', '(d_past * K, num_tr)'], {}), '(Xtrain, (d_past * K, num_tr))\n', (1310, 1340), True, 'import numpy as np\n'), ((1341, 1376), 'numpy.reshape', 'np.reshape', (['X', '(d_past * K, num_te)'], {}), '(X, (d_past * K, num_te))\n', (1351, 1376), True, 'import numpy as np\n'), ((1401, 1561), 'runner.run', 'runner.run', (['Xtrain', 'Ytrain', 'X', 'Y', 'pf_test', 'num_H', 'num_val', 'N_mont'], {'training_epochs': 'training_epochs', 'LR': 'LR', 'batch_size': 'batch_size', 'K': 'K', 'layernum': 'layernum'}), '(Xtrain, Ytrain, X, Y, pf_test, num_H, num_val, N_mont,\n training_epochs=training_epochs, LR=LR, batch_size=batch_size, K=K,\n layernum=layernum)\n', (1411, 1561), False, 'import runner\n'), ((1612, 1626), 'numpy.sort', 'np.sort', (['Ratio'], {}), '(Ratio)\n', (1619, 1626), True, 'import numpy as np\n'), ((1738, 1751), 'numpy.mean', 'np.mean', (['Time'], {}), '(Time)\n', (1745, 1751), True, 'import numpy as np\n')]
|
# Generated by Django 2.1.1 on 2020-08-03 04:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds required `district` and `state` CharFields to the PathTest model.
    dependencies = [
        ('app', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='pathtest',
            name='district',
            # default=None + preserve_default=False: the default is only used
            # to fill existing rows during this migration, not kept on the model.
            field=models.CharField(default=None, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='pathtest',
            name='state',
            field=models.CharField(default=None, max_length=50),
            preserve_default=False,
        ),
    ]
|
[
"django.db.models.CharField"
] |
[((323, 368), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(50)'}), '(default=None, max_length=50)\n', (339, 368), False, 'from django.db import migrations, models\n'), ((525, 570), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'None', 'max_length': '(50)'}), '(default=None, max_length=50)\n', (541, 570), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf8 -*-
"""
The main idea of this module, that you can combine
any number of any filters without any knowledge about their
implementation. You have only one requirement — user functions
should return a filter (or something that can be cast to a filter).
"""
from __future__ import absolute_import, division, print_function
from builtins import range
from shot_detector.filters import (
DelayFilter,
MeanSWFilter,
)
WINDOW_SIZE = 25  # default sliding-window size (not referenced in this module's visible code)
delay = DelayFilter()
original = delay(0)  # zero-frame delay -- presumably the unmodified signal; confirm
# Sliding-window mean filter factory used by the generators below.
mean = MeanSWFilter(
    # window_size=50,
    # strict_windows=True,
    # mean_name='EWMA',
    cs=False,
)
def multi_mean(start=5, stop=50, step=None, pivot=None, **kwargs):
    """
    Combine the mean-difference filters produced by
    ``min_size_filter_generator`` over window sizes ``range(start, stop, step)``
    into one averaged filter.

    :param start: first window size
    :param stop: exclusive upper bound for window sizes
    :param step: stride between sizes (defaults to 1)
    :param pivot: reference window size forwarded to the generator
    :param kwargs: extra options forwarded to the ``mean`` filter
    :return: the combined filter
    """
    if step is None:
        step = 1
    res = min_size_filter_generator(start, stop, step, pivot, **kwargs)
    # NOTE(review): the divisor here is (stop - start) * step, yet the number
    # of generated filters is roughly (stop - start) / step; for step > 1 this
    # is not a plain average -- confirm the intended normalisation.
    res = sum(res) / (stop - start) / step
    return res
def min_size_filter_generator(start,
                              stop,
                              step=None,
                              pivot=None,
                              **kwargs):
    """
    Yield ``mean(size + 1) - mean(pivot)`` difference filters for every
    window size in ``range(start, stop, step)``.

    :param start: first window size
    :param stop: exclusive upper bound for window sizes
    :param step: stride between sizes (defaults to 1)
    :param pivot: reference window size (defaults to ``start``)
    :param kwargs: extra options forwarded to the ``mean`` filter factory
    :return: generator of difference filters
    """
    step = 1 if step is None else step
    pivot = start if pivot is None else pivot
    for window_size in range(start, stop, step):
        # Build both filters fresh on every iteration, exactly as before.
        pivot_filter = mean(s=pivot, **kwargs)
        wide_filter = mean(s=window_size + 1, **kwargs)
        yield wide_filter - pivot_filter
|
[
"shot_detector.filters.DelayFilter",
"builtins.range",
"shot_detector.filters.MeanSWFilter"
] |
[((485, 498), 'shot_detector.filters.DelayFilter', 'DelayFilter', ([], {}), '()\n', (496, 498), False, 'from shot_detector.filters import DelayFilter, MeanSWFilter\n'), ((528, 550), 'shot_detector.filters.MeanSWFilter', 'MeanSWFilter', ([], {'cs': '(False)'}), '(cs=False)\n', (540, 550), False, 'from shot_detector.filters import DelayFilter, MeanSWFilter\n'), ((1424, 1448), 'builtins.range', 'range', (['start', 'stop', 'step'], {}), '(start, stop, step)\n', (1429, 1448), False, 'from builtins import range\n')]
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from posts.models import Post, Group
User = get_user_model()
class PostModelTest(TestCase):
    # Model-level tests for Post: __str__, verbose_name and help_text.
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        user = User.objects.create_user(username='TestUser')
        cls.post = Post.objects.create(
            text='Text in post more then 15 simbols',
            author=user
        )
    def test_str_method(self):
        """str(post) must equal the first 15 characters of its text."""
        post = PostModelTest.post
        first_15_simbols = post.text[:15]
        self.assertEqual(first_15_simbols, str(post))
    def test_verbose_name(self):
        """Each field must carry its expected (Russian) verbose name."""
        post = PostModelTest.post
        field_verboses = {
            'text': 'Текст',
            'author': 'Автор',
            'group': 'Группа',
        }
        for field, verbose_name in field_verboses.items():
            with self.subTest(field=field):
                self.assertEqual(
                    post._meta.get_field(field).verbose_name, verbose_name)
    def test_help_text(self):
        """Each field must carry its expected (Russian) help text."""
        post = PostModelTest.post
        field_help_text = {
            'group': 'Выберете группу сообщества для публикации.',
            'text': 'Введите текст публикации.'
        }
        for field, help_text in field_help_text.items():
            with self.subTest(field=field):
                self.assertEqual(
                    post._meta.get_field(field).help_text, help_text)
class GroupModelTest(TestCase):
    # Model-level tests for Group: __str__ and verbose_name.
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.group = Group.objects.create(
            title='Test case',
            slug='test'
        )
    def test_str_method(self):
        """str(group) must equal the group's title."""
        group = GroupModelTest.group
        self.assertEqual(group.title, str(group))
    def test_verbose_name(self):
        """The 'title' field must use the expected Russian verbose name."""
        title_field = GroupModelTest.group._meta.get_field('title')
        self.assertEqual(title_field.verbose_name, 'Группа')
|
[
"posts.models.Post.objects.create",
"django.contrib.auth.get_user_model",
"posts.models.Group.objects.create"
] |
[((126, 142), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (140, 142), False, 'from django.contrib.auth import get_user_model\n'), ((327, 401), 'posts.models.Post.objects.create', 'Post.objects.create', ([], {'text': '"""Text in post more then 15 simbols"""', 'author': 'user'}), "(text='Text in post more then 15 simbols', author=user)\n", (346, 401), False, 'from posts.models import Post, Group\n'), ((1555, 1607), 'posts.models.Group.objects.create', 'Group.objects.create', ([], {'title': '"""Test case"""', 'slug': '"""test"""'}), "(title='Test case', slug='test')\n", (1575, 1607), False, 'from posts.models import Post, Group\n')]
|
import matplotlib.gridspec as gridspec
from nose.tools import assert_equal
def test_equal():
    """A SubplotSpec must compare equal to an identical spec from the same grid."""
    grid = gridspec.GridSpec(2, 1)
    for spec in (grid[0, 0], grid[:, 0]):
        assert_equal(spec, spec)
|
[
"matplotlib.gridspec.GridSpec",
"nose.tools.assert_equal"
] |
[((104, 127), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(2)', '(1)'], {}), '(2, 1)\n', (121, 127), True, 'import matplotlib.gridspec as gridspec\n'), ((132, 164), 'nose.tools.assert_equal', 'assert_equal', (['gs[0, 0]', 'gs[0, 0]'], {}), '(gs[0, 0], gs[0, 0])\n', (144, 164), False, 'from nose.tools import assert_equal\n'), ((169, 201), 'nose.tools.assert_equal', 'assert_equal', (['gs[:, 0]', 'gs[:, 0]'], {}), '(gs[:, 0], gs[:, 0])\n', (181, 201), False, 'from nose.tools import assert_equal\n')]
|
#Copyright 2020 Battelle Energy Alliance, LLC, ALL RIGHTS RESERVED.
#Buffered File with 0x00's
#Offset measured from center of chunk
#Adjustable slide %
#Added .Net Bytecode
#Added startup Notes
#Additional Error checking
#Added DotNet Bytecode
#Added Compiler Detection
#Added Percent Compressed or Encrypted
from tkinter import *
from tkinter import filedialog
from tkinter import messagebox
import shutil
import os
import sys
import subprocess
import math
import zipfile
programPath = '.' + os.sep  # all work/model directories are resolved relative to the CWD
selectedFilename = ''  # path of the binary chosen via fileSelectButton()
def clearDirectories():
    """Delete and recreate the Input/Output/OutputTemp working directories."""
    work_dirs = ('Input', 'Output', 'OutputTemp')
    # Remove any leftovers from a previous run first...
    for name in work_dirs:
        folder = programPath + name
        if os.path.exists(folder):
            shutil.rmtree(folder)
    # ...then recreate all three, empty.
    for name in work_dirs:
        os.mkdir(programPath + name)
def getEndianess():
    """Read Output/Input.csv and return 'Big' or 'Little' endianness.

    The last data character of the CSV encodes the big-endian status bit
    ('0' means big endian).
    """
    with open(programPath + 'Output' + os.sep + 'Input.csv', 'r') as input_results:
        contents = input_results.read()
    status_bit = contents[-2:-1]
    return 'Big' if '0' in status_bit else 'Little'
def goButtonArch():
    """'Go' handler for whole-file classification.

    Unpacks the pickled models matching the selected radio button, runs
    GenerateByteHistogram.py on the selected file and shows the results.
    """
    # Clear the results box
    resultsBox.delete(1.0, END)
    # Require an architecture/compiler/bytecode radio selection.
    if radioValue.get() == 0:
        resultsBox.insert(END, 'Radio Button Not Selected')
        return None
    # Remove any previously extracted pickled models.
    modelPath = programPath + 'PickledSKLearnModels'
    models = os.listdir(modelPath)
    for model in models:
        if model.endswith(".sav"):
            os.remove(os.path.join(modelPath, model))
    # Unzip the model set selected by the radio button.
    # (renamed local `zip` -> `model_zip`: don't shadow the builtin)
    if radioValue.get() == 1:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Architectures.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    if radioValue.get() == 2:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Compilers.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    if radioValue.get() == 3:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Bytecode.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    if radioValue.get() == 4:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Architectures_and_Bytecode.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    # Copy the selected file into the Input directory.
    global selectedFilename
    try:
        shutil.copy(selectedFilename, programPath + 'Input')
    except:
        resultsBox.insert(END, 'File not selected. Select a file and try again.')
        return None
    # Try python3 first, fall back to python (Windows installs).
    try:
        os.system('python3 GenerateByteHistogram.py a ' + programPath + 'Input' + os.sep + ' ' + programPath + 'Output' + os.sep + ' 0 100000000000000000000 ' + entropyEntryText.get() + ' ' + blocksizeEntryText.get() + ' 0')
    except:
        os.system('python GenerateByteHistogram.py a ' + programPath + 'Input' + os.sep + ' ' + programPath + 'Output' + os.sep + ' 0 100000000000000000000 ' + entropyEntryText.get() + ' ' + blocksizeEntryText.get() + ' 0')
    # Fix: close the results file (the original leaked the handle).
    with open(programPath + 'Output' + os.sep + 'Results.txt', 'r') as results:
        contents = results.read()
    header = 'Type\tProbability\t\tAlgorthim\n------------------------------------------\n'
    resultsBox.insert(END, header + contents)
    # Update the endianness GUI label from the detector output.
    endianLabelText.set(getEndianess())
    clearDirectories()
def goButtonData():
    """'Go' handler for rolling-window (data-offset) classification.

    Pads the selected file with zero bytes, splits it into overlapping
    chunks, classifies each chunk and lists the byte offsets where the
    per-chunk votes agree.
    """
    # Clear the results box
    resultsBox.delete(1.0, END)
    # Require an architecture/compiler/bytecode radio selection.
    if radioValue.get() == 0:
        resultsBox.insert(END, 'Radio Button Not Selected')
        return None
    # Remove any previously extracted pickled models.
    modelPath = programPath + 'PickledSKLearnModels'
    models = os.listdir(modelPath)
    for model in models:
        if model.endswith(".sav"):
            os.remove(os.path.join(modelPath, model))
    # Unzip the model set selected by the radio button.
    # (renamed local `zip` -> `model_zip`: don't shadow the builtin)
    if radioValue.get() == 1:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Architectures.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    if radioValue.get() == 2:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Compilers.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    if radioValue.get() == 3:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Bytecode.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    if radioValue.get() == 4:
        model_zip = zipfile.ZipFile(programPath + 'PickledSKLearnModels' + os.sep + 'Architectures_and_Bytecode.zip')
        model_zip.extractall(programPath + 'PickledSKLearnModels')
    # Copy the selected file into the Input directory.
    global selectedFilename
    try:
        shutil.copy(selectedFilename, programPath + 'Input')
    except:
        resultsBox.insert(END, 'File not selected. Select a file and try again.')
        return None
    # Zero buffer: half a chunk (minus one byte) of 0x00 padding.
    zero = [0] * int((int(chunkEntryText.get()) / 2) - 1)
    zeroBuffer = bytearray(zero)
    # Pad the original file with leading and trailing zeros so offsets are
    # measured from the centre of each chunk.
    # Fix: close both file handles deterministically (originals were closed,
    # but use `with` so they are released even if a write fails).
    in_name = 'Input' + os.sep + selectedFilename.split(os.sep)[-1]
    tmp_name = 'Input' + os.sep + 'temp__' + selectedFilename.split(os.sep)[-1]
    with open(in_name, 'rb') as old, open(tmp_name, 'wb') as new:
        new.write(zeroBuffer)
        new.write(old.read())
        new.write(zeroBuffer)
    os.remove(in_name)
    os.rename(tmp_name, in_name)
    # Split the file via a rolling window, then classify every chunk.
    # Try python3 first, fall back to python (Windows installs).
    try:
        os.system('python3 RollingWindowExtractor.py ' + programPath + 'Input' + os.sep + ' ' + programPath + 'OutputTemp' + os.sep + ' ' + chunkEntryText.get() + ' ' + slideEntryText.get())
        os.system('python3 GenerateByteHistogram.py d ' + programPath + 'OutputTemp' + os.sep + ' ' + programPath + 'Output' + os.sep + ' 0 100000000000000000000 ' + entropyEntryText.get() + ' ' + blocksizeEntryText.get() + ' ' + votesEntryText.get())
    except:
        os.system('python RollingWindowExtractor.py ' + programPath + 'Input' + os.sep + ' ' + programPath + 'OutputTemp' + os.sep + ' ' + chunkEntryText.get() + ' ' + slideEntryText.get())
        os.system('python GenerateByteHistogram.py d ' + programPath + 'OutputTemp' + os.sep + ' ' + programPath + 'Output' + os.sep + ' 0 100000000000000000000 ' + entropyEntryText.get() + ' ' + blocksizeEntryText.get() + ' ' + votesEntryText.get())
    # Fix: close the results file (the original leaked the handle).
    with open(programPath + 'Output' + os.sep + 'Results.txt', 'r') as results:
        contents = results.readlines()
    # Strip newlines into a plain list.
    finalList = []
    for i in range(0, len(contents)):
        finalList.append(str(contents[i].rstrip('\n')))
    # Build the result table: chunk index, byte offset, agreement.
    finalString = 'File\tByte Offset\t\tAgreement\n'\
        + '-------------------------------------------------------\n'
    for i in range(0, len(finalList)):
        finalString = finalString + str(int(finalList[i].split(',')[0])+1) + '\t' + str(math.trunc((int(finalList[i].split(',')[0]))*(int(chunkEntryText.get())*(int(slideEntryText.get())/100)))).zfill(7) + '\t\t' + str(finalList[i].split(',')[1]) + '\n'
    if len(finalList) == 0:
        finalString = 'No Data Offset Matches Found'
    resultsBox.insert(END, finalString)
    # Endianness is not meaningful for per-chunk results.
    endianLabelText.set('<????>')
    clearDirectories()
def entropyPercentage(passedFile):
upperLimit = entropyEntryText.get().split(':')[0]
fileSize = os.path.getsize(passedFile)
entropy = subprocess.check_output(['binwalk', '-E', '-v', '--block=' + str(blocksizeEntryText.get()), '--nplot', passedFile]).split()
entropy = entropy[entropy.index(b'ENTROPY')+2:len(entropy)]
del entropy[1::3]
#Loop through Binwalk elements to decode them
for index in range(0,len(entropy)):
entropy[index] = entropy[index].decode()
entropy.append(str(fileSize))
byteTotal = 0
for index in range(1,len(entropy),2):
if float(entropy[index]) > float(upperLimit):
byteTotal = byteTotal + (float(entropy[index+1])-float(entropy[index-1]))
percentage = str(round((byteTotal / fileSize) * 100, 2)) + '%'
return percentage
def fileSelectButton():
global selectedFilename
fileSelectedLabelText.set('Loading Selected File...')
selectedFilename = filedialog.askopenfilename(initialdir = programPath,title = "Select Binary")
#Check for Zero size files
##############################################
while os.path.getsize(selectedFilename) <= 0:
print("Zero-byte file detected. Please select a different file.")
selectedFilename = filedialog.askopenfilename(initialdir = programPath,title = "Select Binary")
#Check Entropy Percentage
entropyPercent = entropyPercentage(selectedFilename)
percentLabelText.set(entropyPercent)
fileSelectedLabelText.set(selectedFilename)
entropyNumber = float(entropyPercent.split('%')[0])
if entropyNumber >= 66:
percentLabel.config(fg="red")
messagebox.showwarning("Entropy Warning", "Warning: Selected Binary is " + str(entropyPercent) + " Compressed or Encrypted. WiiBin Results Should Not Be Trusted.")
elif entropyNumber >= 33:
percentLabel.config(fg="orange")
messagebox.showwarning("Entropy Warning", "Warning: Selected Binary is " + str(entropyPercent) + " Compressed or Encrypted. WiiBin Results Might Not Be Reliable.")
elif entropyNumber >= 0:
percentLabel.config(fg="green")
def entropyLabelClicked(event):
resultsBox.delete(1.0, END)
resultsBox.insert(END,"The window of entropy that will be considered when generating byte histograms. Anything outside of that window will be ignored. Syntax=Max:Min")
def blocksizeLabelClicked(event):
resultsBox.delete(1.0, END)
resultsBox.insert(END,"The size of individual block (in bytes) considered during entropy analysis. Default=512")
def chunksizeLabelClicked(event):
resultsBox.delete(1.0, END)
resultsBox.insert(END,"The size of chunk in bytes that the data offset process will break the inputed file into. Default=10000")
def slidesizeLabelClicked(event):
resultsBox.delete(1.0, END)
resultsBox.insert(END,"The percent of the chunk size that the sliding window is slid Default=50")
def reqVotesLabelClicked(event):
resultsBox.delete(1.0, END)
resultsBox.insert(END,"Number of ML algorithms that must agree on an file for it to be reported as part of the data offset output. Default=5 (Simple Majority)")
def radioArchSelected():
entropyEntryText.set('0.9:0.1')
resultsBox.delete(1.0, END)
resultsBox.insert(END,"Mode Changed to Architecture")
def radioCompSelected():
entropyEntryText.set('1.0:0.0')
resultsBox.delete(1.0, END)
resultsBox.insert(END,"Mode Changed to Compiler")
def radioByteSelected():
entropyEntryText.set('0.9:0.1')
resultsBox.delete(1.0, END)
resultsBox.insert(END,"Mode Changed to Bytecode")
def radioArchByteSelected():
entropyEntryText.set('0.9:0.1')
resultsBox.delete(1.0, END)
resultsBox.insert(END,"Mode Changed to Architecture & Bytecode")
#############Initialization###################
root = Tk()
root.configure(width=40)
root.title("WiiBin")
root.geometry("655x390")
root.resizable(0, 0)
radioValue = IntVar()
buttonText = StringVar()
buttonText.set('Select File...')
fileSelectButton = Button(root, textvariable=buttonText, command=fileSelectButton)
fileSelectButton.place(x=10,y=10)
fileSelectedLabelText = StringVar()
fileSelectedLabel = Entry(root,textvariable=fileSelectedLabelText,bd=0,bg='#D9D9D9',width=62)
fileSelectedLabel.place(x=130,y=15)
fileSelectedLabelText.set('<File Path>')
entropyLabel = Label(root,text='Entropy Span:')
entropyLabel.place(x=10,y=45)
entropyLabel.bind("<Button>", entropyLabelClicked)
entropyEntryText = StringVar()
entropyEntry = Entry(root,width=7,textvariable=entropyEntryText)
entropyEntry.place(x=108,y=45)
entropyEntryText.set('0.9:0.1')
blocksizeLabel = Label(root,text='Block Size (b):')
blocksizeLabel.place(x=180,y=45)
blocksizeLabel.bind("<Button>", blocksizeLabelClicked)
blocksizeEntryText = StringVar()
blocksizeEntry = Entry(root,width=4,textvariable=blocksizeEntryText)
blocksizeEntry.place(x=279,y=45)
blocksizeEntryText.set('512')
chunkLabel = Label(root,text='Chunk Size (b):')
chunkLabel.place(x=330,y=45)
chunkLabel.bind("<Button>", chunksizeLabelClicked)
chunkEntryText = StringVar()
chunkEntry = Entry(root,width=6,textvariable=chunkEntryText)
chunkEntry.place(x=434,y=45)
chunkEntryText.set('10000')
slideLabel = Label(root,text='Slide (%):')
slideLabel.place(x=520,y=70)
slideLabel.bind("<Button>", slidesizeLabelClicked)
slideEntryText = StringVar()
slideEntry = Entry(root,width=3,textvariable=slideEntryText)
slideEntry.place(x=590,y=70)
slideEntryText.set('50')
votesLabel = Label(root,text='Req\'d Votes: of 8')
votesLabel.place(x=500,y=45)
votesLabel.bind("<Button>", reqVotesLabelClicked)
votesEntryText = StringVar()
votesEntry = Entry(root,width=2,textvariable=votesEntryText)
votesEntry.place(x=590,y=45)
votesEntryText.set('5')
goButtonArch = Button(root,text="Determine Type",command=goButtonArch)
goButtonArch.place(x=10,y=75)
goButtonData = Button(root,text="Determine Offsets",command=goButtonData)
goButtonData.place(x=145,y=75)
radioButtonArch = Radiobutton(root, text="Architecture", variable=radioValue, value=1, command=radioArchSelected)
radioButtonArch.place(x=292,y=68)
radioButtonByte = Radiobutton(root, text="Bytecode", variable=radioValue, value=3, command=radioByteSelected)
radioButtonByte.place(x=292,y=90)
radioButtonComp = Radiobutton(root, text="Compiler", variable=radioValue, value=2, command=radioCompSelected)
radioButtonComp.place(x=400,y=68)
radioButtonArchByte = Radiobutton(root, text="Arch&Byte", variable=radioValue, value=4, command=radioArchByteSelected)
radioButtonArchByte.place(x=400,y=90)
endianLabel = Label(root,text='Endianness:', justify=RIGHT)
endianLabel.place(x=500,y=93)
endianLabelText = StringVar()
endianLabel = Label(root,width=7,textvariable=endianLabelText)
endianLabel.place(x=580,y=93)
endianLabelText.set('<????>')
percentLabel = Label(root,text='Percent Compressed/Encrypted:', justify=RIGHT)
percentLabel.place(x=370,y=112)
percentLabelText = StringVar()
percentLabel = Label(root,width=7,textvariable=percentLabelText)
percentLabel.place(x=580,y=112)
percentLabelText.set('<????>')
resultsBox = Text(root, width=75, height=14, padx=5, pady=5, borderwidth=2, relief=RIDGE)
resultsBox.place(x=10, y= 130)
scrollb = Scrollbar(root, command=resultsBox.yview)
scrollb.place(x=630, y=350)
resultsBox['yscrollcommand'] = scrollb.set
resultsBox.insert(END,'Welcome to WiiBin 1.7.1\n----------------------\n\nNotes:\n\nThe smaller the Slide (%) and Chunk Size, the more accurate the detection and longer the runtime.\n\nToo small a Chuck Size will cause make ML difficult and less accurate.\n\nThe minimum detectable code segment size is limited to half of the selected Chunk Size.\n\nTo detect smaller code segments the Chunk Size most be reduced.')
clearDirectories()
root.mainloop()
|
[
"os.mkdir",
"zipfile.ZipFile",
"os.path.getsize",
"os.path.exists",
"tkinter.filedialog.askopenfilename",
"shutil.rmtree",
"os.path.join",
"os.listdir",
"shutil.copy"
] |
[((557, 594), 'os.path.exists', 'os.path.exists', (["(programPath + 'Input')"], {}), "(programPath + 'Input')\n", (571, 594), False, 'import os\n'), ((646, 684), 'os.path.exists', 'os.path.exists', (["(programPath + 'Output')"], {}), "(programPath + 'Output')\n", (660, 684), False, 'import os\n'), ((737, 779), 'os.path.exists', 'os.path.exists', (["(programPath + 'OutputTemp')"], {}), "(programPath + 'OutputTemp')\n", (751, 779), False, 'import os\n'), ((833, 864), 'os.mkdir', 'os.mkdir', (["(programPath + 'Input')"], {}), "(programPath + 'Input')\n", (841, 864), False, 'import os\n'), ((868, 900), 'os.mkdir', 'os.mkdir', (["(programPath + 'Output')"], {}), "(programPath + 'Output')\n", (876, 900), False, 'import os\n'), ((904, 940), 'os.mkdir', 'os.mkdir', (["(programPath + 'OutputTemp')"], {}), "(programPath + 'OutputTemp')\n", (912, 940), False, 'import os\n'), ((1620, 1641), 'os.listdir', 'os.listdir', (['modelPath'], {}), '(modelPath)\n', (1630, 1641), False, 'import os\n'), ((3899, 3920), 'os.listdir', 'os.listdir', (['modelPath'], {}), '(modelPath)\n', (3909, 3920), False, 'import os\n'), ((7601, 7628), 'os.path.getsize', 'os.path.getsize', (['passedFile'], {}), '(passedFile)\n', (7616, 7628), False, 'import os\n'), ((8436, 8509), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'initialdir': 'programPath', 'title': '"""Select Binary"""'}), "(initialdir=programPath, title='Select Binary')\n", (8462, 8509), False, 'from tkinter import filedialog\n'), ((603, 639), 'shutil.rmtree', 'shutil.rmtree', (["(programPath + 'Input')"], {}), "(programPath + 'Input')\n", (616, 639), False, 'import shutil\n'), ((693, 730), 'shutil.rmtree', 'shutil.rmtree', (["(programPath + 'Output')"], {}), "(programPath + 'Output')\n", (706, 730), False, 'import shutil\n'), ((788, 829), 'shutil.rmtree', 'shutil.rmtree', (["(programPath + 'OutputTemp')"], {}), "(programPath + 'OutputTemp')\n", (801, 829), False, 'import shutil\n'), ((1831, 1919), 
'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep + 'Architectures.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep +\n 'Architectures.zip')\n", (1846, 1919), False, 'import zipfile\n'), ((2016, 2101), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep + 'Compilers.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep + 'Compilers.zip'\n )\n", (2031, 2101), False, 'import zipfile\n'), ((2197, 2276), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep + 'Bytecode.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep + 'Bytecode.zip')\n", (2212, 2276), False, 'import zipfile\n'), ((2377, 2478), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep +\n 'Architectures_and_Bytecode.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep +\n 'Architectures_and_Bytecode.zip')\n", (2392, 2478), False, 'import zipfile\n'), ((2604, 2656), 'shutil.copy', 'shutil.copy', (['selectedFilename', "(programPath + 'Input')"], {}), "(selectedFilename, programPath + 'Input')\n", (2615, 2656), False, 'import shutil\n'), ((4110, 4198), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep + 'Architectures.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep +\n 'Architectures.zip')\n", (4125, 4198), False, 'import zipfile\n'), ((4295, 4380), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep + 'Compilers.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep + 'Compilers.zip'\n )\n", (4310, 4380), False, 'import zipfile\n'), ((4476, 4555), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(programPath + 'PickledSKLearnModels' + os.sep + 'Bytecode.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep + 'Bytecode.zip')\n", (4491, 4555), False, 'import zipfile\n'), ((4656, 4757), 'zipfile.ZipFile', 'zipfile.ZipFile', 
(["(programPath + 'PickledSKLearnModels' + os.sep +\n 'Architectures_and_Bytecode.zip')"], {}), "(programPath + 'PickledSKLearnModels' + os.sep +\n 'Architectures_and_Bytecode.zip')\n", (4671, 4757), False, 'import zipfile\n'), ((4882, 4934), 'shutil.copy', 'shutil.copy', (['selectedFilename', "(programPath + 'Input')"], {}), "(selectedFilename, programPath + 'Input')\n", (4893, 4934), False, 'import shutil\n'), ((8606, 8639), 'os.path.getsize', 'os.path.getsize', (['selectedFilename'], {}), '(selectedFilename)\n', (8621, 8639), False, 'import os\n'), ((8746, 8819), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'initialdir': 'programPath', 'title': '"""Select Binary"""'}), "(initialdir=programPath, title='Select Binary')\n", (8772, 8819), False, 'from tkinter import filedialog\n'), ((1718, 1748), 'os.path.join', 'os.path.join', (['modelPath', 'model'], {}), '(modelPath, model)\n', (1730, 1748), False, 'import os\n'), ((3997, 4027), 'os.path.join', 'os.path.join', (['modelPath', 'model'], {}), '(modelPath, model)\n', (4009, 4027), False, 'import os\n')]
|
#! /usr/bin/env python3
"""
usage: bactopia-stats [-h] STR STR
bactopia-stats - Ouput files to be used by Bactopia-WDL
positional arguments:
STR Directory where Bactopia outputs are.
STR Sample name used in Bactopia run
optional arguments:
-h, --help show this help message and exit
"""
import os
import json
import sys
PROGRAM = "bactopia-stats"
DESCRIPTION = 'Ouput files to be used by Bactopia-WDL'
def read_json(json_file):
""" Read input JSON file and return the dict. """
json_data = None
with open(json_file, 'rt') as json_fh:
json_data = json.load(json_fh)
return json_data
def write_output(file_name, output):
""" Write the output to a specific file. """
with open(file_name, 'wt') as file_fh:
if isinstance(output, float):
# Limit it two decimal places
file_fh.write(f'{output:.2f}\n')
elif isinstance(output, bool):
val = 'true' if output else 'false'
file_fh.write(f'{val}\n')
else:
file_fh.write(f'{output}\n')
if __name__ == '__main__':
import argparse as ap
import textwrap
parser = ap.ArgumentParser(
prog=PROGRAM,
conflict_handler='resolve',
description=(
f'{PROGRAM} - {DESCRIPTION}'
),
formatter_class=ap.RawDescriptionHelpFormatter
)
parser.add_argument('bactopia', metavar="STR", type=str,
help='Directory where Bactopia outputs are.')
parser.add_argument('sample_name', metavar="STR", type=str,
help='Sample name used in Bactopia run')
if len(sys.argv) == 1:
parser.print_help()
sys.exit(0)
args = parser.parse_args()
with open(f'{args.bactopia}/{args.sample_name}-genome-size.txt', 'rt') as file_fh:
write_output("GENOME_SIZE", file_fh.readline().rstrip())
# FASTQ Stats
is_paired = True if os.path.exists(f'{args.bactopia}/quality-control/{args.sample_name}_R1.fastq.gz') else False
write_output("IS_PAIRED", is_paired)
read_stats = {}
if (is_paired):
r1_raw_stats = assembly_stats = read_json(f'{args.bactopia}/quality-control/summary/{args.sample_name}_R1-original.json')
r2_raw_stats = assembly_stats = read_json(f'{args.bactopia}/quality-control/summary/{args.sample_name}_R2-original.json')
r1_qc_stats = assembly_stats = read_json(f'{args.bactopia}/quality-control/summary/{args.sample_name}_R1-final.json')
r2_qc_stats = assembly_stats = read_json(f'{args.bactopia}/quality-control/summary/{args.sample_name}_R2-final.json')
# Original Reads
write_output("RAW_TOTAL_BP", r1_raw_stats['qc_stats']['total_bp'] + r2_raw_stats['qc_stats']['total_bp'])
write_output("RAW_COVERAGE", r1_raw_stats['qc_stats']['coverage'] + r2_raw_stats['qc_stats']['coverage'])
write_output("RAW_READ_TOTAL", r1_raw_stats['qc_stats']['read_total'] + r2_raw_stats['qc_stats']['read_total'])
write_output("RAW_READ_MEAN", (r1_raw_stats['qc_stats']['read_mean'] + r2_raw_stats['qc_stats']['read_mean']) / 2.0)
write_output("RAW_QUAL_MEAN", (r1_raw_stats['qc_stats']['qual_mean'] + r2_raw_stats['qc_stats']['qual_mean']) / 2.0)
# After QC Reads
write_output("QC_TOTAL_BP", r1_qc_stats['qc_stats']['total_bp'] + r2_qc_stats['qc_stats']['total_bp'])
write_output("QC_COVERAGE", r1_qc_stats['qc_stats']['coverage'] + r2_qc_stats['qc_stats']['coverage'])
write_output("QC_READ_TOTAL", r1_qc_stats['qc_stats']['read_total'] + r2_qc_stats['qc_stats']['read_total'])
write_output("QC_READ_MEAN", (r1_qc_stats['qc_stats']['read_mean'] + r2_qc_stats['qc_stats']['read_mean']) / 2.0)
write_output("QC_QUAL_MEAN", (r1_qc_stats['qc_stats']['qual_mean'] + r2_qc_stats['qc_stats']['qual_mean']) / 2.0)
else:
se_raw_stats = assembly_stats = read_json(f'{args.bactopia}/quality-control/summary/{args.sample_name}-original.json')
se_qc_stats = assembly_stats = read_json(f'{args.bactopia}/quality-control/summary/{args.sample_name}-final.json')
# Original Reads
write_output("RAW_TOTAL_BP", se_raw_stats['qc_stats']['total_bp'])
write_output("RAW_COVERAGE", se_raw_stats['qc_stats']['coverage'])
write_output("RAW_READ_TOTAL", se_raw_stats['qc_stats']['read_total'])
write_output("RAW_READ_MEAN", se_raw_stats['qc_stats']['read_mean'])
write_output("RAW_QUAL_MEAN", se_raw_stats['qc_stats']['qual_mean'])
# After QC Reads
write_output("QC_TOTAL_BP", se_qc_stats['qc_stats']['total_bp'])
write_output("QC_COVERAGE", se_qc_stats['qc_stats']['coverage'])
write_output("QC_READ_TOTAL", se_qc_stats['qc_stats']['read_total'])
write_output("QC_READ_MEAN", se_qc_stats['qc_stats']['read_mean'])
write_output("QC_QUAL_MEAN", se_qc_stats['qc_stats']['qual_mean'])
# Assembly related stats
assembly_stats = read_json(f'{args.bactopia}/assembly/{args.sample_name}.json')
write_output("TOTAL_CONTIG", assembly_stats['total_contig'])
write_output("TOTAL_CONTIG_LENGTH", assembly_stats['total_contig_length'])
write_output("MAX_CONTIG_LENGTH", assembly_stats['max_contig_length'])
write_output("MEAN_CONTIG_LENGTH", assembly_stats['mean_contig_length'])
write_output("N50_CONTIG_LENGTH", assembly_stats['n50_contig_length'])
write_output("GC_PERCENT", float(assembly_stats['contig_percent_c']) + float(assembly_stats['contig_percent_g']))
|
[
"json.load",
"os.path.exists",
"argparse.ArgumentParser",
"sys.exit"
] |
[((1157, 1316), 'argparse.ArgumentParser', 'ap.ArgumentParser', ([], {'prog': 'PROGRAM', 'conflict_handler': '"""resolve"""', 'description': 'f"""{PROGRAM} - {DESCRIPTION}"""', 'formatter_class': 'ap.RawDescriptionHelpFormatter'}), "(prog=PROGRAM, conflict_handler='resolve', description=\n f'{PROGRAM} - {DESCRIPTION}', formatter_class=ap.\n RawDescriptionHelpFormatter)\n", (1174, 1316), True, 'import argparse as ap\n'), ((594, 612), 'json.load', 'json.load', (['json_fh'], {}), '(json_fh)\n', (603, 612), False, 'import json\n'), ((1693, 1704), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1701, 1704), False, 'import sys\n'), ((1933, 2019), 'os.path.exists', 'os.path.exists', (['f"""{args.bactopia}/quality-control/{args.sample_name}_R1.fastq.gz"""'], {}), "(\n f'{args.bactopia}/quality-control/{args.sample_name}_R1.fastq.gz')\n", (1947, 2019), False, 'import os\n')]
|
import BartlebyMachine.main as bartleby
import BartlebyMachine.book as book
bartleby = bartleby.Bartleby()
bartleby.addTableOfContent('toc.ggded.yaml')
bartleby.markdownToLatex()
bartleby.writeLatex()
|
[
"BartlebyMachine.main.Bartleby",
"BartlebyMachine.main.addTableOfContent",
"BartlebyMachine.main.markdownToLatex",
"BartlebyMachine.main.writeLatex"
] |
[((88, 107), 'BartlebyMachine.main.Bartleby', 'bartleby.Bartleby', ([], {}), '()\n', (105, 107), True, 'import BartlebyMachine.main as bartleby\n'), ((108, 152), 'BartlebyMachine.main.addTableOfContent', 'bartleby.addTableOfContent', (['"""toc.ggded.yaml"""'], {}), "('toc.ggded.yaml')\n", (134, 152), True, 'import BartlebyMachine.main as bartleby\n'), ((153, 179), 'BartlebyMachine.main.markdownToLatex', 'bartleby.markdownToLatex', ([], {}), '()\n', (177, 179), True, 'import BartlebyMachine.main as bartleby\n'), ((180, 201), 'BartlebyMachine.main.writeLatex', 'bartleby.writeLatex', ([], {}), '()\n', (199, 201), True, 'import BartlebyMachine.main as bartleby\n')]
|
# Adapted from https://pytorch.org/tutorials/intermediate/reinforcement_q_learning.html
import argparse
import random
import sys
import time
from collections import namedtuple
from pathlib import Path
# Prevent numpy from using up all cpu
import os
os.environ['MKL_NUM_THREADS'] = '1' # pylint: disable=wrong-import-position
import torch
import torch.optim as optim
from torch.nn.functional import smooth_l1_loss
from torch.utils.tensorboard import SummaryWriter
from tqdm import tqdm
import utils
torch.backends.cudnn.benchmark = True
Transition = namedtuple('Transition', ('state', 'action', 'reward', 'ministeps', 'next_state'))
class ReplayBuffer:
def __init__(self, capacity):
self.capacity = capacity
self.buffer = []
self.position = 0
def push(self, *args):
if len(self.buffer) < self.capacity:
self.buffer.append(None)
self.buffer[self.position] = Transition(*args)
self.position = (self.position + 1) % self.capacity
def sample(self, batch_size):
transitions = random.sample(self.buffer, batch_size)
return Transition(*zip(*transitions))
def __len__(self):
return len(self.buffer)
def train(cfg, policy_net, target_net, optimizer, batch, transform_func):
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
state_batch = torch.cat([transform_func(s) for s in batch.state]).to(device) # (32, 3, 96, 96)
action_batch = torch.tensor(batch.action, dtype=torch.long).to(device) # (32,)
reward_batch = torch.tensor(batch.reward, dtype=torch.float32).to(device) # (32,)
ministeps_batch = torch.tensor(batch.ministeps, dtype=torch.float32).to(device) # (32,)
non_final_next_states = torch.cat([transform_func(s) for s in batch.next_state if s is not None]).to(device, non_blocking=True) # (?32, 3, 96, 96)
output = policy_net(state_batch) # (32, 2, 96, 96)
state_action_values = output.view(cfg.batch_size, -1).gather(1, action_batch.unsqueeze(1)).squeeze(1) # (32,)
next_state_values = torch.zeros(cfg.batch_size, dtype=torch.float32, device=device) # (32,)
non_final_mask = torch.tensor(tuple(map(lambda s: s is not None, batch.next_state)), dtype=torch.bool, device=device) # (32,)
if cfg.use_double_dqn:
with torch.no_grad():
best_action = policy_net(non_final_next_states).view(non_final_next_states.size(0), -1).max(1)[1].view(non_final_next_states.size(0), 1) # (32?, 1)
next_state_values[non_final_mask] = target_net(non_final_next_states).view(non_final_next_states.size(0), -1).gather(1, best_action).view(-1) # (32?,)
else:
next_state_values[non_final_mask] = target_net(non_final_next_states).view(non_final_next_states.size(0), -1).max(1)[0].detach() # (32,)
expected_state_action_values = (reward_batch + torch.pow(cfg.discount_factor, ministeps_batch) * next_state_values) # (32,)
td_error = torch.abs(state_action_values - expected_state_action_values).detach() # (32,)
loss = smooth_l1_loss(state_action_values, expected_state_action_values)
optimizer.zero_grad()
loss.backward()
if cfg.grad_norm_clipping is not None:
torch.nn.utils.clip_grad_norm_(policy_net.parameters(), cfg.grad_norm_clipping)
optimizer.step()
train_info = {}
train_info['q_value_min'] = output.min().item()
train_info['q_value_max'] = output.max().item()
train_info['td_error'] = td_error.mean()
train_info['loss'] = loss
return train_info
def main(cfg):
# Set up logging and checkpointing
log_dir = Path(cfg.log_dir)
checkpoint_dir = Path(cfg.checkpoint_dir)
print('log_dir: {}'.format(log_dir))
print('checkpoint_dir: {}'.format(checkpoint_dir))
# Create environment
kwargs = {}
if sys.platform == 'darwin':
kwargs['use_gui'] = True
env = utils.get_env_from_cfg(cfg, **kwargs)
# Policy
policy = utils.get_policy_from_cfg(cfg, env.get_action_space(), train=True)
# Optimizer
optimizer = optim.SGD(policy.policy_net.parameters(), lr=cfg.learning_rate, momentum=0.9, weight_decay=cfg.weight_decay)
# Replay buffer
replay_buffer = ReplayBuffer(cfg.replay_buffer_size)
# Resume if applicable
start_timestep = 0
episode = 0
if cfg.checkpoint_path is not None:
checkpoint = torch.load(cfg.checkpoint_path)
start_timestep = checkpoint['timestep']
episode = checkpoint['episode']
optimizer.load_state_dict(checkpoint['optimizer'])
replay_buffer = checkpoint['replay_buffer']
print("=> loaded checkpoint '{}' (timestep {})".format(cfg.checkpoint_path, start_timestep))
# Target net
target_net = policy.build_network()
target_net.load_state_dict(policy.policy_net.state_dict())
target_net.eval()
# Logging
train_summary_writer = SummaryWriter(log_dir=str(log_dir / 'train'))
visualization_summary_writer = SummaryWriter(log_dir=str(log_dir / 'visualization'))
meters = Meters()
state = env.reset()
total_timesteps_with_warm_up = cfg.learning_starts + cfg.total_timesteps
for timestep in tqdm(range(start_timestep, total_timesteps_with_warm_up),
initial=start_timestep, total=total_timesteps_with_warm_up, file=sys.stdout):
start_time = time.time()
# Select an action
if cfg.exploration_timesteps > 0:
exploration_eps = 1 - min(max(timestep - cfg.learning_starts, 0) / cfg.exploration_timesteps, 1) * (1 - cfg.final_exploration)
else:
exploration_eps = cfg.final_exploration
action, _ = policy.step(state, exploration_eps=exploration_eps)
# Step the simulation
next_state, reward, done, info = env.step(action)
ministeps = info['ministeps']
# Store in buffer
replay_buffer.push(state, action, reward, ministeps, next_state)
state = next_state
# Reset if episode ended
if done:
state = env.reset()
episode += 1
# Train network
if timestep >= cfg.learning_starts:
batch = replay_buffer.sample(cfg.batch_size)
train_info = train(cfg, policy.policy_net, target_net, optimizer, batch, policy.apply_transform)
# Update target network
if (timestep + 1) % cfg.target_update_freq == 0:
target_net.load_state_dict(policy.policy_net.state_dict())
step_time = time.time() - start_time
################################################################################
# Logging
# Meters
meters.update('step_time', step_time)
if timestep >= cfg.learning_starts:
for name, val in train_info.items():
meters.update(name, val)
if done:
for name in meters.get_names():
train_summary_writer.add_scalar(name, meters.avg(name), timestep + 1)
eta_seconds = meters.avg('step_time') * (total_timesteps_with_warm_up - timestep)
meters.reset()
train_summary_writer.add_scalar('episodes', episode, timestep + 1)
train_summary_writer.add_scalar('eta_hours', eta_seconds / 3600, timestep + 1)
for name in ['cumulative_cubes', 'cumulative_distance', 'cumulative_reward']:
train_summary_writer.add_scalar(name, info[name], timestep + 1)
# Visualize Q-network outputs
if timestep >= cfg.learning_starts and not cfg.use_steering_commands:
random_state = random.choice(replay_buffer.buffer).state
_, info = policy.step(random_state, debug=True)
output = info['output'].cpu().numpy()
visualization = utils.get_state_and_output_visualization(random_state, output).transpose((2, 0, 1))
visualization_summary_writer.add_image('output', visualization, timestep + 1)
################################################################################
# Checkpointing
if (timestep + 1) % cfg.checkpoint_freq == 0 or timestep + 1 == total_timesteps_with_warm_up:
# Save model
if not checkpoint_dir.exists():
checkpoint_dir.mkdir(parents=True, exist_ok=True)
model_name = 'model_{:08d}.pth.tar'.format(timestep + 1)
torch.save({
'timestep': timestep + 1,
'state_dict': policy.policy_net.state_dict(),
}, str(checkpoint_dir / model_name))
# Save checkpoint
checkpoint_name = 'checkpoint_{:08d}.pth.tar'.format(timestep + 1)
torch.save({
'timestep': timestep + 1,
'episode': episode,
'optimizer': optimizer.state_dict(),
'replay_buffer': replay_buffer,
}, str(checkpoint_dir / checkpoint_name))
# Save updated config file
cfg.model_path = str(checkpoint_dir / model_name)
cfg.checkpoint_path = str(checkpoint_dir / checkpoint_name)
utils.write_config(cfg, log_dir / 'config.yml')
# Remove old checkpoint
old_checkpoint_path = checkpoint_dir / 'checkpoint_{:08d}.pth.tar'.format((timestep + 1) - cfg.checkpoint_freq)
if old_checkpoint_path.exists():
old_checkpoint_path.unlink()
env.close()
# Create file to indicate training completed
(log_dir / 'success').touch()
class AverageMeter:
def __init__(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
class Meters:
def __init__(self):
self.meters = {}
def get_names(self):
return self.meters.keys()
def reset(self):
for _, meter in self.meters.items():
meter.reset()
def update(self, name, val):
if name not in self.meters:
self.meters[name] = AverageMeter()
self.meters[name].update(val)
def avg(self, name):
return self.meters[name].avg
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('config_path')
config_path = parser.parse_args().config_path
config_path = utils.setup_run(config_path)
main(utils.read_config(config_path))
|
[
"argparse.ArgumentParser",
"random.sample",
"utils.read_config",
"torch.nn.functional.smooth_l1_loss",
"pathlib.Path",
"torch.no_grad",
"utils.get_env_from_cfg",
"utils.write_config",
"torch.load",
"utils.get_state_and_output_visualization",
"torch.zeros",
"torch.cuda.is_available",
"torch.pow",
"utils.setup_run",
"random.choice",
"time.time",
"collections.namedtuple",
"torch.abs",
"torch.tensor"
] |
[((556, 642), 'collections.namedtuple', 'namedtuple', (['"""Transition"""', "('state', 'action', 'reward', 'ministeps', 'next_state')"], {}), "('Transition', ('state', 'action', 'reward', 'ministeps',\n 'next_state'))\n", (566, 642), False, 'from collections import namedtuple\n'), ((2063, 2126), 'torch.zeros', 'torch.zeros', (['cfg.batch_size'], {'dtype': 'torch.float32', 'device': 'device'}), '(cfg.batch_size, dtype=torch.float32, device=device)\n', (2074, 2126), False, 'import torch\n'), ((3042, 3107), 'torch.nn.functional.smooth_l1_loss', 'smooth_l1_loss', (['state_action_values', 'expected_state_action_values'], {}), '(state_action_values, expected_state_action_values)\n', (3056, 3107), False, 'from torch.nn.functional import smooth_l1_loss\n'), ((3599, 3616), 'pathlib.Path', 'Path', (['cfg.log_dir'], {}), '(cfg.log_dir)\n', (3603, 3616), False, 'from pathlib import Path\n'), ((3638, 3662), 'pathlib.Path', 'Path', (['cfg.checkpoint_dir'], {}), '(cfg.checkpoint_dir)\n', (3642, 3662), False, 'from pathlib import Path\n'), ((3877, 3914), 'utils.get_env_from_cfg', 'utils.get_env_from_cfg', (['cfg'], {}), '(cfg, **kwargs)\n', (3899, 3914), False, 'import utils\n'), ((10359, 10384), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10382, 10384), False, 'import argparse\n'), ((10492, 10520), 'utils.setup_run', 'utils.setup_run', (['config_path'], {}), '(config_path)\n', (10507, 10520), False, 'import utils\n'), ((1060, 1098), 'random.sample', 'random.sample', (['self.buffer', 'batch_size'], {}), '(self.buffer, batch_size)\n', (1073, 1098), False, 'import random\n'), ((4357, 4388), 'torch.load', 'torch.load', (['cfg.checkpoint_path'], {}), '(cfg.checkpoint_path)\n', (4367, 4388), False, 'import torch\n'), ((5336, 5347), 'time.time', 'time.time', ([], {}), '()\n', (5345, 5347), False, 'import time\n'), ((10530, 10560), 'utils.read_config', 'utils.read_config', (['config_path'], {}), '(config_path)\n', (10547, 10560), False, 'import utils\n'), 
((1312, 1337), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1335, 1337), False, 'import torch\n'), ((1470, 1514), 'torch.tensor', 'torch.tensor', (['batch.action'], {'dtype': 'torch.long'}), '(batch.action, dtype=torch.long)\n', (1482, 1514), False, 'import torch\n'), ((1554, 1601), 'torch.tensor', 'torch.tensor', (['batch.reward'], {'dtype': 'torch.float32'}), '(batch.reward, dtype=torch.float32)\n', (1566, 1601), False, 'import torch\n'), ((1644, 1694), 'torch.tensor', 'torch.tensor', (['batch.ministeps'], {'dtype': 'torch.float32'}), '(batch.ministeps, dtype=torch.float32)\n', (1656, 1694), False, 'import torch\n'), ((2308, 2323), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2321, 2323), False, 'import torch\n'), ((2858, 2905), 'torch.pow', 'torch.pow', (['cfg.discount_factor', 'ministeps_batch'], {}), '(cfg.discount_factor, ministeps_batch)\n', (2867, 2905), False, 'import torch\n'), ((2951, 3012), 'torch.abs', 'torch.abs', (['(state_action_values - expected_state_action_values)'], {}), '(state_action_values - expected_state_action_values)\n', (2960, 3012), False, 'import torch\n'), ((6474, 6485), 'time.time', 'time.time', ([], {}), '()\n', (6483, 6485), False, 'import time\n'), ((9095, 9142), 'utils.write_config', 'utils.write_config', (['cfg', "(log_dir / 'config.yml')"], {}), "(cfg, log_dir / 'config.yml')\n", (9113, 9142), False, 'import utils\n'), ((7572, 7607), 'random.choice', 'random.choice', (['replay_buffer.buffer'], {}), '(replay_buffer.buffer)\n', (7585, 7607), False, 'import random\n'), ((7764, 7826), 'utils.get_state_and_output_visualization', 'utils.get_state_and_output_visualization', (['random_state', 'output'], {}), '(random_state, output)\n', (7804, 7826), False, 'import utils\n')]
|
"""
dataclasses モジュールのサンプルです.
fronzen プロパティの指定について
REFERENCESS:: http://bit.ly/2KTZynw
http://bit.ly/2KJCnwk
http://bit.ly/2KHeNA9
http://bit.ly/2KFLGxc
"""
import dataclasses as dc
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
@dc.dataclass(frozen=True)
class Data1:
name: str
unit_price: float
quantity: int = 0
def total_cost(self) -> float:
return self.unit_price * self.quantity
class Sample(SampleBase):
def exec(self):
obj = Data1(name='test', unit_price=300.5)
try:
# --------------------------------------------------------
# frozen 指定している dataclass は値の設定が出来ないようになる.
# dataclasses.FrozenInstanceError が発生する.
# --------------------------------------------------------
# noinspection PyDataclass
obj.quantity = 5
except dc.FrozenInstanceError as e:
pr('frozen な dataclass に値を設定', e)
def go():
obj = Sample()
obj.exec()
|
[
"trypython.common.commonfunc.pr",
"dataclasses.dataclass"
] |
[((323, 348), 'dataclasses.dataclass', 'dc.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (335, 348), True, 'import dataclasses as dc\n'), ((990, 1023), 'trypython.common.commonfunc.pr', 'pr', (['"""frozen な dataclass に値を設定"""', 'e'], {}), "('frozen な dataclass に値を設定', e)\n", (992, 1023), False, 'from trypython.common.commonfunc import pr\n')]
|
import numpy as np
import scipy.io as sio
from GPy_ABCD.Models.modelSearch import *
from testConsistency import save_one_run
if __name__ == '__main__':
# np.seterr(all='raise') # Raise exceptions instead of RuntimeWarnings. The exceptions can then be caught by the debugger
datasets = ['01-airline', '02-solar', '03-mauna', '04-wheat', '05-temperature', '06-internet', '07-call-centre', '08-radio', '09-gas-production', '10-sulphuric', '11-unemployment', '12-births', '13-wages']
dataset_name = datasets[1-1]
data = sio.loadmat(f'./Data/{dataset_name}.mat')
# print(data.keys())
X = data['X']
Y = data['y']
sorted_models, tested_models, tested_k_exprs, expanded, not_expanded = explore_model_space(X, Y, start_kernels = standard_start_kernels, p_rules = production_rules_all,
restarts = 3, utility_function = 'BIC', rounds = 3, buffer = 2, dynamic_buffer = True, verbose = True, parallel = True)
for mod_depth in tested_models: print(', '.join([str(mod.kernel_expression) for mod in mod_depth]) + f'\n{len(mod_depth)}')
from matplotlib import pyplot as plt
for bm in sorted_models[:3]:
print(bm.kernel_expression)
print(bm.model.kern)
print(bm.model.log_likelihood())
print(bm.cached_utility_function)
bm.model.plot()
print(bm.interpret())
plt.show()
save_one_run(dataset_name, 'UNKNOWN', sorted_models, tested_models, tested_k_exprs)
|
[
"testConsistency.save_one_run",
"matplotlib.pyplot.show",
"scipy.io.loadmat"
] |
[((537, 578), 'scipy.io.loadmat', 'sio.loadmat', (['f"""./Data/{dataset_name}.mat"""'], {}), "(f'./Data/{dataset_name}.mat')\n", (548, 578), True, 'import scipy.io as sio\n'), ((1399, 1409), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1407, 1409), True, 'from matplotlib import pyplot as plt\n'), ((1415, 1502), 'testConsistency.save_one_run', 'save_one_run', (['dataset_name', '"""UNKNOWN"""', 'sorted_models', 'tested_models', 'tested_k_exprs'], {}), "(dataset_name, 'UNKNOWN', sorted_models, tested_models,\n tested_k_exprs)\n", (1427, 1502), False, 'from testConsistency import save_one_run\n')]
|
import torch
import torch.nn as nn
from .mpd import MultiPeriodDiscriminator
from .mrd import MultiResolutionDiscriminator
from omegaconf import OmegaConf
class Discriminator(nn.Module):
def __init__(self, hp):
super(Discriminator, self).__init__()
self.MRD = MultiResolutionDiscriminator(hp)
self.MPD = MultiPeriodDiscriminator(hp)
def forward(self, x):
return self.MRD(x), self.MPD(x)
if __name__ == '__main__':
hp = OmegaConf.load('../config/default.yaml')
model = Discriminator(hp)
x = torch.randn(3, 1, 16384)
print(x.shape)
mrd_output, mpd_output = model(x)
for features, score in mpd_output:
for feat in features:
print(feat.shape)
print(score.shape)
pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(pytorch_total_params)
|
[
"omegaconf.OmegaConf.load",
"torch.randn"
] |
[((467, 507), 'omegaconf.OmegaConf.load', 'OmegaConf.load', (['"""../config/default.yaml"""'], {}), "('../config/default.yaml')\n", (481, 507), False, 'from omegaconf import OmegaConf\n'), ((547, 571), 'torch.randn', 'torch.randn', (['(3)', '(1)', '(16384)'], {}), '(3, 1, 16384)\n', (558, 571), False, 'import torch\n')]
|
import re
data = input()
pattern = r"\b([A-Z][a-z]+)\s([A-Z][a-z]+)\b"
matches = re.finditer(pattern, data)
for match in matches:
print(match.group(), end=" ")
|
[
"re.finditer"
] |
[((83, 109), 're.finditer', 're.finditer', (['pattern', 'data'], {}), '(pattern, data)\n', (94, 109), False, 'import re\n')]
|
# -*- coding: utf-8 -*-
__author__ = "Yuchen"
__aim__ = 'rank top sentences in one topic'
__testCase__ = "../test/test_rankingTFIDF.py"
from sklearn.feature_extraction.text import CountVectorizer,TfidfTransformer
import sys
import argparse
import numpy as np
from termcolor import colored
from sklearn.metrics.pairwise import cosine_similarity
import operator
sys.path.append(r"../..")
from pushkin_gs.sum import tfidf_contentWords
class TFIDF(object):
def __init__(self, train_data, contentWords, topN, targCorpus):
"""
:param train_data: in 'tfidf_contentWords.py' file. after processing step, return a 'targData' dataset, use it to train sentence_tfidf_score
:param contentWords: in 'tfidf_contentWords.py' file. get 'contentWords' for each topic
:param topN: N sentence to summary the doc
:param targCorpus: return top N sentence from init corpus
"""
self.train_data = train_data
self.contentWords = contentWords
self.topN = topN
self.targCorpus = targCorpus
def SentRankTFIDF(self):
"""
:return: tfidfArray: [[0.12, 0.99, 0.24]
[0.4, 0.3, 0.4, 0.33, ..]...]
"""
"""#tfidf
#根据bag of words的原理计算corpus的词频矩阵,把每个句子(即矩阵的每一行)看做一个vector,计算每个vector(句子)在全部corpus中的tfidf值,每个句子的tfidf值是矩阵的每个行向量
"""
print ("func: SentRankTFIDF")
# convert corpus to term(word)_vectors
vectorizer = CountVectorizer()
# calculate appear times for each word
term_freq_matrix = vectorizer.fit_transform(self.train_data)
# get all terms(words) from corpus
termList = vectorizer.get_feature_names()
# 将词频矩阵term_freq_matrix统计成TF-IDF值
# calculate tfidf value for each sentence using term_freq_matrix
transformer = TfidfTransformer()
tfidf = transformer.fit_transform(term_freq_matrix)
# tfidf[i][j] is sentence[i]'s tfidf value
# 查看数据结构 tfidf[i][j]表示i类文本中的tf-idf权重
tfidfArray = tfidf.toarray()
# print (tfidf.toarray())
"""#claculate sentence score
##only summing tfidf values where the words belong to contentWords##
根据上面求得的sentence tfidf矩阵(tfidfArray),加和求每一行(每个句子)的tfidf value,
不是全部相加,只是把代表content words的值加起来
Finally, 每个句子的tfidf分数除以整个文章tfidf总分数,即是该句子的ranking(sentRanking[i] = sentValueList[i]/docTfidfScore)
"""
# content words in each sentence
contWoEachSent = [[w for w in self.contentWords if w in sent.lower().split()]
for sent in self.train_data]
# content words index(termList) in each sentence
contWoIndex = [[[termList.index(w)] for w in self.contentWords if w in sent.lower().split()]
for sent in self.train_data]
print (' content words in each sentence',contWoEachSent,'\n','content words index in each sent',contWoIndex)
# calculate tfidf value for each sentence, return a score list for all sentence(sentValueList)
sentValueList = []
for i,index in enumerate(contWoIndex):
sentValue = sum(tfidfArray[i,index])
sentValueList.append(float(sentValue))
print (' sentValueList',sentValueList)
# sentence ranking #normalization
sentRanking = [value/max(sentValueList) for value in sentValueList]
sentRanking = np.array(sentRanking)
# print ("sentRanking",sentRanking[np.argsort(-sentRanking)])
topNSent = [self.targCorpus[rank] for rank in np.argsort(-sentRanking)[:-1]]
topNProcess = [self.train_data[rank] for rank in np.argsort(-sentRanking)[:-1]]
dicTop = np.c_[sentRanking[np.argsort(-sentRanking)[:-1]],topNProcess,topNSent]
print (' sent with score',dicTop[:2])
print ("....")
print ('-'*200)
self.dicTop = dicTop
return dicTop
# calculate Similarity score each sentence with whole documents
def calculateSimilarity(self, sentence, doc):
if doc == []:
return 0
vocab = {}
for word in sentence[:-1].split():
vocab[word] = 0
docInOneSentence = ''
for t in doc:
docInOneSentence += (t + ' ')
for word in t[:-1].split():
vocab[word] = 0
cv = CountVectorizer(vocabulary=vocab.keys())
docVector = cv.fit_transform([docInOneSentence])
sentenceVector = cv.fit_transform([sentence])
return cosine_similarity(docVector, sentenceVector)[0][0]
def MMR(self, dicTopSentence):
print("func: MMR")
##惩罚因子
##score = a * i[2] + (1 - a) * similarity(i[sentence], (i - 1)[sentence])
n = 20 * len(self.targCorpus) / 100
alpha = 0.5
summarySet = []
temset = []
while n > 0:
mmr = {}
for sentence in dicTopSentence:
if not sentence[1] in temset:
# print (self.calculateSimilarity(sentence[1],summarySet))
mmr[sentence[1]] = alpha * float(sentence[0]) - (1 - alpha) * self.calculateSimilarity(sentence[1], temset)
selected = max(mmr.items(), key=operator.itemgetter(1))[0]
# print (selected)
temset.append(selected)
n -= 1
for temsents in temset:
summarySet.append(''.join([sent[2] for sent in self.dicTop if sent[1] == temsents]))
print ('\nTotal Sentences', colored(len(self.train_data),'red'))
print ('Top', colored(len(summarySet),'red') ,'sentences:')
for sent in enumerate(summarySet):
print (sent)
print ("**"*100)
return summarySet
def main():
"""
python rankingTFIDF.py --topic bmt_2.txt --contentWordNumber 100
:predefine:
:--allData: X.txt file, which contain (target1 polarity1\tsent1\ntarget2 polarity2\tsent2\n )
:--topic: bmt_0.txt, which contain (sent1 sent2 ... sentn)
"""
parser = argparse.ArgumentParser()
parser.add_argument('--topic', default='', help="target topic")
parser.add_argument('--contentWordNumber', default='', help="threshold for content Word Number")
parser.add_argument('--returnNSents', default='', help="top N sentences")
args = parser.parse_args()
targetTweets, targData, contentWords = tfidf_contentWords.main()
for key in targData:
trainData = targData[key].split(".")
# init corpus: finally return top N sentence from init corpus
for key in targetTweets:
initCorpus = targetTweets[key].split('\n')
instance = TFIDF(trainData, contentWords, args.returnNSents, initCorpus)
topSent = instance.SentRankTFIDF()
instance.MMR(topSent)
if __name__ == '__main__':
"""
python rankingTFIDF.py --topic bmt_2.txt --contentWordNumber 100 (--returnNSents 2)
"""
main()
|
[
"sys.path.append",
"pushkin_gs.sum.tfidf_contentWords.main",
"sklearn.feature_extraction.text.CountVectorizer",
"sklearn.metrics.pairwise.cosine_similarity",
"argparse.ArgumentParser",
"numpy.argsort",
"numpy.array",
"operator.itemgetter",
"sklearn.feature_extraction.text.TfidfTransformer"
] |
[((361, 385), 'sys.path.append', 'sys.path.append', (['"""../.."""'], {}), "('../..')\n", (376, 385), False, 'import sys\n'), ((6010, 6035), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6033, 6035), False, 'import argparse\n'), ((6357, 6382), 'pushkin_gs.sum.tfidf_contentWords.main', 'tfidf_contentWords.main', ([], {}), '()\n', (6380, 6382), False, 'from pushkin_gs.sum import tfidf_contentWords\n'), ((1469, 1486), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {}), '()\n', (1484, 1486), False, 'from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\n'), ((1833, 1851), 'sklearn.feature_extraction.text.TfidfTransformer', 'TfidfTransformer', ([], {}), '()\n', (1849, 1851), False, 'from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\n'), ((3398, 3419), 'numpy.array', 'np.array', (['sentRanking'], {}), '(sentRanking)\n', (3406, 3419), True, 'import numpy as np\n'), ((4497, 4541), 'sklearn.metrics.pairwise.cosine_similarity', 'cosine_similarity', (['docVector', 'sentenceVector'], {}), '(docVector, sentenceVector)\n', (4514, 4541), False, 'from sklearn.metrics.pairwise import cosine_similarity\n'), ((3545, 3569), 'numpy.argsort', 'np.argsort', (['(-sentRanking)'], {}), '(-sentRanking)\n', (3555, 3569), True, 'import numpy as np\n'), ((3633, 3657), 'numpy.argsort', 'np.argsort', (['(-sentRanking)'], {}), '(-sentRanking)\n', (3643, 3657), True, 'import numpy as np\n'), ((3700, 3724), 'numpy.argsort', 'np.argsort', (['(-sentRanking)'], {}), '(-sentRanking)\n', (3710, 3724), True, 'import numpy as np\n'), ((5201, 5223), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (5220, 5223), False, 'import operator\n')]
|
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Imports
#
from typing import Any, Dict, Optional
import pyrogram
from telegram_payment_bot.config.configurable_object import ConfigurableObject
from telegram_payment_bot.logger.logger import Logger
from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter
from telegram_payment_bot.misc.helpers import MemberHelper
from telegram_payment_bot.misc.user import User
from telegram_payment_bot.payment.payments_loader_base import PaymentsLoaderBase
from telegram_payment_bot.payment.payments_loader_factory import PaymentsLoaderFactory
from telegram_payment_bot.payment.payments_data import SinglePayment, PaymentsData
#
# Classes
#
# Members payment getter class
class MembersPaymentGetter:
client: pyrogram.Client
config: ConfigurableObject
logger: Logger
payments_loader: PaymentsLoaderBase
payments_cache: Optional[PaymentsData]
single_payment_cache: Optional[Dict[str, Any]]
# Constructor
def __init__(self,
client: pyrogram.Client,
config: ConfigurableObject,
logger: Logger) -> None:
self.client = client
self.config = config
self.logger = logger
self.payments_loader = PaymentsLoaderFactory(config, logger).CreateLoader()
self.payments_cache = None
self.single_payment_cache = None
self.ReloadPayment()
# Reload payment
def ReloadPayment(self):
self.payments_cache = None
self.single_payment_cache = None
# Get all members with OK payment
def GetAllMembersWithOkPayment(self,
chat: pyrogram.types.Chat) -> ChatMembersList:
# Get all payments
payments = self.__GetAllPayments()
# Filter chat members
return ChatMembersGetter(self.client).FilterMembers(
chat,
lambda member: (
MemberHelper.IsValidMember(member) and
member.user.username is not None and
not payments.IsExpiredByUser(User.FromUserObject(self.config, member.user))
)
)
# Get all members with expired payment
def GetAllMembersWithExpiredPayment(self,
chat: pyrogram.types.Chat) -> ChatMembersList:
# Get all payments
payments = self.__GetAllPayments()
# For safety: if no data was loaded, no user is expired
if payments.Empty():
return ChatMembersList()
# Filter chat members
return ChatMembersGetter(self.client).FilterMembers(
chat,
lambda member: (
MemberHelper.IsValidMember(member) and
(member.user.username is None or
payments.IsExpiredByUser(User.FromUserObject(self.config, member.user)))
)
)
# Get all members with expiring payment
def GetAllMembersWithExpiringPayment(self,
chat: pyrogram.types.Chat,
days: int) -> ChatMembersList:
# Get all payments
payments = self.__GetAllPayments()
# For safety: if no data was loaded, no user is expired
if payments.Empty():
return ChatMembersList()
# Filter chat members
return ChatMembersGetter(self.client).FilterMembers(
chat,
lambda member: (
MemberHelper.IsValidMember(member) and
(member.user.username is None or
payments.IsExpiringInDaysByUser(User.FromUserObject(self.config, member.user), days))
)
)
# Get all emails with expired payment
def GetAllEmailsWithExpiredPayment(self) -> PaymentsData:
return self.__GetAllPayments().FilterExpired()
# Get all emails with expiring payment in the specified number of days
def GetAllEmailsWithExpiringPayment(self,
days: int) -> PaymentsData:
return self.__GetAllPayments().FilterExpiringInDays(days)
# Get if single member is expired
def IsSingleMemberExpired(self,
chat: pyrogram.types.Chat,
user: pyrogram.types.User) -> bool:
# If the user is not in the chat, consider payment as not expired
chat_members = ChatMembersGetter(self.client).GetSingle(chat, user)
if chat_members is None:
return False
# Get single payment
single_payment = self.__GetSinglePayment(user)
# If the user is not in the payment data, consider payment as expired
return single_payment.IsExpired() if single_payment is not None else True
# Get all payments
def __GetAllPayments(self) -> PaymentsData:
# Load only the first time
if self.payments_cache is None:
self.payments_cache = self.payments_loader.LoadAll()
return self.payments_cache
# Get single payment
def __GetSinglePayment(self,
user: pyrogram.types.User) -> Optional[SinglePayment]:
# Load only the first time
if self.single_payment_cache is None or self.single_payment_cache["user_id"] != user.id:
self.single_payment_cache = {
"payment": self.payments_loader.LoadSingleByUser(User.FromUserObject(self.config, user)),
"user_id": user.id,
}
return self.single_payment_cache["payment"]
|
[
"telegram_payment_bot.misc.chat_members.ChatMembersList",
"telegram_payment_bot.misc.helpers.MemberHelper.IsValidMember",
"telegram_payment_bot.misc.user.User.FromUserObject",
"telegram_payment_bot.payment.payments_loader_factory.PaymentsLoaderFactory",
"telegram_payment_bot.misc.chat_members.ChatMembersGetter"
] |
[((3551, 3568), 'telegram_payment_bot.misc.chat_members.ChatMembersList', 'ChatMembersList', ([], {}), '()\n', (3566, 3568), False, 'from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter\n'), ((4341, 4358), 'telegram_payment_bot.misc.chat_members.ChatMembersList', 'ChatMembersList', ([], {}), '()\n', (4356, 4358), False, 'from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter\n'), ((2310, 2347), 'telegram_payment_bot.payment.payments_loader_factory.PaymentsLoaderFactory', 'PaymentsLoaderFactory', (['config', 'logger'], {}), '(config, logger)\n', (2331, 2347), False, 'from telegram_payment_bot.payment.payments_loader_factory import PaymentsLoaderFactory\n'), ((2874, 2904), 'telegram_payment_bot.misc.chat_members.ChatMembersGetter', 'ChatMembersGetter', (['self.client'], {}), '(self.client)\n', (2891, 2904), False, 'from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter\n'), ((3615, 3645), 'telegram_payment_bot.misc.chat_members.ChatMembersGetter', 'ChatMembersGetter', (['self.client'], {}), '(self.client)\n', (3632, 3645), False, 'from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter\n'), ((4405, 4435), 'telegram_payment_bot.misc.chat_members.ChatMembersGetter', 'ChatMembersGetter', (['self.client'], {}), '(self.client)\n', (4422, 4435), False, 'from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter\n'), ((5440, 5470), 'telegram_payment_bot.misc.chat_members.ChatMembersGetter', 'ChatMembersGetter', (['self.client'], {}), '(self.client)\n', (5457, 5470), False, 'from telegram_payment_bot.misc.chat_members import ChatMembersList, ChatMembersGetter\n'), ((2983, 3017), 'telegram_payment_bot.misc.helpers.MemberHelper.IsValidMember', 'MemberHelper.IsValidMember', (['member'], {}), '(member)\n', (3009, 3017), False, 'from telegram_payment_bot.misc.helpers import MemberHelper\n'), ((3724, 3758), 
'telegram_payment_bot.misc.helpers.MemberHelper.IsValidMember', 'MemberHelper.IsValidMember', (['member'], {}), '(member)\n', (3750, 3758), False, 'from telegram_payment_bot.misc.helpers import MemberHelper\n'), ((4514, 4548), 'telegram_payment_bot.misc.helpers.MemberHelper.IsValidMember', 'MemberHelper.IsValidMember', (['member'], {}), '(member)\n', (4540, 4548), False, 'from telegram_payment_bot.misc.helpers import MemberHelper\n'), ((6424, 6462), 'telegram_payment_bot.misc.user.User.FromUserObject', 'User.FromUserObject', (['self.config', 'user'], {}), '(self.config, user)\n', (6443, 6462), False, 'from telegram_payment_bot.misc.user import User\n'), ((3120, 3165), 'telegram_payment_bot.misc.user.User.FromUserObject', 'User.FromUserObject', (['self.config', 'member.user'], {}), '(self.config, member.user)\n', (3139, 3165), False, 'from telegram_payment_bot.misc.user import User\n'), ((3854, 3899), 'telegram_payment_bot.misc.user.User.FromUserObject', 'User.FromUserObject', (['self.config', 'member.user'], {}), '(self.config, member.user)\n', (3873, 3899), False, 'from telegram_payment_bot.misc.user import User\n'), ((4651, 4696), 'telegram_payment_bot.misc.user.User.FromUserObject', 'User.FromUserObject', (['self.config', 'member.user'], {}), '(self.config, member.user)\n', (4670, 4696), False, 'from telegram_payment_bot.misc.user import User\n')]
|
##########################################################################
#
# Copyright (c) 2019, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of <NAME> nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import functools
import json
import time
import collections
import Gaffer
# TestRunner capable of measuring performance of certain
# tests and failing them if they contain regressions compared
# to previous results.
class TestRunner( unittest.TextTestRunner ) :
def __init__( self, previousResultsFile = "" ) :
unittest.TextTestRunner.__init__(
self,
verbosity = 2,
)
self.__previousResultsFile = previousResultsFile
# Decorator used to annotate tests which measure performance.
class PerformanceTestMethod( object ) :
def __init__( self, repeat = 3, acceptableDifference = 0.01 ) :
self.__repeat = repeat
self.__acceptableDifference = acceptableDifference
# Called to return the decorated method.
def __call__( self, method ) :
@functools.wraps( method )
def wrapper( *args, **kw ) :
timings = []
for i in range( 0, self.__repeat ) :
Gaffer.ValuePlug.clearCache() # Put each iteration on an equal footing
TestRunner.PerformanceScope._total = None
t = time.time()
result = method( *args, **kw )
totalTime = time.time() - t
scopedTime = TestRunner.PerformanceScope._total
timings.append( scopedTime if scopedTime is not None else totalTime )
# Stash timings so they can be recovered
# by TestRunner.__Result.
args[0].timings = timings
# If previous timings are available, then
# compare against them and throw if a regression
# is detected.
previousTimings = getattr( args[0], "previousTimings" )
if previousTimings :
args[0].assertLessEqual( min( timings ), min( previousTimings ) + self.__acceptableDifference )
return result
wrapper.performanceTestMethod = True
return wrapper
# Context manager used to time only specific blocks
# within a PerformanceTestMethod.
class PerformanceScope( object ) :
# Protected to allow access by PerformanceTestMethod.
_total = None
def __enter__( self ) :
self.__startTime = time.time()
def __exit__( self, type, value, traceBack ) :
t = time.time() - self.__startTime
if TestRunner.PerformanceScope._total is not None :
TestRunner.PerformanceScope._total += t
else :
TestRunner.PerformanceScope._total = t
def run( self, test ) :
result = unittest.TextTestRunner.run( self, test )
result.writePerformance()
return result
# Adds a skip decorator to all non-performance-related tests.
@staticmethod
def filterPerformanceTests( test ) :
if isinstance( test, unittest.TestSuite ) :
for t in test :
TestRunner.filterPerformanceTests( t )
elif isinstance( test, unittest.TestCase ) :
testMethod = getattr( test, test._testMethodName )
if not getattr( testMethod, "performanceTestMethod", False ) :
setattr(
test, test._testMethodName,
unittest.skip( "Not a performance test" )( testMethod )
)
def _makeResult( self ) :
return self.__Result(
self.stream, self.descriptions, self.verbosity,
previousResultsFile = self.__previousResultsFile
)
class __Result( unittest.TextTestResult ) :
def __init__( self, stream, descriptions, verbosity, previousResultsFile ) :
unittest.TextTestResult.__init__( self, stream, descriptions, verbosity )
self.__results = collections.OrderedDict()
if previousResultsFile :
with open( previousResultsFile ) as f :
self.__previousResults = json.load( f )
else :
self.__previousResults = {}
self.__performanceImprovements = []
# Methods unique to __Result
def save( self, fileName ) :
with open( fileName, "w" ) as f :
json.dump( self.__results, f, indent = 4 )
def writePerformance( self ) :
if not len( self.__performanceImprovements ) :
return
self.stream.write( "{0}\n".format( self.separator2 ) )
self.stream.write( "{n} Performance Improvement{s} :\n\n".format(
n = len( self.__performanceImprovements ),
s = "s" if len( self.__performanceImprovements ) > 1 else ""
) )
for s in self.__performanceImprovements :
self.stream.write( "{}\n".format( s ) )
# Overrides for TextTestResult methods
def startTest( self, test ) :
previousResults = self.__previousResults.get( str( test ), {} )
test.previousTimings = previousResults.get( "timings", [] )
unittest.TextTestResult.startTest( self, test )
def addSuccess( self, test ) :
unittest.TextTestResult.addSuccess( self, test )
timings = getattr( test, "timings", None )
if timings and test.previousTimings :
new = min( timings )
old = min( test.previousTimings )
reduction = 100 * (old-new)/old
if reduction > 2 :
self.__performanceImprovements.append(
"- {test} : was {old:.2f}s now {new:.2f}s ({reduction:.0f}% reduction)".format(
test = str( test), old = old, new = new, reduction = reduction
)
)
self.__addResult( test, "success" )
def addError( self, test, error ) :
unittest.TextTestResult.addError( self, test, error )
self.__addResult( test, "error" )
def addFailure( self, test, error ) :
unittest.TextTestResult.addFailure( self, test, error )
self.__addResult( test, "failure" )
def wasSuccessful( self ) :
return unittest.TextTestResult.wasSuccessful( self )
# Private methods
def __addResult( self, test, result ) :
d = {
"result" : result
}
timings = getattr( test, "timings", None )
if timings :
d["timings"] = timings
self.__results[str(test)] = d
|
[
"json.dump",
"Gaffer.ValuePlug.clearCache",
"json.load",
"unittest.TextTestResult.wasSuccessful",
"unittest.TextTestResult.addSuccess",
"unittest.TextTestRunner.__init__",
"unittest.TextTestResult.addFailure",
"time.time",
"unittest.TextTestRunner.run",
"unittest.TextTestResult.startTest",
"unittest.skip",
"functools.wraps",
"unittest.TextTestResult.__init__",
"unittest.TextTestResult.addError",
"collections.OrderedDict"
] |
[((2132, 2183), 'unittest.TextTestRunner.__init__', 'unittest.TextTestRunner.__init__', (['self'], {'verbosity': '(2)'}), '(self, verbosity=2)\n', (2164, 2183), False, 'import unittest\n'), ((4071, 4110), 'unittest.TextTestRunner.run', 'unittest.TextTestRunner.run', (['self', 'test'], {}), '(self, test)\n', (4098, 4110), False, 'import unittest\n'), ((2584, 2607), 'functools.wraps', 'functools.wraps', (['method'], {}), '(method)\n', (2599, 2607), False, 'import functools\n'), ((3780, 3791), 'time.time', 'time.time', ([], {}), '()\n', (3789, 3791), False, 'import time\n'), ((4954, 5025), 'unittest.TextTestResult.__init__', 'unittest.TextTestResult.__init__', (['self', 'stream', 'descriptions', 'verbosity'], {}), '(self, stream, descriptions, verbosity)\n', (4986, 5025), False, 'import unittest\n'), ((5049, 5074), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (5072, 5074), False, 'import collections\n'), ((6067, 6112), 'unittest.TextTestResult.startTest', 'unittest.TextTestResult.startTest', (['self', 'test'], {}), '(self, test)\n', (6100, 6112), False, 'import unittest\n'), ((6153, 6199), 'unittest.TextTestResult.addSuccess', 'unittest.TextTestResult.addSuccess', (['self', 'test'], {}), '(self, test)\n', (6187, 6199), False, 'import unittest\n'), ((6710, 6761), 'unittest.TextTestResult.addError', 'unittest.TextTestResult.addError', (['self', 'test', 'error'], {}), '(self, test, error)\n', (6742, 6761), False, 'import unittest\n'), ((6846, 6899), 'unittest.TextTestResult.addFailure', 'unittest.TextTestResult.addFailure', (['self', 'test', 'error'], {}), '(self, test, error)\n', (6880, 6899), False, 'import unittest\n'), ((6983, 7026), 'unittest.TextTestResult.wasSuccessful', 'unittest.TextTestResult.wasSuccessful', (['self'], {}), '(self)\n', (7020, 7026), False, 'import unittest\n'), ((3850, 3861), 'time.time', 'time.time', ([], {}), '()\n', (3859, 3861), False, 'import time\n'), ((5381, 5419), 'json.dump', 'json.dump', (['self.__results', 
'f'], {'indent': '(4)'}), '(self.__results, f, indent=4)\n', (5390, 5419), False, 'import json\n'), ((2706, 2735), 'Gaffer.ValuePlug.clearCache', 'Gaffer.ValuePlug.clearCache', ([], {}), '()\n', (2733, 2735), False, 'import Gaffer\n'), ((2833, 2844), 'time.time', 'time.time', ([], {}), '()\n', (2842, 2844), False, 'import time\n'), ((5178, 5190), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5187, 5190), False, 'import json\n'), ((2898, 2909), 'time.time', 'time.time', ([], {}), '()\n', (2907, 2909), False, 'import time\n'), ((4602, 4641), 'unittest.skip', 'unittest.skip', (['"""Not a performance test"""'], {}), "('Not a performance test')\n", (4615, 4641), False, 'import unittest\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: Jared
"""
import pandas as pd
import pymongo
import json
from os import listdir
from os.path import isfile, join
import multiprocessing as mp
import numpy as np
import dbConfig
from builder.dummyCrystalBuilder import processDummyCrystals
from ml.feature import getCompFeature
def import_content(db, filename, collection):
data = pd.read_csv(filename)
data = data.dropna()
data_json = json.loads(data.to_json(orient='records'))
db[collection].insert_many(data_json)
def update_database(db, folder, collection):
filepaths = [f for f in listdir(folder) if
(isfile(join(folder, f)) and f.endswith('.csv'))]
db[collection].delete_many({})
for filename in filepaths:
import_content(db, folder + filename, collection)
print('Loading ' + str(db[collection].count()) +
' items from ' + collection + '...')
db[collection].aggregate([
{
"$lookup": {
"from": collection,
"localField": "crystal_id",
"foreignField": "crystal_id",
"as" : "fromItems"
}
},
{
"$replaceRoot": { "newRoot": { "$mergeObjects":
[ { "$arrayElemAt": [ "$fromItems", 0 ] },
"$$ROOT" ] } }
},
{ "$project": { "fromItems": 0 } },
{ "$out": collection + "_aggregated" }
])
print('Done.')
def parallelize(df, numProcesses, func):
df_split = np.array_split(df, numProcesses)
pool = mp.Pool(processes=numProcesses)
results = pool.map(func, df_split)
pool.close()
pool.join()
results_df = pd.concat(results)
return results_df
def process_features(db, **kwargs):
df = pd.DataFrame(list(db['qw_outputs_aggregated'].find()))
if dbConfig.dummy == True:
df = processDummyCrystals(df)
print('Processing Features... ')
df = df.drop(df[df['nIterations'] >= 201].index).copy()
if kwargs['numProcesses'] > 1:
feature = parallelize(df, kwargs['numProcesses'], getCompFeature)
else:
feature = getCompFeature(df)
print('Len features', len(feature.columns))
if dbConfig.saveFeatures == True:
feature.to_csv(dbConfig.saveFeaturesPath +
dbConfig.saveFeaturesFile, index=False)
print('Done.')
def getDB():
client = pymongo.MongoClient(dbConfig.host, dbConfig.port)
return(client['perovskites'])
def main():
db = getDB()
update_database(db, dbConfig.crystalDBFolder, 'qw_outputs')
process_features(db, numProcesses = 4)
update_database(db, dbConfig.featureDBFolder, 'features')
if __name__ == "__main__":
main()
|
[
"pymongo.MongoClient",
"os.listdir",
"builder.dummyCrystalBuilder.processDummyCrystals",
"pandas.read_csv",
"ml.feature.getCompFeature",
"multiprocessing.Pool",
"numpy.array_split",
"os.path.join",
"pandas.concat"
] |
[((397, 418), 'pandas.read_csv', 'pd.read_csv', (['filename'], {}), '(filename)\n', (408, 418), True, 'import pandas as pd\n'), ((1600, 1632), 'numpy.array_split', 'np.array_split', (['df', 'numProcesses'], {}), '(df, numProcesses)\n', (1614, 1632), True, 'import numpy as np\n'), ((1645, 1676), 'multiprocessing.Pool', 'mp.Pool', ([], {'processes': 'numProcesses'}), '(processes=numProcesses)\n', (1652, 1676), True, 'import multiprocessing as mp\n'), ((1777, 1795), 'pandas.concat', 'pd.concat', (['results'], {}), '(results)\n', (1786, 1795), True, 'import pandas as pd\n'), ((2528, 2577), 'pymongo.MongoClient', 'pymongo.MongoClient', (['dbConfig.host', 'dbConfig.port'], {}), '(dbConfig.host, dbConfig.port)\n', (2547, 2577), False, 'import pymongo\n'), ((1971, 1995), 'builder.dummyCrystalBuilder.processDummyCrystals', 'processDummyCrystals', (['df'], {}), '(df)\n', (1991, 1995), False, 'from builder.dummyCrystalBuilder import processDummyCrystals\n'), ((2237, 2255), 'ml.feature.getCompFeature', 'getCompFeature', (['df'], {}), '(df)\n', (2251, 2255), False, 'from ml.feature import getCompFeature\n'), ((631, 646), 'os.listdir', 'listdir', (['folder'], {}), '(folder)\n', (638, 646), False, 'from os import listdir\n'), ((676, 691), 'os.path.join', 'join', (['folder', 'f'], {}), '(folder, f)\n', (680, 691), False, 'from os.path import isfile, join\n')]
|
#ama_speech.py
import speech_recognition as spr
from gtts import gTTS
from playsound import playsound
from googletrans import Translator
import googletrans
import time
def RecognizeAndSpeech(sound1='th',sound2='zh-cn'):
time.sleep(2)
print('Recognizing..')
#print(googletrans.LANGUAGES)
##### RECOGNITION ######
rec = spr.Recognizer()
with spr.Microphone() as speak:
audio = rec.listen(speak)
try:
result = rec.recognize_google(audio,language=sound1)
print('Stop...')
print(result)
except:
print('Error We can not recognize your sound')
result = 'ERROR'
##### TRANSLATOR ######
LAM = Translator()
word = LAM.translate(result,dest=sound2)
print('Meaning: ',word.text)
##### TEXT TO SPEECH ######
tts = gTTS(text=word.text, lang=sound2)
tts.save('result.mp3')
playsound('result.mp3')
RecognizeAndSpeech('th','zh-cn')
RecognizeAndSpeech('th','de')
|
[
"playsound.playsound",
"gtts.gTTS",
"time.sleep",
"speech_recognition.Microphone",
"googletrans.Translator",
"speech_recognition.Recognizer"
] |
[((235, 248), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (245, 248), False, 'import time\n'), ((341, 357), 'speech_recognition.Recognizer', 'spr.Recognizer', ([], {}), '()\n', (355, 357), True, 'import speech_recognition as spr\n'), ((643, 655), 'googletrans.Translator', 'Translator', ([], {}), '()\n', (653, 655), False, 'from googletrans import Translator\n'), ((772, 805), 'gtts.gTTS', 'gTTS', ([], {'text': 'word.text', 'lang': 'sound2'}), '(text=word.text, lang=sound2)\n', (776, 805), False, 'from gtts import gTTS\n'), ((835, 858), 'playsound.playsound', 'playsound', (['"""result.mp3"""'], {}), "('result.mp3')\n", (844, 858), False, 'from playsound import playsound\n'), ((367, 383), 'speech_recognition.Microphone', 'spr.Microphone', ([], {}), '()\n', (381, 383), True, 'import speech_recognition as spr\n')]
|
from enum import (
Enum,
auto,
unique,
IntEnum,
Flag,
IntFlag,
)
import enum
@unique
class Color(Enum):
RED = auto()
BLUE = auto()
YELLOW = "yellow"
CYAN = 3
GREEN = auto()
print(Color.RED.value)
print(Color.GREEN.value)
|
[
"enum.auto"
] |
[((126, 132), 'enum.auto', 'auto', ([], {}), '()\n', (130, 132), False, 'from enum import Enum, auto, unique, IntEnum, Flag, IntFlag\n'), ((143, 149), 'enum.auto', 'auto', ([], {}), '()\n', (147, 149), False, 'from enum import Enum, auto, unique, IntEnum, Flag, IntFlag\n'), ((191, 197), 'enum.auto', 'auto', ([], {}), '()\n', (195, 197), False, 'from enum import Enum, auto, unique, IntEnum, Flag, IntFlag\n')]
|
import pytest
import allure
from hamcrest import *
from shared.data_generators import Generators
@allure.issue("SAN-71", "Drafts")
@pytest.mark.parametrize('d_user', ["2 users"], indirect=True)
class TestConfigSync:
""" Tests for synchronization, setting and getting configs
"""
@allure.title("Test for drafts")
@allure.testcase("XTE-11", "Test for drafts")
def test_drafts(self, d_user, update1):
"""
Test for drafts
"""
with allure.step("User1 send draft"):
updates = update1
msg = Generators.random_text_message()
d_user.send_draft(d_user.u1, d_user.outpeer1, draft_type='drafts_PRIVATE_', msg=msg)
params = d_user.get_parameters(d_user.u1)
print(params)
with allure.step("User1 gets draft, check draft body and chat id"):
for update in updates:
print(update)
if update.unboxed_update.HasField('updateParameterChanged'):
key = list(update.unboxed_update.updateParameterChanged.key.split('\n'))
id = key[1].split(' ')[1]
assert_that(int(id), equal_to(d_user.u2.user_info.user.id))
assert_that(update.unboxed_update.updateParameterChanged.value.value, equal_to(msg))
break
with allure.step("User1 delete draft"):
d_user.send_draft(d_user.u1, d_user.outpeer1, draft_type='drafts_PRIVATE_', msg=None)
with allure.step("User1 gets update with deletion of draft"):
for update in updates:
if update.unboxed_update.HasField('updateParameterChanged'):
assert_that(update.unboxed_update.updateParameterChanged.value.value is '')
break
with allure.step("Draft of User1 should be empty"):
params = d_user.get_parameters(d_user.u1)
assert_that(getattr(params.parameters[0], 'key'), is_not(''))
assert_that(getattr(params.parameters[0], 'value'), equal_to(''))
|
[
"allure.issue",
"shared.data_generators.Generators.random_text_message",
"allure.step",
"allure.testcase",
"allure.title",
"pytest.mark.parametrize"
] |
[((100, 132), 'allure.issue', 'allure.issue', (['"""SAN-71"""', '"""Drafts"""'], {}), "('SAN-71', 'Drafts')\n", (112, 132), False, 'import allure\n'), ((134, 195), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""d_user"""', "['2 users']"], {'indirect': '(True)'}), "('d_user', ['2 users'], indirect=True)\n", (157, 195), False, 'import pytest\n'), ((295, 326), 'allure.title', 'allure.title', (['"""Test for drafts"""'], {}), "('Test for drafts')\n", (307, 326), False, 'import allure\n'), ((332, 376), 'allure.testcase', 'allure.testcase', (['"""XTE-11"""', '"""Test for drafts"""'], {}), "('XTE-11', 'Test for drafts')\n", (347, 376), False, 'import allure\n'), ((482, 513), 'allure.step', 'allure.step', (['"""User1 send draft"""'], {}), "('User1 send draft')\n", (493, 513), False, 'import allure\n'), ((563, 595), 'shared.data_generators.Generators.random_text_message', 'Generators.random_text_message', ([], {}), '()\n', (593, 595), False, 'from shared.data_generators import Generators\n'), ((786, 847), 'allure.step', 'allure.step', (['"""User1 gets draft, check draft body and chat id"""'], {}), "('User1 gets draft, check draft body and chat id')\n", (797, 847), False, 'import allure\n'), ((1354, 1387), 'allure.step', 'allure.step', (['"""User1 delete draft"""'], {}), "('User1 delete draft')\n", (1365, 1387), False, 'import allure\n'), ((1500, 1555), 'allure.step', 'allure.step', (['"""User1 gets update with deletion of draft"""'], {}), "('User1 gets update with deletion of draft')\n", (1511, 1555), False, 'import allure\n'), ((1804, 1849), 'allure.step', 'allure.step', (['"""Draft of User1 should be empty"""'], {}), "('Draft of User1 should be empty')\n", (1815, 1849), False, 'import allure\n')]
|
import json
def checkForProcess(vmObject, processName):
vmObject.updateProcList()
if processName in ' '.join(vmObject.procList):
return True
else:
return False
def loadJsonFile(fileName):
try:
fileObject = open(fileName, 'r')
fileStr = fileObject.read()
fileObject.close()
except IOError as e:
print("UNABLE TO OPEN FILE: " + str(fileName) + '\n' + str(e))
return None
try:
fileDic = json.loads(fileStr)
except Exception as e:
print("UNABLE TO PARSE FILE: " + str(fileName) + '\n' + str(e))
return None
return fileDic
def makeVmList(vmServer, keywordArg, fileArg):
vmList = []
if fileArg != None:
vmFileObj = open(fileArg, 'r')
desiredVms = vmFileObj.read().splitlines()
vmFileObj.close()
vmServer.enumerateVms()
for vm in vmServer.vmList:
if vm.vmName in desiredVms:
vmList.append(vm)
if keywordArg != None:
vmServer.enumerateVms()
for vm in vmServer.vmList:
if keywordArg in vm.vmName:
vmList.append(vm)
return vmList
def waitForProcess(vmObject, procName, timeout = 600):
retVal = False
waitCount = 1
if timeout > 0:
waitCount = timeout/5
for i in range(waitCount):
vmObject.updateProcList()
if procName in ' '.join(vmObject.procList):
retVal = True
break
time.sleep(5)
return retVal
|
[
"json.loads"
] |
[((474, 493), 'json.loads', 'json.loads', (['fileStr'], {}), '(fileStr)\n', (484, 493), False, 'import json\n')]
|
import codecs
from typing import Optional
from sqlalchemy.engine import create_engine
from config import webapp_settings
from model import Session
codecs.register(
lambda name: codecs.lookup('utf8') if name == 'utf8mb4' else None)
class ConnectionPooling(object):
def __init__(self, **params):
self.engine = create_engine(webapp_settings['mysql_connection'], **params)
c = ConnectionPooling(max_overflow=50, pool_size=20, pool_recycle=3600, **webapp_settings.get('mysql_extra_param', {}))
class Connection:
execution_options = None
def __init__(self, execution_options=None):
self.execution_options = execution_options
self.s: Optional[Session] = None
def __enter__(self):
"""
create connection object
:return:
"""
self.engine = c.engine
Session.configure(bind=self.engine)
self.s = Session()
if self.execution_options:
self.s.connection(execution_options=self.execution_options)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""
remove session
"""
Session.remove()
def upsert_from_form(self, model_class, form):
args_dict = {}
for column in model_class.__table__.columns:
if hasattr(form, column.name) and getattr(form, column.name).data:
# TODO Noneや''の時に更新されない
args_dict[column.name] = getattr(form, column.name).data
if form.id.data:
# update (if form has id)
id_ = form.id.data
self.s.query(model_class).filter(model_class.id == id_).update(
args_dict, synchronize_session=False)
else:
# create
new_model = model_class(**args_dict)
self.s.add(new_model)
self.s.flush()
self.s.refresh(new_model)
id_ = new_model.id
self.s.commit()
return id_
|
[
"model.Session.remove",
"codecs.lookup",
"model.Session.configure",
"model.Session",
"config.webapp_settings.get",
"sqlalchemy.engine.create_engine"
] |
[((329, 389), 'sqlalchemy.engine.create_engine', 'create_engine', (["webapp_settings['mysql_connection']"], {}), "(webapp_settings['mysql_connection'], **params)\n", (342, 389), False, 'from sqlalchemy.engine import create_engine\n'), ((466, 510), 'config.webapp_settings.get', 'webapp_settings.get', (['"""mysql_extra_param"""', '{}'], {}), "('mysql_extra_param', {})\n", (485, 510), False, 'from config import webapp_settings\n'), ((842, 877), 'model.Session.configure', 'Session.configure', ([], {'bind': 'self.engine'}), '(bind=self.engine)\n', (859, 877), False, 'from model import Session\n'), ((895, 904), 'model.Session', 'Session', ([], {}), '()\n', (902, 904), False, 'from model import Session\n'), ((1139, 1155), 'model.Session.remove', 'Session.remove', ([], {}), '()\n', (1153, 1155), False, 'from model import Session\n'), ((184, 205), 'codecs.lookup', 'codecs.lookup', (['"""utf8"""'], {}), "('utf8')\n", (197, 205), False, 'import codecs\n')]
|
def polyFit(xData, yData, degree):
pass
fitValues = np.polyfit(xData, yData, degree)
yFit = np.zeros(len(xData))
for i in range(degree+1):
yFit = yFit + xData**(degree-i)*fitValues[i]
def function(x):
func = 0
for i in fitValues:
func = func*x + i
return func
return (fitValues,function)
if __name__ == "__main__":
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pltStyle # used for formatting the plots
# read some data
data = pd.read_csv("polyFit.csv", header=None, names=["x","y"])
# create a new figure object
fig = plt.figure()
# create axis and a new subplot within the figure
ax = fig.add_subplot(1, 1, 1)
# plot the measurement data
ax.plot(data.x, data.y,marker="+", label="Measurement data")
# add polynomial fits with different degrees
for i in range(1,7,1):
ax.plot(data.x, polyFit(data.x,data.y,i)[1](data.x), label="Polynomial fit degree = "+str(i))
# create the legend and set its position
ax.legend(loc="lower left")
# manually set the axes limits and label them
ax.set_xlim([0,12])
ax.set_ylim([-2,1.1])
ax.set_xlabel(r'x axis label using \TeX\ and SI-units such as an upright $\si{\micro}$')
ax.set_ylabel(r'unusual symbols {\"a} \c{s} \AE\ \~{n}')
ax.grid(True)
#plt.tight_layout()
plt.savefig("polyFit.png")
|
[
"pandas.read_csv",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.savefig",
"numpy.polyfit"
] |
[((60, 92), 'numpy.polyfit', 'np.polyfit', (['xData', 'yData', 'degree'], {}), '(xData, yData, degree)\n', (70, 92), True, 'import numpy as np\n'), ((554, 611), 'pandas.read_csv', 'pd.read_csv', (['"""polyFit.csv"""'], {'header': 'None', 'names': "['x', 'y']"}), "('polyFit.csv', header=None, names=['x', 'y'])\n", (565, 611), True, 'import pandas as pd\n'), ((655, 667), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (665, 667), True, 'import matplotlib.pyplot as plt\n'), ((1410, 1436), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""polyFit.png"""'], {}), "('polyFit.png')\n", (1421, 1436), True, 'import matplotlib.pyplot as plt\n')]
|
"""Minimal implementation of Wasserstein GAN for MNIST."""
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.examples.tutorials.mnist import input_data
import threading
from rendering import draw_figure, export_video
def leaky_relu(x):
return tf.maximum(x, 0.2 * x)
def generator(z):
with tf.variable_scope('generator'):
z = layers.fully_connected(z, num_outputs=4096)
z = tf.reshape(z, [-1, 4, 4, 256])
z = layers.conv2d_transpose(z, num_outputs=128, kernel_size=5, stride=2)
z = layers.conv2d_transpose(z, num_outputs=64, kernel_size=5, stride=2)
z = layers.conv2d_transpose(z, num_outputs=1, kernel_size=5, stride=2,
activation_fn=tf.nn.sigmoid)
return z[:, 2:-2, 2:-2, :]
def discriminator(x, reuse):
with tf.variable_scope('discriminator', reuse=reuse):
x = layers.conv2d(x, num_outputs=64, kernel_size=5, stride=2,
activation_fn=leaky_relu)
x = layers.conv2d(x, num_outputs=128, kernel_size=5, stride=2,
activation_fn=leaky_relu)
x = layers.conv2d(x, num_outputs=256, kernel_size=5, stride=2,
activation_fn=leaky_relu)
x = layers.flatten(x)
return layers.fully_connected(x, num_outputs=1, activation_fn=None)
############# Create Tensorflow Graph ###############
with tf.name_scope('placeholders'):
x_true = tf.placeholder(tf.float32, [None, 28, 28, 1])
z = tf.placeholder(tf.float32, [None, 128])
x_generated = generator(z)
d_true = discriminator(x_true, reuse=False)
d_generated = discriminator(x_generated, reuse=True)
with tf.name_scope('regularizer'):
epsilon = tf.random_uniform([50, 1, 1, 1], 0.0, 1.0)
x_hat = epsilon * x_true + (1 - epsilon) * x_generated
d_hat = discriminator(x_hat, reuse=True)
gradients = tf.gradients(d_hat, x_hat)[0]
ddx = tf.sqrt(tf.reduce_sum(gradients ** 2, axis=[1, 2]))
d_regularizer = tf.reduce_mean((ddx - 1.0) ** 2)
with tf.name_scope('loss'):
g_loss = tf.reduce_mean(d_generated)
d_loss = (tf.reduce_mean(d_true) - tf.reduce_mean(d_generated) +
10 * d_regularizer)
with tf.name_scope('optimizer'):
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4, beta1=0, beta2=0.9)
g_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='generator')
g_train = optimizer.minimize(g_loss, var_list=g_vars)
d_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='discriminator')
d_train = optimizer.minimize(d_loss, var_list=d_vars)
#####################################################
############# Initialize Variables ###############
session = tf.InteractiveSession()
# session = tf.InteractiveSession(config=tf.ConfigProto(log_device_placement=True))
tf.global_variables_initializer().run()
mnist = input_data.read_data_sets('MNIST_data')
generated_images = []
export_video_nth_frame = 30
height, width, channels = (28, 28, 1)
#####################################################
############# Start Rendering Thread ###############
drawing_thread = threading.Thread(target=draw_figure, args=(generated_images,))
drawing_thread.setDaemon(True)
drawing_thread.start()
#####################################################
############# Train ###############
for i in range(20000):
batch = mnist.train.next_batch(50)
images = batch[0].reshape([-1, height, width, channels])
z_train = np.random.randn(50, 128)
session.run(g_train, feed_dict={z: z_train})
for j in range(5):
session.run(d_train, feed_dict={x_true: images, z: z_train})
print('iter={}/20000'.format(i))
z_validate = np.random.randn(1, 128)
generated = x_generated.eval(feed_dict={z: z_validate}).squeeze()
generated = np.uint8(generated*255) # hand over to thread
generated_images.append(generated)
if i%export_video_nth_frame == 0:
pass
export_video(generated_images)
#####################################################
################ Finalize #####################
export_video(generated_images)
#####################################################
|
[
"tensorflow.reduce_sum",
"tensorflow.get_collection",
"tensorflow.maximum",
"tensorflow.contrib.layers.flatten",
"tensorflow.reshape",
"tensorflow.InteractiveSession",
"numpy.random.randn",
"tensorflow.variable_scope",
"tensorflow.placeholder",
"tensorflow.contrib.layers.conv2d_transpose",
"tensorflow.gradients",
"tensorflow.name_scope",
"threading.Thread",
"numpy.uint8",
"tensorflow.contrib.layers.fully_connected",
"tensorflow.global_variables_initializer",
"tensorflow.reduce_mean",
"tensorflow.contrib.layers.conv2d",
"tensorflow.random_uniform",
"tensorflow.examples.tutorials.mnist.input_data.read_data_sets",
"tensorflow.train.AdamOptimizer",
"rendering.export_video"
] |
[((2786, 2809), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {}), '()\n', (2807, 2809), True, 'import tensorflow as tf\n'), ((2942, 2981), 'tensorflow.examples.tutorials.mnist.input_data.read_data_sets', 'input_data.read_data_sets', (['"""MNIST_data"""'], {}), "('MNIST_data')\n", (2967, 2981), False, 'from tensorflow.examples.tutorials.mnist import input_data\n'), ((3196, 3258), 'threading.Thread', 'threading.Thread', ([], {'target': 'draw_figure', 'args': '(generated_images,)'}), '(target=draw_figure, args=(generated_images,))\n', (3212, 3258), False, 'import threading\n'), ((4155, 4185), 'rendering.export_video', 'export_video', (['generated_images'], {}), '(generated_images)\n', (4167, 4185), False, 'from rendering import draw_figure, export_video\n'), ((329, 351), 'tensorflow.maximum', 'tf.maximum', (['x', '(0.2 * x)'], {}), '(x, 0.2 * x)\n', (339, 351), True, 'import tensorflow as tf\n'), ((1477, 1506), 'tensorflow.name_scope', 'tf.name_scope', (['"""placeholders"""'], {}), "('placeholders')\n", (1490, 1506), True, 'import tensorflow as tf\n'), ((1521, 1566), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 28, 28, 1]'], {}), '(tf.float32, [None, 28, 28, 1])\n', (1535, 1566), True, 'import tensorflow as tf\n'), ((1575, 1614), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 128]'], {}), '(tf.float32, [None, 128])\n', (1589, 1614), True, 'import tensorflow as tf\n'), ((1746, 1774), 'tensorflow.name_scope', 'tf.name_scope', (['"""regularizer"""'], {}), "('regularizer')\n", (1759, 1774), True, 'import tensorflow as tf\n'), ((1790, 1832), 'tensorflow.random_uniform', 'tf.random_uniform', (['[50, 1, 1, 1]', '(0.0)', '(1.0)'], {}), '([50, 1, 1, 1], 0.0, 1.0)\n', (1807, 1832), True, 'import tensorflow as tf\n'), ((2066, 2098), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['((ddx - 1.0) ** 2)'], {}), '((ddx - 1.0) ** 2)\n', (2080, 2098), True, 'import tensorflow as tf\n'), ((2105, 2126), 'tensorflow.name_scope', 
'tf.name_scope', (['"""loss"""'], {}), "('loss')\n", (2118, 2126), True, 'import tensorflow as tf\n'), ((2141, 2168), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['d_generated'], {}), '(d_generated)\n', (2155, 2168), True, 'import tensorflow as tf\n'), ((2278, 2304), 'tensorflow.name_scope', 'tf.name_scope', (['"""optimizer"""'], {}), "('optimizer')\n", (2291, 2304), True, 'import tensorflow as tf\n'), ((2322, 2386), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.0001)', 'beta1': '(0)', 'beta2': '(0.9)'}), '(learning_rate=0.0001, beta1=0, beta2=0.9)\n', (2344, 2386), True, 'import tensorflow as tf\n'), ((2399, 2466), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {'scope': '"""generator"""'}), "(tf.GraphKeys.GLOBAL_VARIABLES, scope='generator')\n", (2416, 2466), True, 'import tensorflow as tf\n'), ((2538, 2609), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.GLOBAL_VARIABLES'], {'scope': '"""discriminator"""'}), "(tf.GraphKeys.GLOBAL_VARIABLES, scope='discriminator')\n", (2555, 2609), True, 'import tensorflow as tf\n'), ((3542, 3566), 'numpy.random.randn', 'np.random.randn', (['(50)', '(128)'], {}), '(50, 128)\n', (3557, 3566), True, 'import numpy as np\n'), ((3764, 3787), 'numpy.random.randn', 'np.random.randn', (['(1)', '(128)'], {}), '(1, 128)\n', (3779, 3787), True, 'import numpy as np\n'), ((3875, 3900), 'numpy.uint8', 'np.uint8', (['(generated * 255)'], {}), '(generated * 255)\n', (3883, 3900), True, 'import numpy as np\n'), ((380, 410), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""generator"""'], {}), "('generator')\n", (397, 410), True, 'import tensorflow as tf\n'), ((424, 467), 'tensorflow.contrib.layers.fully_connected', 'layers.fully_connected', (['z'], {'num_outputs': '(4096)'}), '(z, num_outputs=4096)\n', (446, 467), False, 'from tensorflow.contrib import layers\n'), ((480, 510), 'tensorflow.reshape', 'tf.reshape', (['z', '[-1, 4, 4, 256]'], 
{}), '(z, [-1, 4, 4, 256])\n', (490, 510), True, 'import tensorflow as tf\n'), ((524, 592), 'tensorflow.contrib.layers.conv2d_transpose', 'layers.conv2d_transpose', (['z'], {'num_outputs': '(128)', 'kernel_size': '(5)', 'stride': '(2)'}), '(z, num_outputs=128, kernel_size=5, stride=2)\n', (547, 592), False, 'from tensorflow.contrib import layers\n'), ((605, 672), 'tensorflow.contrib.layers.conv2d_transpose', 'layers.conv2d_transpose', (['z'], {'num_outputs': '(64)', 'kernel_size': '(5)', 'stride': '(2)'}), '(z, num_outputs=64, kernel_size=5, stride=2)\n', (628, 672), False, 'from tensorflow.contrib import layers\n'), ((685, 784), 'tensorflow.contrib.layers.conv2d_transpose', 'layers.conv2d_transpose', (['z'], {'num_outputs': '(1)', 'kernel_size': '(5)', 'stride': '(2)', 'activation_fn': 'tf.nn.sigmoid'}), '(z, num_outputs=1, kernel_size=5, stride=2,\n activation_fn=tf.nn.sigmoid)\n', (708, 784), False, 'from tensorflow.contrib import layers\n'), ((892, 939), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""discriminator"""'], {'reuse': 'reuse'}), "('discriminator', reuse=reuse)\n", (909, 939), True, 'import tensorflow as tf\n'), ((953, 1041), 'tensorflow.contrib.layers.conv2d', 'layers.conv2d', (['x'], {'num_outputs': '(64)', 'kernel_size': '(5)', 'stride': '(2)', 'activation_fn': 'leaky_relu'}), '(x, num_outputs=64, kernel_size=5, stride=2, activation_fn=\n leaky_relu)\n', (966, 1041), False, 'from tensorflow.contrib import layers\n'), ((1075, 1164), 'tensorflow.contrib.layers.conv2d', 'layers.conv2d', (['x'], {'num_outputs': '(128)', 'kernel_size': '(5)', 'stride': '(2)', 'activation_fn': 'leaky_relu'}), '(x, num_outputs=128, kernel_size=5, stride=2, activation_fn=\n leaky_relu)\n', (1088, 1164), False, 'from tensorflow.contrib import layers\n'), ((1198, 1287), 'tensorflow.contrib.layers.conv2d', 'layers.conv2d', (['x'], {'num_outputs': '(256)', 'kernel_size': '(5)', 'stride': '(2)', 'activation_fn': 'leaky_relu'}), '(x, num_outputs=256, kernel_size=5, 
stride=2, activation_fn=\n leaky_relu)\n', (1211, 1287), False, 'from tensorflow.contrib import layers\n'), ((1322, 1339), 'tensorflow.contrib.layers.flatten', 'layers.flatten', (['x'], {}), '(x)\n', (1336, 1339), False, 'from tensorflow.contrib import layers\n'), ((1355, 1415), 'tensorflow.contrib.layers.fully_connected', 'layers.fully_connected', (['x'], {'num_outputs': '(1)', 'activation_fn': 'None'}), '(x, num_outputs=1, activation_fn=None)\n', (1377, 1415), False, 'from tensorflow.contrib import layers\n'), ((1954, 1980), 'tensorflow.gradients', 'tf.gradients', (['d_hat', 'x_hat'], {}), '(d_hat, x_hat)\n', (1966, 1980), True, 'import tensorflow as tf\n'), ((2002, 2044), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(gradients ** 2)'], {'axis': '[1, 2]'}), '(gradients ** 2, axis=[1, 2])\n', (2015, 2044), True, 'import tensorflow as tf\n'), ((2894, 2927), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2925, 2927), True, 'import tensorflow as tf\n'), ((4020, 4050), 'rendering.export_video', 'export_video', (['generated_images'], {}), '(generated_images)\n', (4032, 4050), False, 'from rendering import draw_figure, export_video\n'), ((2183, 2205), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['d_true'], {}), '(d_true)\n', (2197, 2205), True, 'import tensorflow as tf\n'), ((2208, 2235), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['d_generated'], {}), '(d_generated)\n', (2222, 2235), True, 'import tensorflow as tf\n')]
|
from polyaxon_schemas.environments import HorovodClusterConfig
from polyaxon_schemas.polyaxonfile.specification.frameworks import HorovodSpecification
from polyaxon_schemas.utils import TaskType
from scheduler.spawners.experiment_spawner import ExperimentSpawner
class HorovodSpawner(ExperimentSpawner):
MASTER_SERVICE = True
WORKER_SERVICE = True
@property
def resources(self):
cluster, is_distributed, = self.spec.cluster_def
worker_resources = HorovodSpecification.get_worker_resources(
environment=self.spec.environment,
cluster=cluster,
is_distributed=is_distributed
)
return {
TaskType.MASTER: {0: self.spec.master_resources},
TaskType.WORKER: worker_resources,
}
@property
def node_selectors(self):
cluster, is_distributed, = self.spec.cluster_def
worker_node_selectors = HorovodSpecification.get_worker_node_selectors(
environment=self.spec.environment,
cluster=cluster,
is_distributed=is_distributed
)
return {
TaskType.MASTER: {0: self.spec.master_node_selectors},
TaskType.WORKER: worker_node_selectors,
}
def get_resources(self, task_type, task_idx):
return self.resources.get(task_type, {}).get(task_idx)
def get_node_selectors(self, task_type, task_idx):
return self.node_selectors.get(task_type, {}).get(task_idx)
def get_n_pods(self, task_type):
return self.spec.cluster_def[0].get(task_type, 0)
def start_experiment(self):
experiment = super().start_experiment()
experiment[TaskType.WORKER] = self.create_multi_jobs(task_type=TaskType.WORKER,
add_service=self.WORKER_SERVICE)
return experiment
def stop_experiment(self):
super().stop_experiment()
self.delete_multi_jobs(task_type=TaskType.WORKER, has_service=self.WORKER_SERVICE)
def get_cluster(self):
cluster_def, _ = self.spec.cluster_def
job_name = self.pod_manager.get_job_name(task_type=TaskType.MASTER, task_idx=0)
cluster_config = {
TaskType.MASTER: [self._get_pod_address(job_name)]
}
workers = []
for i in range(cluster_def.get(TaskType.WORKER, 0)):
job_name = self.pod_manager.get_job_name(task_type=TaskType.WORKER, task_idx=i)
workers.append(self._get_pod_address(job_name))
cluster_config[TaskType.WORKER] = workers
return HorovodClusterConfig.from_dict(cluster_config).to_dict()
|
[
"polyaxon_schemas.environments.HorovodClusterConfig.from_dict",
"polyaxon_schemas.polyaxonfile.specification.frameworks.HorovodSpecification.get_worker_node_selectors",
"polyaxon_schemas.polyaxonfile.specification.frameworks.HorovodSpecification.get_worker_resources"
] |
[((482, 610), 'polyaxon_schemas.polyaxonfile.specification.frameworks.HorovodSpecification.get_worker_resources', 'HorovodSpecification.get_worker_resources', ([], {'environment': 'self.spec.environment', 'cluster': 'cluster', 'is_distributed': 'is_distributed'}), '(environment=self.spec.environment,\n cluster=cluster, is_distributed=is_distributed)\n', (523, 610), False, 'from polyaxon_schemas.polyaxonfile.specification.frameworks import HorovodSpecification\n'), ((923, 1057), 'polyaxon_schemas.polyaxonfile.specification.frameworks.HorovodSpecification.get_worker_node_selectors', 'HorovodSpecification.get_worker_node_selectors', ([], {'environment': 'self.spec.environment', 'cluster': 'cluster', 'is_distributed': 'is_distributed'}), '(environment=self.spec.\n environment, cluster=cluster, is_distributed=is_distributed)\n', (969, 1057), False, 'from polyaxon_schemas.polyaxonfile.specification.frameworks import HorovodSpecification\n'), ((2591, 2637), 'polyaxon_schemas.environments.HorovodClusterConfig.from_dict', 'HorovodClusterConfig.from_dict', (['cluster_config'], {}), '(cluster_config)\n', (2621, 2637), False, 'from polyaxon_schemas.environments import HorovodClusterConfig\n')]
|
from src.seededkm.seededkm import SeededKMeans
from src.constrainedkm.constrainedkm import ConstrainedKMeans
from sklearn import datasets
from src.utils.runnerutils import run_algo, run_KMeans
def cluster(n_clusters, seed_fraction, noise_fraction, incompleteness_fraction, manually_annotate, n_fold, run_KM):
iris = datasets.load_iris()
X = iris.data
y = iris.target
skm = SeededKMeans(seed_fraction, noise_fraction, incompleteness_fraction, n_clusters, 'iris')
seeded_ari, seeded_ami = run_algo(skm, 'Seeded K-Means', X, y, n_fold, manually_annotate)
skm.visualise_results()
ckm = ConstrainedKMeans(seed_fraction, noise_fraction, incompleteness_fraction, n_clusters, 'iris')
constrained_ari, constrained_ami = run_algo(ckm, 'Constrained K-Means', X, y, n_fold, manually_annotate)
ckm.visualise_results()
kmeans_ari = 0
kmeans_ami = 0
if run_KM:
kmeans_ari, kmeans_ami = run_KMeans(X, y, n_clusters, n_fold)
return seeded_ari, seeded_ami, constrained_ari, constrained_ami, kmeans_ari, kmeans_ami
|
[
"sklearn.datasets.load_iris",
"src.utils.runnerutils.run_algo",
"src.utils.runnerutils.run_KMeans",
"src.constrainedkm.constrainedkm.ConstrainedKMeans",
"src.seededkm.seededkm.SeededKMeans"
] |
[((321, 341), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {}), '()\n', (339, 341), False, 'from sklearn import datasets\n'), ((391, 483), 'src.seededkm.seededkm.SeededKMeans', 'SeededKMeans', (['seed_fraction', 'noise_fraction', 'incompleteness_fraction', 'n_clusters', '"""iris"""'], {}), "(seed_fraction, noise_fraction, incompleteness_fraction,\n n_clusters, 'iris')\n", (403, 483), False, 'from src.seededkm.seededkm import SeededKMeans\n'), ((509, 573), 'src.utils.runnerutils.run_algo', 'run_algo', (['skm', '"""Seeded K-Means"""', 'X', 'y', 'n_fold', 'manually_annotate'], {}), "(skm, 'Seeded K-Means', X, y, n_fold, manually_annotate)\n", (517, 573), False, 'from src.utils.runnerutils import run_algo, run_KMeans\n'), ((613, 710), 'src.constrainedkm.constrainedkm.ConstrainedKMeans', 'ConstrainedKMeans', (['seed_fraction', 'noise_fraction', 'incompleteness_fraction', 'n_clusters', '"""iris"""'], {}), "(seed_fraction, noise_fraction, incompleteness_fraction,\n n_clusters, 'iris')\n", (630, 710), False, 'from src.constrainedkm.constrainedkm import ConstrainedKMeans\n'), ((746, 815), 'src.utils.runnerutils.run_algo', 'run_algo', (['ckm', '"""Constrained K-Means"""', 'X', 'y', 'n_fold', 'manually_annotate'], {}), "(ckm, 'Constrained K-Means', X, y, n_fold, manually_annotate)\n", (754, 815), False, 'from src.utils.runnerutils import run_algo, run_KMeans\n'), ((931, 967), 'src.utils.runnerutils.run_KMeans', 'run_KMeans', (['X', 'y', 'n_clusters', 'n_fold'], {}), '(X, y, n_clusters, n_fold)\n', (941, 967), False, 'from src.utils.runnerutils import run_algo, run_KMeans\n')]
|
"""Parsing of signal logs from experiments, and version logging."""
import datetime
import importlib
import json
import logging
import os
import pprint
import subprocess
import time
import git
import numpy as np
# these should be moved to other (optional) module
from openpromela import logic
from openpromela import slugs
logger = logging.getLogger(__name__)
CONFIG_FILE = 'config.json'
def git_version(path):
"""Return SHA-dirty for repo under `path`."""
repo = git.Repo(path)
sha = repo.head.commit.hexsha
dirty = repo.is_dirty()
return sha + ('-dirty' if dirty else '')
def snapshot_versions(check=True):
"""Log versions of software used."""
d = dict()
d['slugs'] = slugs_version()
# versions of python packages
packages = [
'dd', 'omega', 'tugs',
'openpromela', 'promela']
for s in packages:
pkg = importlib.import_module(s)
d[s] = pkg.__version__
t_now = time.strftime('%Y-%b-%d-%A-%T-%Z')
d['time'] = t_now
d['platform'] = os.uname()
if not check:
return d
# existing log ?
try:
with open(CONFIG_FILE, 'r') as f:
d_old = json.load(f)
except IOError:
d_old = None
# check versions
compare = list(packages)
compare.append('slugs')
if d_old is not None:
for k in compare:
assert d[k] == d_old[k], (
('versions differ from {cfg}:\n\n'
'NEW: {d}'
'\n -----\n\n'
'OLD: {d_old}').format(
cfg=CONFIG_FILE,
d=pprint.pformat(d),
d_old=pprint.pformat(d_old)))
# dump
with open(CONFIG_FILE, 'w') as f:
json.dump(d, f, indent=4)
return d
def slugs_version():
cmd = ['slugs', '--version']
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
except OSError as e:
if e.errno == os.errno.ENOENT:
print('Warning: `slugs` not found on path')
return
else:
raise
p.wait()
if p.returncode != 0:
print('`{cmd}` returned {r}'.format(
cmd=' '.join(cmd),
r=p.returncode))
return
version = p.stdout.read().strip()
return version
def add_logfile(fname, logger_name):
h = logging.FileHandler(fname, mode='w')
log = logging.getLogger(logger_name)
log.addHandler(h)
return h
def close_logfile(h, logger_name):
log = logging.getLogger(logger_name)
log.removeHandler(h)
h.close()
def load_log_file(fname):
data = dict()
with open(fname, 'r') as f:
for line in f:
if "'time'" not in line:
continue
try:
d = eval(line)
split_data(d, data)
except:
continue
for k, v in data.iteritems():
for q, r in v.iteritems():
try:
data[k][q] = np.array(r, dtype=float)
except:
pass
return data
def split_data(d, data):
"""Store sample in `d` as a signal in `data`.
@type d: `dict`
@type data: `dict(dict(time=list(), value=list()))`
"""
t = d['time']
for k, v in d.iteritems():
if k == 'time':
continue
# is a signal
# new ?
if k not in data:
data[k] = dict(time=list(), value=list())
data[k]['time'].append(t)
data[k]['value'].append(v)
def get_signal(name, data):
return data[name]['time'], data[name]['value']
def inspect_data(data):
for k in data:
t = data[k]['time']
v = data[k]['value']
print(k, len(t), len(v))
def translate_promela_to_slugsin(code):
"""Return SlugsIn code from Promela `code`."""
t0 = time.time()
spec = logic.compile_spec(code)
aut = slugs._symbolic._bitblast(spec)
s = slugs._to_slugs(aut)
t1 = time.time()
dt = datetime.timedelta(seconds=t1 - t0)
logger.info('translated Promela -> SlugsIn in {dt}.'.format(dt=dt))
return s
|
[
"json.dump",
"subprocess.Popen",
"json.load",
"pprint.pformat",
"logging.FileHandler",
"importlib.import_module",
"os.uname",
"openpromela.slugs._to_slugs",
"time.strftime",
"git.Repo",
"time.time",
"datetime.timedelta",
"numpy.array",
"openpromela.logic.compile_spec",
"openpromela.slugs._symbolic._bitblast",
"logging.getLogger"
] |
[((334, 361), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (351, 361), False, 'import logging\n'), ((476, 490), 'git.Repo', 'git.Repo', (['path'], {}), '(path)\n', (484, 490), False, 'import git\n'), ((947, 981), 'time.strftime', 'time.strftime', (['"""%Y-%b-%d-%A-%T-%Z"""'], {}), "('%Y-%b-%d-%A-%T-%Z')\n", (960, 981), False, 'import time\n'), ((1024, 1034), 'os.uname', 'os.uname', ([], {}), '()\n', (1032, 1034), False, 'import os\n'), ((2318, 2354), 'logging.FileHandler', 'logging.FileHandler', (['fname'], {'mode': '"""w"""'}), "(fname, mode='w')\n", (2337, 2354), False, 'import logging\n'), ((2365, 2395), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (2382, 2395), False, 'import logging\n'), ((2478, 2508), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (2495, 2508), False, 'import logging\n'), ((3798, 3809), 'time.time', 'time.time', ([], {}), '()\n', (3807, 3809), False, 'import time\n'), ((3821, 3845), 'openpromela.logic.compile_spec', 'logic.compile_spec', (['code'], {}), '(code)\n', (3839, 3845), False, 'from openpromela import logic\n'), ((3856, 3887), 'openpromela.slugs._symbolic._bitblast', 'slugs._symbolic._bitblast', (['spec'], {}), '(spec)\n', (3881, 3887), False, 'from openpromela import slugs\n'), ((3896, 3916), 'openpromela.slugs._to_slugs', 'slugs._to_slugs', (['aut'], {}), '(aut)\n', (3911, 3916), False, 'from openpromela import slugs\n'), ((3926, 3937), 'time.time', 'time.time', ([], {}), '()\n', (3935, 3937), False, 'import time\n'), ((3947, 3982), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(t1 - t0)'}), '(seconds=t1 - t0)\n', (3965, 3982), False, 'import datetime\n'), ((877, 903), 'importlib.import_module', 'importlib.import_module', (['s'], {}), '(s)\n', (900, 903), False, 'import importlib\n'), ((1722, 1747), 'json.dump', 'json.dump', (['d', 'f'], {'indent': '(4)'}), '(d, f, indent=4)\n', (1731, 1747), False, 'import 
json\n'), ((1838, 1883), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE'}), '(cmd, stdout=subprocess.PIPE)\n', (1854, 1883), False, 'import subprocess\n'), ((1162, 1174), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1171, 1174), False, 'import json\n'), ((2955, 2979), 'numpy.array', 'np.array', (['r'], {'dtype': 'float'}), '(r, dtype=float)\n', (2963, 2979), True, 'import numpy as np\n'), ((1596, 1613), 'pprint.pformat', 'pprint.pformat', (['d'], {}), '(d)\n', (1610, 1613), False, 'import pprint\n'), ((1641, 1662), 'pprint.pformat', 'pprint.pformat', (['d_old'], {}), '(d_old)\n', (1655, 1662), False, 'import pprint\n')]
|
import pytest
from aiohttp import web
from pjrpc import exc
from pjrpc.common import v20
from pjrpc.server.integration import aiohttp as integration
from tests.common import _
@pytest.fixture
def path():
return '/test/path'
@pytest.fixture
def json_rpc(path):
json_rpc = integration.Application(path)
return json_rpc
@pytest.mark.parametrize(
'request_id, params, result', [
(
1,
(1, 1.1, 'str', {}, False),
[1, 1.1, 'str', {}, False],
),
(
'abc',
{'int': 1, 'float': 1.1, 'str': 'str', 'dict': {}, 'bool': False},
{'int': 1, 'float': 1.1, 'str': 'str', 'dict': {}, 'bool': False},
),
],
)
async def test_request(json_rpc, path, mocker, aiohttp_client, request_id, params, result):
method_name = 'test_method'
mock = mocker.Mock(name=method_name, return_value=result)
json_rpc.dispatcher.add(mock, method_name)
cli = await aiohttp_client(json_rpc.app)
raw = await cli.post(path, json=v20.Request(method=method_name, params=params, id=request_id).to_json())
assert raw.status == 200
resp = v20.Response.from_json(await raw.json())
if isinstance(params, dict):
mock.assert_called_once_with(kwargs=params)
else:
mock.assert_called_once_with(args=params)
assert resp.id == request_id
assert resp.result == result
async def test_notify(json_rpc, path, mocker, aiohttp_client):
params = [1, 2]
method_name = 'test_method'
mock = mocker.Mock(name=method_name, return_value='result')
json_rpc.dispatcher.add(mock, method_name)
cli = await aiohttp_client(json_rpc.app)
raw = await cli.post(path, json=v20.Request(method=method_name, params=params).to_json())
assert raw.status == 200
assert raw.content_type != 'application/json'
assert await raw.read() == b''
async def test_errors(json_rpc, path, mocker, aiohttp_client):
request_id = 1
params = (1, 2)
method_name = 'test_method'
def error_method(*args, **kwargs):
raise exc.JsonRpcError(code=1, message='message')
mock = mocker.Mock(name=method_name, side_effect=error_method)
json_rpc.dispatcher.add(mock, method_name)
cli = await aiohttp_client(json_rpc.app)
# method not found
raw = await cli.post(path, json=v20.Request(method='unknown_method', params=params, id=request_id).to_json())
assert raw.status == 200
resp = v20.Response.from_json(await raw.json())
assert resp.id is request_id
assert resp.is_error is True
assert resp.error == exc.MethodNotFoundError(data="method 'unknown_method' not found")
# customer error
raw = await cli.post(path, json=v20.Request(method=method_name, params=params, id=request_id).to_json())
assert raw.status == 200
resp = v20.Response.from_json(await raw.json())
mock.assert_called_once_with(args=params)
assert resp.id == request_id
assert resp.is_error is True
assert resp.error == exc.JsonRpcError(code=1, message='message')
# content type error
raw = await cli.post(path, data='')
assert raw.status == 415
# malformed json
raw = await cli.post(path, headers={'Content-Type': 'application/json'}, data='')
assert raw.status == 200
resp = v20.Response.from_json(await raw.json())
assert resp.id is None
assert resp.is_error is True
assert resp.error == exc.ParseError(data=_)
# decoding error
raw = await cli.post(path, headers={'Content-Type': 'application/json'}, data=b'\xff')
assert raw.status == 400
async def test_context(json_rpc, path, mocker, aiohttp_client):
request_id = 1
params = (1, 2)
method_name = 'test_method'
# test list parameters
mock = mocker.Mock(name=method_name, return_value='result')
json_rpc.dispatcher.add(mock, method_name, context='request')
cli = await aiohttp_client(json_rpc.app)
raw = await cli.post(path, json=v20.Request(method=method_name, params=params, id=request_id).to_json())
assert raw.status == 200
mock.assert_called_once()
call_args = mock.call_args[1]
context, args = call_args['request'], call_args['args']
assert isinstance(context, web.Request)
assert args == params
# test dict parameters
params = {'param1': 1, 'param2': 2}
mock.reset_mock()
cli = await aiohttp_client(json_rpc.app)
raw = await cli.post(path, json=v20.Request(method=method_name, params=params, id=request_id).to_json())
assert raw.status == 200
mock.assert_called_once()
call_args = mock.call_args[1]
context, kwargs = call_args['request'], call_args['kwargs']
assert isinstance(context, web.Request)
assert kwargs == params
|
[
"pjrpc.exc.MethodNotFoundError",
"pjrpc.common.v20.Request",
"pjrpc.exc.ParseError",
"pjrpc.server.integration.aiohttp.Application",
"pytest.mark.parametrize",
"pjrpc.exc.JsonRpcError"
] |
[((339, 611), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""request_id, params, result"""', "[(1, (1, 1.1, 'str', {}, False), [1, 1.1, 'str', {}, False]), ('abc', {\n 'int': 1, 'float': 1.1, 'str': 'str', 'dict': {}, 'bool': False}, {\n 'int': 1, 'float': 1.1, 'str': 'str', 'dict': {}, 'bool': False})]"], {}), "('request_id, params, result', [(1, (1, 1.1, 'str',\n {}, False), [1, 1.1, 'str', {}, False]), ('abc', {'int': 1, 'float': \n 1.1, 'str': 'str', 'dict': {}, 'bool': False}, {'int': 1, 'float': 1.1,\n 'str': 'str', 'dict': {}, 'bool': False})])\n", (362, 611), False, 'import pytest\n'), ((285, 314), 'pjrpc.server.integration.aiohttp.Application', 'integration.Application', (['path'], {}), '(path)\n', (308, 314), True, 'from pjrpc.server.integration import aiohttp as integration\n'), ((2104, 2147), 'pjrpc.exc.JsonRpcError', 'exc.JsonRpcError', ([], {'code': '(1)', 'message': '"""message"""'}), "(code=1, message='message')\n", (2120, 2147), False, 'from pjrpc import exc\n'), ((2620, 2685), 'pjrpc.exc.MethodNotFoundError', 'exc.MethodNotFoundError', ([], {'data': '"""method \'unknown_method\' not found"""'}), '(data="method \'unknown_method\' not found")\n', (2643, 2685), False, 'from pjrpc import exc\n'), ((3036, 3079), 'pjrpc.exc.JsonRpcError', 'exc.JsonRpcError', ([], {'code': '(1)', 'message': '"""message"""'}), "(code=1, message='message')\n", (3052, 3079), False, 'from pjrpc import exc\n'), ((3449, 3471), 'pjrpc.exc.ParseError', 'exc.ParseError', ([], {'data': '_'}), '(data=_)\n', (3463, 3471), False, 'from pjrpc import exc\n'), ((1063, 1124), 'pjrpc.common.v20.Request', 'v20.Request', ([], {'method': 'method_name', 'params': 'params', 'id': 'request_id'}), '(method=method_name, params=params, id=request_id)\n', (1074, 1124), False, 'from pjrpc.common import v20\n'), ((1742, 1788), 'pjrpc.common.v20.Request', 'v20.Request', ([], {'method': 'method_name', 'params': 'params'}), '(method=method_name, params=params)\n', (1753, 1788), False, 'from 
pjrpc.common import v20\n'), ((2369, 2435), 'pjrpc.common.v20.Request', 'v20.Request', ([], {'method': '"""unknown_method"""', 'params': 'params', 'id': 'request_id'}), "(method='unknown_method', params=params, id=request_id)\n", (2380, 2435), False, 'from pjrpc.common import v20\n'), ((2744, 2805), 'pjrpc.common.v20.Request', 'v20.Request', ([], {'method': 'method_name', 'params': 'params', 'id': 'request_id'}), '(method=method_name, params=params, id=request_id)\n', (2755, 2805), False, 'from pjrpc.common import v20\n'), ((3992, 4053), 'pjrpc.common.v20.Request', 'v20.Request', ([], {'method': 'method_name', 'params': 'params', 'id': 'request_id'}), '(method=method_name, params=params, id=request_id)\n', (4003, 4053), False, 'from pjrpc.common import v20\n'), ((4462, 4523), 'pjrpc.common.v20.Request', 'v20.Request', ([], {'method': 'method_name', 'params': 'params', 'id': 'request_id'}), '(method=method_name, params=params, id=request_id)\n', (4473, 4523), False, 'from pjrpc.common import v20\n')]
|
#!/usr/bin/env python
import os
import toml
import sys
os.system('rm -f *wrl *h5')
# print('### RUNNING GEANT4 (design.wrl) ###')
# conf = toml.load('sfqed.toml')
# A = conf['PrimaryGenerator']
# A['PythoGenerator'] = 'sfqed.pattern_spray'
# A['NumEvents'] = 100
# with open('temp.toml', 'w') as fout:
# toml.dump(conf, fout)
# os.system('pbpl-compton-mc temp.toml vis.mac > /dev/null 2>&1')
# os.system('pbpl-compton-extrude-vrml g4_00.wrl --radius=0.2 --num-points=8 --output=design.wrl')
# os.system('rm -f temp.toml g4*wrl')
print('### RUNNING GEANT4 (gamma-10MeV.wrl) ###')
conf = toml.load('sfqed.toml')
A = conf['PrimaryGenerator']
A['PythonGenerator'] = 'pbpl.compton.generators.repeater'
A['PythonGeneratorArgs'] = ['gamma', '10*MeV', '[0,0,-100*mm]', '[0,0,1]']
A['NumEvents'] = 20000
with open('temp.toml', 'w') as fout:
toml.dump(conf, fout)
os.system('pbpl-compton-mc temp.toml vis.mac')
#os.system('pbpl-compton-mc temp.toml vis.mac > /dev/null 2>&1')
os.system('pbpl-compton-extrude-vrml g4_00.wrl --radius=0.8 --num-points=8 --output=gamma-10MeV.wrl')
# os.system('rm -f temp.toml g4*wrl')
# print('### RUNNING GEANT4 (gamma-2GeV.wrl) ###')
# conf = toml.load('sfqed.toml')
# A = conf['PrimaryGenerator']
# A['PythonGenerator'] = 'pbpl.compton.generators.repeater'
# A['PythonGeneratorArgs'] = ['gamma', '2*GeV', '[0,0,-100*mm]', '[0,0,1]']
# A['NumEvents'] = 20000
# with open('temp.toml', 'w') as fout:
# toml.dump(conf, fout)
# os.system('pbpl-compton-mc temp.toml vis.mac')
# #os.system('pbpl-compton-mc temp.toml vis.mac > /dev/null 2>&1')
# os.system('pbpl-compton-extrude-vrml g4_00.wrl --radius=0.8 --num-points=8 --output=gamma-2GeV.wrl')
# os.system('rm -f temp.toml g4*wrl')
|
[
"toml.dump",
"toml.load",
"os.system"
] |
[((56, 83), 'os.system', 'os.system', (['"""rm -f *wrl *h5"""'], {}), "('rm -f *wrl *h5')\n", (65, 83), False, 'import os\n'), ((593, 616), 'toml.load', 'toml.load', (['"""sfqed.toml"""'], {}), "('sfqed.toml')\n", (602, 616), False, 'import toml\n'), ((865, 911), 'os.system', 'os.system', (['"""pbpl-compton-mc temp.toml vis.mac"""'], {}), "('pbpl-compton-mc temp.toml vis.mac')\n", (874, 911), False, 'import os\n'), ((977, 1088), 'os.system', 'os.system', (['"""pbpl-compton-extrude-vrml g4_00.wrl --radius=0.8 --num-points=8 --output=gamma-10MeV.wrl"""'], {}), "(\n 'pbpl-compton-extrude-vrml g4_00.wrl --radius=0.8 --num-points=8 --output=gamma-10MeV.wrl'\n )\n", (986, 1088), False, 'import os\n'), ((843, 864), 'toml.dump', 'toml.dump', (['conf', 'fout'], {}), '(conf, fout)\n', (852, 864), False, 'import toml\n')]
|
from interbotix_xs_modules.locobot import InterbotixLocobotXS
# This script commands some arbitrary positions to the arm joints
#
# To get started, open a terminal and type...
# 'roslaunch interbotix_xslocobot_control xslocobot_python.launch robot_model:=locobot_wx250s show_lidar:=true'
# Then change to this directory and type 'python joint_position_control.py'
def main():
joint_positions = [-1.0, 0.5 , 0.5, 0, -0.5, 1.57]
locobot = InterbotixLocobotXS(robot_model="locobot_wx250s", arm_model="mobile_wx250s")
locobot.arm.go_to_home_pose()
locobot.arm.set_joint_positions(joint_positions)
locobot.arm.go_to_home_pose()
locobot.arm.go_to_sleep_pose()
if __name__=='__main__':
main()
|
[
"interbotix_xs_modules.locobot.InterbotixLocobotXS"
] |
[((447, 523), 'interbotix_xs_modules.locobot.InterbotixLocobotXS', 'InterbotixLocobotXS', ([], {'robot_model': '"""locobot_wx250s"""', 'arm_model': '"""mobile_wx250s"""'}), "(robot_model='locobot_wx250s', arm_model='mobile_wx250s')\n", (466, 523), False, 'from interbotix_xs_modules.locobot import InterbotixLocobotXS\n')]
|
'''
Once newer version of sklearn is used will need to change k alias from n_topics to n_components
https://stackoverflow.com/a/48121678
'''
from sklearn.decomposition import LatentDirichletAllocation as _LatentDirichletAllocation
from base import BaseAlgo, TransformerMixin
from codec import codecs_manager
from util.param_util import convert_params
class LatentDirichletAllocation(TransformerMixin, BaseAlgo):
def __init__(self, options):
self.handle_options(options)
out_params = convert_params(
options.get('params', {}),
floats=['doc_topic_prior','learning_decay','learning_offset','perp_tol','mean_change_tol'],
strs=['learning_method'],
ints=['k','max_iter','batch_size','evaluate_every','total_samples','max_doc_update_iter','n_jobs','verbose','random_state'],
aliases={'k': 'n_topics'}
)
self.estimator = _LatentDirichletAllocation(**out_params)
def rename_output(self, default_names, new_names):
if new_names is None:
new_names = 'LDA'
output_names = ['{}_{}'.format(new_names, i+1) for i in xrange(len(default_names))]
return output_names
@staticmethod
def register_codecs():
from codec.codecs import SimpleObjectCodec
codecs_manager.add_codec('algos_contrib.LatentDirichletAllocation', 'LatentDirichletAllocation', SimpleObjectCodec)
codecs_manager.add_codec('sklearn.decomposition.online_lda', 'LatentDirichletAllocation', SimpleObjectCodec)
|
[
"sklearn.decomposition.LatentDirichletAllocation",
"codec.codecs_manager.add_codec"
] |
[((914, 954), 'sklearn.decomposition.LatentDirichletAllocation', '_LatentDirichletAllocation', ([], {}), '(**out_params)\n', (940, 954), True, 'from sklearn.decomposition import LatentDirichletAllocation as _LatentDirichletAllocation\n'), ((1296, 1415), 'codec.codecs_manager.add_codec', 'codecs_manager.add_codec', (['"""algos_contrib.LatentDirichletAllocation"""', '"""LatentDirichletAllocation"""', 'SimpleObjectCodec'], {}), "('algos_contrib.LatentDirichletAllocation',\n 'LatentDirichletAllocation', SimpleObjectCodec)\n", (1320, 1415), False, 'from codec import codecs_manager\n'), ((1420, 1532), 'codec.codecs_manager.add_codec', 'codecs_manager.add_codec', (['"""sklearn.decomposition.online_lda"""', '"""LatentDirichletAllocation"""', 'SimpleObjectCodec'], {}), "('sklearn.decomposition.online_lda',\n 'LatentDirichletAllocation', SimpleObjectCodec)\n", (1444, 1532), False, 'from codec import codecs_manager\n')]
|
# MIT License
#
# Copyright (c) 2019 Red Hat, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import hmac
from hashlib import sha1
from http import HTTPStatus
from logging import getLogger
from flask import request
from flask_restplus import Namespace, Resource, fields
from packit_service.celerizer import celery_app
from packit_service.config import ServiceConfig
from packit_service.service.api.errors import ValidationFailed
logger = getLogger("packit_service")
config = ServiceConfig.get_service_config()
ns = Namespace("webhooks", description="Webhooks")
# Just to be able to specify some payload in Swagger UI
ping_payload = ns.model(
"Github webhook ping",
{
"zen": fields.String(required=False),
"hook_id": fields.String(required=False),
"hook": fields.String(required=False),
},
)
@ns.route("/github")
class GithubWebhook(Resource):
@ns.response(HTTPStatus.OK, "Webhook accepted, returning reply")
@ns.response(HTTPStatus.ACCEPTED, "Webhook accepted, request is being processed")
@ns.response(HTTPStatus.BAD_REQUEST, "Bad request data")
@ns.response(HTTPStatus.UNAUTHORIZED, "X-Hub-Signature validation failed")
# Just to be able to specify some payload in Swagger UI
@ns.expect(ping_payload)
def post(self):
"""
A webhook used by Packit-as-a-Service GitHub App.
"""
msg = request.json
if not msg:
logger.debug("/webhooks/github: we haven't received any JSON data.")
return "We haven't received any JSON data.", HTTPStatus.BAD_REQUEST
if all([msg.get("zen"), msg.get("hook_id"), msg.get("hook")]):
logger.debug(f"/webhooks/github received ping event: {msg['hook']}")
return "Pong!", HTTPStatus.OK
try:
self.validate_signature()
except ValidationFailed as exc:
logger.info(f"/webhooks/github {exc}")
return str(exc), HTTPStatus.UNAUTHORIZED
if not self.interested():
return "Thanks but we don't care about this event", HTTPStatus.ACCEPTED
# TODO: define task names at one place
celery_app.send_task(
name="task.steve_jobs.process_message", kwargs={"event": msg}
)
return "Webhook accepted. We thank you, Github.", HTTPStatus.ACCEPTED
@staticmethod
def validate_signature():
"""
https://developer.github.com/webhooks/securing/#validating-payloads-from-github
https://developer.github.com/webhooks/#delivery-headers
"""
if "X-Hub-Signature" not in request.headers:
if config.validate_webhooks:
msg = "X-Hub-Signature not in request.headers"
logger.warning(msg)
raise ValidationFailed(msg)
else:
# don't validate signatures when testing locally
logger.debug("Ain't validating signatures")
return
sig = request.headers["X-Hub-Signature"]
if not sig.startswith("sha1="):
msg = f"Digest mode in X-Hub-Signature {sig!r} is not sha1"
logger.warning(msg)
raise ValidationFailed(msg)
webhook_secret = config.webhook_secret.encode()
if not webhook_secret:
msg = "webhook_secret not specified in config"
logger.error(msg)
raise ValidationFailed(msg)
signature = sig.split("=")[1]
mac = hmac.new(webhook_secret, msg=request.get_data(), digestmod=sha1)
digest_is_valid = hmac.compare_digest(signature, mac.hexdigest())
if digest_is_valid:
logger.debug("payload signature OK.")
else:
msg = "payload signature validation failed."
logger.warning(msg)
logger.debug(f"X-Hub-Signature: {sig!r} != computed: {mac.hexdigest()}")
raise ValidationFailed(msg)
@staticmethod
def interested():
"""
Check X-GitHub-Event header for events we know we give a f...
...finely prepared response to.
:return: False if we are not interested in this kind of event
"""
uninteresting_events = {
"integration_installation",
"integration_installation_repositories",
}
event_type = request.headers.get("X-GitHub-Event")
uuid = request.headers.get("X-GitHub-Delivery")
_interested = event_type not in uninteresting_events
logger.debug(
f"{event_type} {uuid}{' (not interested)' if not _interested else ''}"
)
return _interested
|
[
"flask.request.headers.get",
"packit_service.config.ServiceConfig.get_service_config",
"packit_service.celerizer.celery_app.send_task",
"packit_service.service.api.errors.ValidationFailed",
"flask.request.get_data",
"flask_restplus.fields.String",
"flask_restplus.Namespace",
"logging.getLogger"
] |
[((1455, 1482), 'logging.getLogger', 'getLogger', (['"""packit_service"""'], {}), "('packit_service')\n", (1464, 1482), False, 'from logging import getLogger\n'), ((1492, 1526), 'packit_service.config.ServiceConfig.get_service_config', 'ServiceConfig.get_service_config', ([], {}), '()\n', (1524, 1526), False, 'from packit_service.config import ServiceConfig\n'), ((1533, 1578), 'flask_restplus.Namespace', 'Namespace', (['"""webhooks"""'], {'description': '"""Webhooks"""'}), "('webhooks', description='Webhooks')\n", (1542, 1578), False, 'from flask_restplus import Namespace, Resource, fields\n'), ((1709, 1738), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)'}), '(required=False)\n', (1722, 1738), False, 'from flask_restplus import Namespace, Resource, fields\n'), ((1759, 1788), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)'}), '(required=False)\n', (1772, 1788), False, 'from flask_restplus import Namespace, Resource, fields\n'), ((1806, 1835), 'flask_restplus.fields.String', 'fields.String', ([], {'required': '(False)'}), '(required=False)\n', (1819, 1835), False, 'from flask_restplus import Namespace, Resource, fields\n'), ((3161, 3249), 'packit_service.celerizer.celery_app.send_task', 'celery_app.send_task', ([], {'name': '"""task.steve_jobs.process_message"""', 'kwargs': "{'event': msg}"}), "(name='task.steve_jobs.process_message', kwargs={\n 'event': msg})\n", (3181, 3249), False, 'from packit_service.celerizer import celery_app\n'), ((5325, 5362), 'flask.request.headers.get', 'request.headers.get', (['"""X-GitHub-Event"""'], {}), "('X-GitHub-Event')\n", (5344, 5362), False, 'from flask import request\n'), ((5378, 5418), 'flask.request.headers.get', 'request.headers.get', (['"""X-GitHub-Delivery"""'], {}), "('X-GitHub-Delivery')\n", (5397, 5418), False, 'from flask import request\n'), ((4186, 4207), 'packit_service.service.api.errors.ValidationFailed', 'ValidationFailed', (['msg'], {}), '(msg)\n', (4202, 
4207), False, 'from packit_service.service.api.errors import ValidationFailed\n'), ((4403, 4424), 'packit_service.service.api.errors.ValidationFailed', 'ValidationFailed', (['msg'], {}), '(msg)\n', (4419, 4424), False, 'from packit_service.service.api.errors import ValidationFailed\n'), ((4901, 4922), 'packit_service.service.api.errors.ValidationFailed', 'ValidationFailed', (['msg'], {}), '(msg)\n', (4917, 4922), False, 'from packit_service.service.api.errors import ValidationFailed\n'), ((3786, 3807), 'packit_service.service.api.errors.ValidationFailed', 'ValidationFailed', (['msg'], {}), '(msg)\n', (3802, 3807), False, 'from packit_service.service.api.errors import ValidationFailed\n'), ((4507, 4525), 'flask.request.get_data', 'request.get_data', ([], {}), '()\n', (4523, 4525), False, 'from flask import request\n')]
|
from collections import namedtuple
from enum import Enum, unique
from functools import lru_cache
from core.errors import ParseError
@unique
class Associativity(Enum):
UNDEFINED = 0
LEFT = 1
RIGHT = 2
BinOpInfo = namedtuple('BinOpInfo', ['precedence', 'associativity'])
BUILTIN_OP = {
'=': BinOpInfo(2, Associativity.RIGHT),
'==': BinOpInfo(10, Associativity.LEFT),
'+=': BinOpInfo(10, Associativity.LEFT),
'-=': BinOpInfo(10, Associativity.LEFT),
'!=': BinOpInfo(10, Associativity.LEFT),
'and': BinOpInfo(5, Associativity.LEFT),
'or': BinOpInfo(5, Associativity.LEFT),
'xor': BinOpInfo(5, Associativity.LEFT),
'<': BinOpInfo(10, Associativity.LEFT),
'<=': BinOpInfo(10, Associativity.LEFT),
'>': BinOpInfo(10, Associativity.LEFT),
'>=': BinOpInfo(10, Associativity.LEFT),
'+': BinOpInfo(20, Associativity.LEFT),
'-': BinOpInfo(20, Associativity.LEFT),
'*': BinOpInfo(40, Associativity.LEFT),
'/': BinOpInfo(40, Associativity.LEFT),
}
BUILTIN_UNARY_OP = {
'not',
'-'
}
UNASSIGNED = {
'!','$','%','`','^','&','|','\','
}
FALSE_BINOP_INFO = BinOpInfo(-1, Associativity.UNDEFINED)
def builtin_operators():
return sorted(BUILTIN_OP.keys())
_binop_map = dict(BUILTIN_OP)
def binop_info(tok):
kind, value, _, position = tok
try:
return _binop_map[value]
except KeyError:
from core.lexer import TokenKind, PUNCTUATORS
if kind == TokenKind.PUNCTUATOR and value not in PUNCTUATORS:
raise ParseError(f'Undefined operator: "{value}"', position)
# Return a false binop info that has no precedence
return FALSE_BINOP_INFO
def set_binop_info(op, precedence, associativity):
_binop_map[op] = BinOpInfo(precedence, associativity)
|
[
"core.errors.ParseError",
"collections.namedtuple"
] |
[((227, 283), 'collections.namedtuple', 'namedtuple', (['"""BinOpInfo"""', "['precedence', 'associativity']"], {}), "('BinOpInfo', ['precedence', 'associativity'])\n", (237, 283), False, 'from collections import namedtuple\n'), ((1528, 1582), 'core.errors.ParseError', 'ParseError', (['f"""Undefined operator: "{value}\\""""', 'position'], {}), '(f\'Undefined operator: "{value}"\', position)\n', (1538, 1582), False, 'from core.errors import ParseError\n')]
|
# -*- coding: utf-8 -*-
"""
This file is covered by the LICENSING file in the root of this project.
"""
import sys
sys.path.append("..")
import os
from os.path import realpath, dirname, isfile, abspath
import json
import time
import uuid
from werkzeug.datastructures import FileStorage
from io import IOBase
from hackathon.constants import FILE_TYPE, HEALTH_STATUS, HEALTH
from hackathon.storage.storage import Storage
__all__ = ["LocalStorage"]
class LocalStorage(Storage):
"""Hackathon file storage that saves all templates on local disk
files will be save at "<src_dir>/open-hackathon-server/src/hackathon/upload/<file_type>"
"""
def save(self, context):
"""Save a file to storage
:type context: Context
:param context: the execution context of file saving
:rtype context
:return the updated context which should including the full path of saved file
"""
context = self.__generate_paths(context)
self.__save_file(context.content, context.physical_path)
self.log.debug("file saved at:" + context.physical_path)
return context
def delete(self, url):
"""Delete file from storage
:type url: str|unicode
:param url: the url of file to be deleted which are created in 'save'
:rtype bool
:return True if successfully deleted else False
"""
path = self.__convert_url_to_physical_path(url)
if isfile(path):
os.remove(path)
return True
else:
self.log.warn("try to remove dir or non-existed file")
return False
def report_health(self):
"""The status of local storage should be always True"""
return {
HEALTH.STATUS: HEALTH_STATUS.OK,
"type": "LocalStorage"
}
def __init__(self):
self.base_dir = self.__get_storage_base_dir()
@staticmethod
def __ensure_dir(file_path):
"""Make sure the directory of target file exists"""
path = dirname(file_path)
if path and not (os.path.exists(path)):
os.makedirs(path)
return path
def __save_file(self, content, path):
"""Dump file to disk
An existing file with the same name will be erased
:type content: file | dict | FileStorage
:param content: the content of file to be saved. Can be a file object or a dict
:type path: str | unicode
:param path: the file path
"""
self.__ensure_dir(path)
with open(path, 'w') as f:
if isinstance(content, dict):
json.dump(content, f)
elif isinstance(content, IOBase) or isinstance(content, FileStorage):
content.save(path)
else:
f.write(str(content))
@staticmethod
def __get_storage_base_dir():
"""Get the base directory of storage"""
return abspath("%s/.." % dirname(realpath(__file__)))
def __generate_paths(self, context):
"""Generate file new name ,physical path and uri
:type context: Context
:param context: execution context
:return updated context
"""
hackathon_name = context.hackathon_name if "hackathon_name" in context else None
# replace file_name with new random name
context.file_name = self.__generate_file_name(context.file_name, context.file_type, hackathon_name)
context.physical_path = self.__generate_physical_path(context.file_name, context.file_type)
context.url = self.__convert_physical_path_to_url(context.physical_path)
return context
def __convert_url_to_physical_path(self, url):
"""Return the physical_path according to its url
:type url: str|unicode
:param url: the absolute physical path of the file
:rtype str
:return public accessable URI
"""
# physical_path example: <base_dir>/static/upload/hack_image/hack01/20150708/win10-20140708-1234.jpg
# url example: http://localhost:15000/static/upload/hack_image/hack01/20150708/win10-20140708-1234.jpg
i = url.index("static")
path = url[i:]
return "%s/%s" % (self.base_dir, path)
def __convert_physical_path_to_url(self, physical_path):
"""Return the http URI according to physical_path
:type physical_path: str|unicode
:param physical_path: the absolute physical path of the file
:rtype str
:return public accessable URI
"""
# physical_path example: <base_dir>/static/upload/hack_image/hack01/20150708/win10-20140708-1234.jpg
# url example: http://localhost:15000/static/upload/hack_image/hack01/20150708/win10-20140708-1234.jpg
i = physical_path.index("static")
path = physical_path[i:]
return self.util.get_config("endpoint") + "/" + path
def __generate_physical_path(self, file_name, file_type, hackathon_name=None):
"""Return the physical path of file including directory and file name
files are saved at <base_dir>/static/upload/<file_type>/
:type file_name: str|unicode
:param file_name: the original file name
:type file_type: str | unicode
:param file_type: type of file which decides the directories where file is saved.
:rtype str
:return physical path of the file to be saved
"""
# <base_dir>/static/upload/<file_type>/<hackathon_name>/<date>/<file_name>
path = "%s/static/upload/%s%s/%s/%s" % (
self.base_dir,
file_type,
"/" + hackathon_name if hackathon_name else "",
time.strftime("%Y%m%d"),
file_name)
return path
@staticmethod
def __generate_file_name(origin_name, file_type, hackathon_name=None):
"""Generate a random file name if file_type is hack_image
:type origin_name: str | unicode
:param origin_name the origin name of file
:type file_type: str|unicode
:param file_type: type of file, defined by FILTE_TYPE in constants.py
:type hackathon_name: str | unicode
:param hackathon_name: name of hackathon related to this file
:rtype str
:return a random file name which includes hackathon_name and time as parts
"""
if not hackathon_name:
hackathon_name = ""
# handle uploaded images only since the uploaded file name can be very strange or contains Chinese
if file_type == FILE_TYPE.HACK_IMAGE:
extension = os.path.splitext(origin_name)[1]
new_name = "%s-%s-%s%s" % (
hackathon_name,
time.strftime("%Y%m%d"),
str(uuid.uuid1())[0:8],
extension
)
return new_name.strip('-')
else:
return origin_name
|
[
"sys.path.append",
"json.dump",
"os.remove",
"os.makedirs",
"os.path.dirname",
"os.path.realpath",
"os.path.exists",
"time.strftime",
"os.path.isfile",
"uuid.uuid1",
"os.path.splitext"
] |
[((117, 138), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (132, 138), False, 'import sys\n'), ((1465, 1477), 'os.path.isfile', 'isfile', (['path'], {}), '(path)\n', (1471, 1477), False, 'from os.path import realpath, dirname, isfile, abspath\n'), ((2044, 2062), 'os.path.dirname', 'dirname', (['file_path'], {}), '(file_path)\n', (2051, 2062), False, 'from os.path import realpath, dirname, isfile, abspath\n'), ((1491, 1506), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (1500, 1506), False, 'import os\n'), ((2123, 2140), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2134, 2140), False, 'import os\n'), ((2088, 2108), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2102, 2108), False, 'import os\n'), ((2638, 2659), 'json.dump', 'json.dump', (['content', 'f'], {}), '(content, f)\n', (2647, 2659), False, 'import json\n'), ((5699, 5722), 'time.strftime', 'time.strftime', (['"""%Y%m%d"""'], {}), "('%Y%m%d')\n", (5712, 5722), False, 'import time\n'), ((6607, 6636), 'os.path.splitext', 'os.path.splitext', (['origin_name'], {}), '(origin_name)\n', (6623, 6636), False, 'import os\n'), ((2975, 2993), 'os.path.realpath', 'realpath', (['__file__'], {}), '(__file__)\n', (2983, 2993), False, 'from os.path import realpath, dirname, isfile, abspath\n'), ((6728, 6751), 'time.strftime', 'time.strftime', (['"""%Y%m%d"""'], {}), "('%Y%m%d')\n", (6741, 6751), False, 'import time\n'), ((6773, 6785), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (6783, 6785), False, 'import uuid\n')]
|
from flask import Blueprint, render_template
from flask_login import login_required, current_user
from ctf.models.Score import Score
from ctf import db
from sys import path
path.append("..")
scoreboard = Blueprint("scoreboard", __name__)
@scoreboard.route("/scoreboard")
def scoreboard_out():
scores = Score.query.all()
scores.sort(key=lambda x: x.score, reverse=True)
return render_template("scoreboard.html", scores=scores)
|
[
"sys.path.append",
"flask.render_template",
"flask.Blueprint",
"ctf.models.Score.Score.query.all"
] |
[((173, 190), 'sys.path.append', 'path.append', (['""".."""'], {}), "('..')\n", (184, 190), False, 'from sys import path\n'), ((205, 238), 'flask.Blueprint', 'Blueprint', (['"""scoreboard"""', '__name__'], {}), "('scoreboard', __name__)\n", (214, 238), False, 'from flask import Blueprint, render_template\n'), ((309, 326), 'ctf.models.Score.Score.query.all', 'Score.query.all', ([], {}), '()\n', (324, 326), False, 'from ctf.models.Score import Score\n'), ((391, 440), 'flask.render_template', 'render_template', (['"""scoreboard.html"""'], {'scores': 'scores'}), "('scoreboard.html', scores=scores)\n", (406, 440), False, 'from flask import Blueprint, render_template\n')]
|
import pygame
import pygameMenu
import flatpakmanager_steamos
import pyflatpak
class gui():
def __init__(self, window_width, window_height, title):
self.window_width = window_width
self.window_height = window_height
self.title = title
self.framerate = 30
self.running = False
self.menu_available_page = 1
self.menu_installed_page = 1
self.__init_pygame()
self.__init_joysticks()
self.__flatpak_manager = pyflatpak.manager()
self.__init_menu()
def __init_pygame(self):
pygame.init()
self.window = pygame.display.set_mode((self.window_width, self.window_height))
pygame.display.set_caption(self.title)
self.clock = pygame.time.Clock()
self.__draw_splash_screen()
def __init_joysticks(self):
pygame.joystick.init()
joysticks = [pygame.joystick.Joystick(x) for x in range(pygame.joystick.get_count())]
for joystick in joysticks:
joystick.init()
def __init_menu(self):
self.menu_available = self.__generate_available_application_menu()
#self.menu_installed = self.__generate_installed_application_menu()
self.menu_main = self._create_menu(self.title)
self.menu_main.add_option("Available Software", self.menu_available)
#self.menu_main.add_option("Installed Software", self.menu_installed)
self.menu_main.add_option("Exit", pygameMenu.events.EXIT)
def __draw_background(self):
self.window.fill(flatpakmanager_steamos.color.background)
def __draw_splash_screen(self):
self.__draw_background()
# draw logo
logo = pygame.image.load(flatpakmanager_steamos.config.logo)
logo_x = self.window_width / 2 - logo.get_width() / 2
logo_rect = pygame.Rect(logo_x, 0, logo.get_width(), logo.get_height())
self.window.blit(logo, logo_rect)
# draw title
font = pygame.font.Font(flatpakmanager_steamos.config.font, 64)
text = font.render(self.title, False, flatpakmanager_steamos.color.text_title)
text_rectangle = text.get_rect()
text_rectangle.center = (self.window_width / 2, self.window_height - 64)
self.window.blit(text, text_rectangle)
pygame.display.update()
def __draw_load_screen(self, title):
self.__draw_background()
# draw title
font = pygame.font.Font(flatpakmanager_steamos.config.font, 64)
text = font.render(title, False, flatpakmanager_steamos.color.text_title)
text_rectangle = text.get_rect()
text_rectangle.center = (self.window_width / 2, self.window_height / 2)
self.window.blit(text, text_rectangle)
pygame.display.update()
def __generate_available_application_menu(self, label=None, page=None):
application_list = self.__flatpak_manager.applications_available
# Create menu
menu = self._create_menu("Available applications")
# Change the page and make sure we're on an existing page
if page > 1:
print(page)
self.menu_available_page = page
# add page changer
page_list = []
last_page = len(application_list) / flatpakmanager_steamos.config.applications_per_page + 1
for number in range(1, last_page + 1):
page_list.append(("Page {}/{}".format(number, last_page), number))
menu.add_selector("", page_list, onchange=self.__generate_available_application_menu,
selector_id='page_selector{}'.format(self.menu_available_page))
# add application buttons to menu
page_content = self.__get_page(application_list, self.menu_available_page)
for application in page_content:
menu.add_option(str(application), pygameMenu.events.BACK)
return menu
def _create_menu(self, title):
return pygameMenu.Menu(self.window, self.window_width, self.window_height,
flatpakmanager_steamos.config.font, self.title,
dopause=False,
menu_width=self.window_width,
menu_height=self.window_height,
menu_color=flatpakmanager_steamos.color.background,
menu_color_title=flatpakmanager_steamos.color.title,
color_selected=flatpakmanager_steamos.color.selected,
menu_alpha=100
)
def __get_page(self, application_list, page):
output = []
first_index = flatpakmanager_steamos.config.applications_per_page * (page - 1)
last_index = first_index + flatpakmanager_steamos.config.applications_per_page
for index in range(first_index, last_index):
if not index < len(application_list):
break
output.append(application_list[index])
print(output)
return output
def __read_input(self):
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
def run(self):
self.running = True
while self.running:
self.clock.tick(flatpakmanager_steamos.config.framerate)
self.menu_main.mainloop()
pygame.display.update()
def stop(self):
self.running = False
|
[
"pygame.joystick.init",
"pygame.joystick.get_count",
"pygame.joystick.Joystick",
"pygame.event.get",
"pygame.display.set_mode",
"pygameMenu.Menu",
"pygame.init",
"pygame.display.update",
"pygame.font.Font",
"pygame.image.load",
"pygame.display.set_caption",
"pygame.time.Clock",
"pyflatpak.manager"
] |
[((493, 512), 'pyflatpak.manager', 'pyflatpak.manager', ([], {}), '()\n', (510, 512), False, 'import pyflatpak\n'), ((578, 591), 'pygame.init', 'pygame.init', ([], {}), '()\n', (589, 591), False, 'import pygame\n'), ((614, 678), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(self.window_width, self.window_height)'], {}), '((self.window_width, self.window_height))\n', (637, 678), False, 'import pygame\n'), ((687, 725), 'pygame.display.set_caption', 'pygame.display.set_caption', (['self.title'], {}), '(self.title)\n', (713, 725), False, 'import pygame\n'), ((747, 766), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (764, 766), False, 'import pygame\n'), ((844, 866), 'pygame.joystick.init', 'pygame.joystick.init', ([], {}), '()\n', (864, 866), False, 'import pygame\n'), ((1688, 1741), 'pygame.image.load', 'pygame.image.load', (['flatpakmanager_steamos.config.logo'], {}), '(flatpakmanager_steamos.config.logo)\n', (1705, 1741), False, 'import pygame\n'), ((1963, 2019), 'pygame.font.Font', 'pygame.font.Font', (['flatpakmanager_steamos.config.font', '(64)'], {}), '(flatpakmanager_steamos.config.font, 64)\n', (1979, 2019), False, 'import pygame\n'), ((2285, 2308), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2306, 2308), False, 'import pygame\n'), ((2421, 2477), 'pygame.font.Font', 'pygame.font.Font', (['flatpakmanager_steamos.config.font', '(64)'], {}), '(flatpakmanager_steamos.config.font, 64)\n', (2437, 2477), False, 'import pygame\n'), ((2737, 2760), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2758, 2760), False, 'import pygame\n'), ((3919, 4308), 'pygameMenu.Menu', 'pygameMenu.Menu', (['self.window', 'self.window_width', 'self.window_height', 'flatpakmanager_steamos.config.font', 'self.title'], {'dopause': '(False)', 'menu_width': 'self.window_width', 'menu_height': 'self.window_height', 'menu_color': 'flatpakmanager_steamos.color.background', 'menu_color_title': 
'flatpakmanager_steamos.color.title', 'color_selected': 'flatpakmanager_steamos.color.selected', 'menu_alpha': '(100)'}), '(self.window, self.window_width, self.window_height,\n flatpakmanager_steamos.config.font, self.title, dopause=False,\n menu_width=self.window_width, menu_height=self.window_height,\n menu_color=flatpakmanager_steamos.color.background, menu_color_title=\n flatpakmanager_steamos.color.title, color_selected=\n flatpakmanager_steamos.color.selected, menu_alpha=100)\n', (3934, 4308), False, 'import pygameMenu\n'), ((5085, 5103), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (5101, 5103), False, 'import pygame\n'), ((888, 915), 'pygame.joystick.Joystick', 'pygame.joystick.Joystick', (['x'], {}), '(x)\n', (912, 915), False, 'import pygame\n'), ((5374, 5397), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (5395, 5397), False, 'import pygame\n'), ((931, 958), 'pygame.joystick.get_count', 'pygame.joystick.get_count', ([], {}), '()\n', (956, 958), False, 'import pygame\n')]
|
# -*- coding: utf-8 -*-
from karsender.database import get_collection
from karsender.services import validate_emails
__author__ = '<NAME> <<EMAIL>>'
from unittest import TestCase
class TestServices(TestCase):
def test_validate_emails(self):
validate_emails()
|
[
"karsender.services.validate_emails"
] |
[((256, 273), 'karsender.services.validate_emails', 'validate_emails', ([], {}), '()\n', (271, 273), False, 'from karsender.services import validate_emails\n')]
|
""" calculates certain quantities of interest using MESS+filesytem
"""
import os
import autofile
from mechanalyzer.inf import rxn as rinfo
from mechanalyzer.inf import spc as sinfo
from mechanalyzer.inf import thy as tinfo
from mechlib.amech_io import printer as ioprinter
from mechroutines.models import typ
from mechroutines.models import _vib as vib
# Functions to hand reading and formatting energies of single species
def read_energy(spc_dct_i, pf_filesystems,
spc_model_dct_i, run_prefix,
read_ene=True, read_zpe=True, conf=None, saddle=False):
""" Get the energy for a species on a channel
"""
# Read the electronic energy and ZPVE
e_elec = None
if read_ene:
e_elec = electronic_energy(
spc_dct_i, pf_filesystems, spc_model_dct_i, conf=conf)
# ioprinter.debug_message('e_elec in models ene ', e_elec)
e_zpe = None
if read_zpe:
e_zpe = zero_point_energy(
spc_dct_i, pf_filesystems, spc_model_dct_i,
run_prefix, saddle=saddle)
# ioprinter.debug_message('zpe in models ene ', e_zpe)
# Return the total energy requested
ene = None
if read_ene and read_zpe:
if e_elec is not None and e_zpe is not None:
ene = e_elec + e_zpe
elif read_ene and not read_zpe:
ene = e_elec
elif read_ene and not read_zpe:
ene = e_zpe
return ene
def electronic_energy(spc_dct_i, pf_filesystems, spc_model_dct_i, conf=None):
""" get high level energy at low level optimized geometry
"""
ioprinter.info_message('- Calculating electronic energy')
# spc_dct_i = spc_dct[spc_name]
rxn_info = spc_dct_i.get('rxn_info', None)
if rxn_info is not None:
spc_info = rinfo.ts_info(rxn_info)
else:
spc_info = sinfo.from_dct(spc_dct_i)
# Get the harmonic filesys information
if conf:
cnf_path = conf[1]
else:
[_, cnf_path, _, _, _] = pf_filesystems['harm']
# Get the electronic energy levels
ene_levels = tuple(val[1] for key, val in spc_model_dct_i['ene'].items()
if 'lvl' in key)
# Read the energies from the filesystem
e_elec = None
if os.path.exists(cnf_path):
e_elec = 0.0
# ioprinter.info_message('lvls', ene_levels)
for (coeff, level) in ene_levels:
# Build SP filesys
mod_thy_info = tinfo.modify_orb_label(level, spc_info)
sp_save_fs = autofile.fs.single_point(cnf_path)
sp_save_fs[-1].create(mod_thy_info[1:4])
# Read the energy
sp_path = sp_save_fs[-1].path(mod_thy_info[1:4])
if os.path.exists(sp_path):
ioprinter.reading('Energy', sp_path)
ene = sp_save_fs[-1].file.energy.read(mod_thy_info[1:4])
e_elec += (coeff * ene)
else:
ioprinter.warning_message('No energy at path')
e_elec = None
break
else:
ioprinter.warning_message('No conformer to calculate the energy')
return e_elec
def zero_point_energy(spc_dct_i,
pf_filesystems, spc_model_dct_i,
run_prefix, saddle=False):
""" compute the ZPE including torsional and anharmonic corrections
"""
ioprinter.info_message('- Calculating zero-point energy')
# Calculate ZPVE
is_atom = False
if not saddle:
if typ.is_atom(spc_dct_i):
is_atom = True
if is_atom:
zpe = 0.0
else:
_, _, zpe, _ = vib.vib_analysis(
spc_dct_i, pf_filesystems, spc_model_dct_i,
run_prefix, zrxn=(None if not saddle else 'placeholder'))
return zpe
def rpath_ref_idx(ts_dct, scn_vals, coord_name, scn_prefix,
ene_info1, ene_info2):
""" Get the reference energy along a reaction path
"""
# Set up the filesystem
zma_fs = autofile.fs.zmatrix(scn_prefix)
zma_path = zma_fs[-1].path([0])
scn_fs = autofile.fs.scan(zma_path)
ene_info1 = ene_info1[1][0][1]
ene_info2 = ene_info2[0]
ioprinter.debug_message('mod_eneinf1', ene_info1)
ioprinter.debug_message('mod_eneinf2', ene_info2)
mod_ene_info1 = tinfo.modify_orb_label(
sinfo.from_dct(ts_dct), ene_info1)
mod_ene_info2 = tinfo.modify_orb_label(
sinfo.from_dct(ts_dct), ene_info2)
ene1, ene2, ref_val = None, None, None
for val in reversed(scn_vals):
locs = [[coord_name], [val]]
path = scn_fs[-1].path(locs)
hs_fs = autofile.fs.high_spin(path)
if hs_fs[-1].file.energy.exists(mod_ene_info1[1:4]):
ene1 = hs_fs[-1].file.energy.read(mod_ene_info1[1:4])
if hs_fs[-1].file.energy.exists(mod_ene_info2[1:4]):
ene2 = hs_fs[-1].file.energy.read(mod_ene_info2[1:4])
if ene1 is not None and ene2 is not None:
ref_val = val
break
if ref_val is not None:
scn_idx = scn_vals.index(ref_val)
return scn_idx, ene1, ene2
|
[
"autofile.fs.scan",
"autofile.fs.high_spin",
"mechanalyzer.inf.thy.modify_orb_label",
"mechanalyzer.inf.spc.from_dct",
"mechlib.amech_io.printer.reading",
"mechroutines.models._vib.vib_analysis",
"os.path.exists",
"mechanalyzer.inf.rxn.ts_info",
"mechroutines.models.typ.is_atom",
"mechlib.amech_io.printer.info_message",
"autofile.fs.single_point",
"autofile.fs.zmatrix",
"mechlib.amech_io.printer.warning_message",
"mechlib.amech_io.printer.debug_message"
] |
[((1576, 1633), 'mechlib.amech_io.printer.info_message', 'ioprinter.info_message', (['"""- Calculating electronic energy"""'], {}), "('- Calculating electronic energy')\n", (1598, 1633), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((2222, 2246), 'os.path.exists', 'os.path.exists', (['cnf_path'], {}), '(cnf_path)\n', (2236, 2246), False, 'import os\n'), ((3332, 3389), 'mechlib.amech_io.printer.info_message', 'ioprinter.info_message', (['"""- Calculating zero-point energy"""'], {}), "('- Calculating zero-point energy')\n", (3354, 3389), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((3948, 3979), 'autofile.fs.zmatrix', 'autofile.fs.zmatrix', (['scn_prefix'], {}), '(scn_prefix)\n', (3967, 3979), False, 'import autofile\n'), ((4029, 4055), 'autofile.fs.scan', 'autofile.fs.scan', (['zma_path'], {}), '(zma_path)\n', (4045, 4055), False, 'import autofile\n'), ((4125, 4174), 'mechlib.amech_io.printer.debug_message', 'ioprinter.debug_message', (['"""mod_eneinf1"""', 'ene_info1'], {}), "('mod_eneinf1', ene_info1)\n", (4148, 4174), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((4179, 4228), 'mechlib.amech_io.printer.debug_message', 'ioprinter.debug_message', (['"""mod_eneinf2"""', 'ene_info2'], {}), "('mod_eneinf2', ene_info2)\n", (4202, 4228), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((1766, 1789), 'mechanalyzer.inf.rxn.ts_info', 'rinfo.ts_info', (['rxn_info'], {}), '(rxn_info)\n', (1779, 1789), True, 'from mechanalyzer.inf import rxn as rinfo\n'), ((1819, 1844), 'mechanalyzer.inf.spc.from_dct', 'sinfo.from_dct', (['spc_dct_i'], {}), '(spc_dct_i)\n', (1833, 1844), True, 'from mechanalyzer.inf import spc as sinfo\n'), ((3024, 3089), 'mechlib.amech_io.printer.warning_message', 'ioprinter.warning_message', (['"""No conformer to calculate the energy"""'], {}), "('No conformer to calculate the energy')\n", (3049, 3089), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((3462, 3484), 
'mechroutines.models.typ.is_atom', 'typ.is_atom', (['spc_dct_i'], {}), '(spc_dct_i)\n', (3473, 3484), False, 'from mechroutines.models import typ\n'), ((3580, 3700), 'mechroutines.models._vib.vib_analysis', 'vib.vib_analysis', (['spc_dct_i', 'pf_filesystems', 'spc_model_dct_i', 'run_prefix'], {'zrxn': "(None if not saddle else 'placeholder')"}), "(spc_dct_i, pf_filesystems, spc_model_dct_i, run_prefix,\n zrxn=None if not saddle else 'placeholder')\n", (3596, 3700), True, 'from mechroutines.models import _vib as vib\n'), ((4281, 4303), 'mechanalyzer.inf.spc.from_dct', 'sinfo.from_dct', (['ts_dct'], {}), '(ts_dct)\n', (4295, 4303), True, 'from mechanalyzer.inf import spc as sinfo\n'), ((4368, 4390), 'mechanalyzer.inf.spc.from_dct', 'sinfo.from_dct', (['ts_dct'], {}), '(ts_dct)\n', (4382, 4390), True, 'from mechanalyzer.inf import spc as sinfo\n'), ((4572, 4599), 'autofile.fs.high_spin', 'autofile.fs.high_spin', (['path'], {}), '(path)\n', (4593, 4599), False, 'import autofile\n'), ((2423, 2462), 'mechanalyzer.inf.thy.modify_orb_label', 'tinfo.modify_orb_label', (['level', 'spc_info'], {}), '(level, spc_info)\n', (2445, 2462), True, 'from mechanalyzer.inf import thy as tinfo\n'), ((2488, 2522), 'autofile.fs.single_point', 'autofile.fs.single_point', (['cnf_path'], {}), '(cnf_path)\n', (2512, 2522), False, 'import autofile\n'), ((2682, 2705), 'os.path.exists', 'os.path.exists', (['sp_path'], {}), '(sp_path)\n', (2696, 2705), False, 'import os\n'), ((2723, 2759), 'mechlib.amech_io.printer.reading', 'ioprinter.reading', (['"""Energy"""', 'sp_path'], {}), "('Energy', sp_path)\n", (2740, 2759), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((2907, 2953), 'mechlib.amech_io.printer.warning_message', 'ioprinter.warning_message', (['"""No energy at path"""'], {}), "('No energy at path')\n", (2932, 2953), True, 'from mechlib.amech_io import printer as ioprinter\n')]
|
"""Tests for base classes."""
import datetime
import unittest
from unittest import mock
from unittest.mock import MagicMock
import requests
from georss_client import (
UPDATE_ERROR,
UPDATE_OK,
FeedEntry,
GeoRssDistanceHelper,
GeoRssFeed,
)
from georss_client.xml_parser.geometry import Point, Polygon
from tests import MockGeoRssFeed
from tests.utils import load_fixture
HOME_COORDINATES_1 = (-31.0, 151.0)
HOME_COORDINATES_2 = (-37.0, 150.0)
class TestGeoRssFeed(unittest.TestCase):
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_ok(self, mock_session, mock_request):
"""Test updating feed is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("generic_feed_1.xml")
)
feed = MockGeoRssFeed(HOME_COORDINATES_1, None)
assert (
repr(feed) == "<MockGeoRssFeed(home=(-31.0, 151.0), "
"url=None, radius=None, categories=None)>"
)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 5
feed_entry = entries[0]
assert feed_entry.title == "Title 1"
assert feed_entry.external_id == "1234"
assert feed_entry.category == "Category 1"
assert feed_entry.published == datetime.datetime(2018, 9, 23, 8, 30)
assert feed_entry.updated == datetime.datetime(2018, 9, 23, 8, 35)
assert feed_entry.coordinates == (-37.2345, 149.1234)
self.assertAlmostEqual(feed_entry.distance_to_home, 714.4, 1)
feed_entry = entries[1]
assert feed_entry.title == "Title 2"
assert feed_entry.external_id == "2345"
self.assertIsNone(feed_entry.attribution)
assert repr(feed_entry) == "<FeedEntry(id=2345)>"
feed_entry = entries[2]
assert feed_entry.title == "Title 3"
assert feed_entry.external_id == "Title 3"
feed_entry = entries[3]
self.assertIsNone(feed_entry.title)
assert feed_entry.external_id == hash(feed_entry.coordinates)
feed_entry = entries[4]
assert feed_entry.title == "Title 5"
assert feed_entry.external_id == "5678"
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_ok_feed_2(self, mock_session, mock_request):
"""Test updating feed is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("generic_feed_2.xml")
)
feed = MockGeoRssFeed(HOME_COORDINATES_1, None)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 1
feed_entry = entries[0]
assert feed_entry.title == "Title 1"
assert feed_entry.external_id == "1234"
assert feed_entry.category == "Category 1"
assert feed_entry.coordinates == (-37.2345, 149.1234)
self.assertAlmostEqual(feed_entry.distance_to_home, 714.4, 1)
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_ok_feed_3(self, mock_session, mock_request):
"""Test updating feed is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("generic_feed_3.xml")
)
feed = MockGeoRssFeed(HOME_COORDINATES_1, None)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 3
feed_entry = entries[0]
assert feed_entry.external_id == "1234"
assert feed_entry.coordinates == (-34.93728111547821, 148.59710883878262)
self.assertAlmostEqual(feed_entry.distance_to_home, 491.7, 1)
feed_entry = entries[1]
assert feed_entry.external_id == "2345"
assert feed_entry.coordinates == (-34.937170989, 148.597182317)
self.assertAlmostEqual(feed_entry.distance_to_home, 491.8, 1)
feed_entry = entries[2]
assert feed_entry.external_id == "3456"
assert feed_entry.coordinates == (-29.962746645660683, 152.43090880416074)
self.assertAlmostEqual(feed_entry.distance_to_home, 176.5, 1)
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_ok_feed_6(self, mock_session, mock_request):
"""Test updating feed is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("generic_feed_6.xml")
)
feed = MockGeoRssFeed(HOME_COORDINATES_1, None)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 1
feed_entry = entries[0]
assert feed_entry.title == "Title 1"
assert feed_entry.external_id == "1234"
assert feed_entry.category == "Category 1"
assert feed_entry.coordinates == (-37.2345, 149.1234)
self.assertAlmostEqual(feed_entry.distance_to_home, 714.4, 1)
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_ok_with_radius_filtering(self, mock_session, mock_request):
"""Test updating feed is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("generic_feed_1.xml")
)
feed = MockGeoRssFeed(HOME_COORDINATES_2, None, filter_radius=90.0)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 4
self.assertAlmostEqual(entries[0].distance_to_home, 82.0, 1)
self.assertAlmostEqual(entries[1].distance_to_home, 77.0, 1)
self.assertAlmostEqual(entries[2].distance_to_home, 84.6, 1)
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_ok_with_radius_and_category_filtering(
self, mock_session, mock_request
):
"""Test updating feed is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("generic_feed_1.xml")
)
feed = MockGeoRssFeed(
HOME_COORDINATES_2,
None,
filter_radius=90.0,
filter_categories=["Category 2"],
)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 1
self.assertAlmostEqual(entries[0].distance_to_home, 77.0, 1)
feed = MockGeoRssFeed(
HOME_COORDINATES_2,
None,
filter_radius=90.0,
filter_categories=["Category 4"],
)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 0
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_error(self, mock_session, mock_request):
"""Test updating feed results in error."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = False
feed = MockGeoRssFeed(HOME_COORDINATES_1, None)
status, entries = feed.update()
assert status == UPDATE_ERROR
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_with_request_exception(self, mock_session, mock_request):
"""Test updating feed raises exception."""
mock_session.return_value.__enter__.return_value.send.side_effect = (
requests.exceptions.RequestException
)
feed = GeoRssFeed(HOME_COORDINATES_1, None)
status, entries = feed.update()
assert status == UPDATE_ERROR
self.assertIsNone(entries)
@mock.patch("requests.Request")
@mock.patch("requests.Session")
def test_update_bom(self, mock_session, mock_request):
"""Test updating feed with BOM (byte order mark) is ok."""
mock_session.return_value.__enter__.return_value.send.return_value.ok = True
mock_session.return_value.__enter__.return_value.send.return_value.text = (
load_fixture("xml_parser_bom_1.xml")
)
feed = MockGeoRssFeed(HOME_COORDINATES_1, None)
assert (
repr(feed) == "<MockGeoRssFeed(home=(-31.0, 151.0), "
"url=None, radius=None, categories=None)>"
)
status, entries = feed.update()
assert status == UPDATE_OK
self.assertIsNotNone(entries)
assert len(entries) == 0
class TestGeoRssDistanceHelper(unittest.TestCase):
    """Tests for the GeoRSS distance helper."""

    def test_extract_coordinates_from_point(self):
        """Test extracting coordinates from point."""
        point = Point(-30.0, 151.0)
        latitude, longitude = GeoRssDistanceHelper.extract_coordinates(point)
        self.assertEqual(latitude, -30.0)
        self.assertEqual(longitude, 151.0)

    def test_extract_coordinates_from_polygon(self):
        """Test extracting coordinates from polygon."""
        corners = [
            (-30.0, 151.0),
            (-30.0, 151.5),
            (-30.5, 151.5),
            (-30.5, 151.0),
            (-30.0, 151.0),
        ]
        polygon = Polygon([Point(lat, lon) for lat, lon in corners])
        latitude, longitude = GeoRssDistanceHelper.extract_coordinates(polygon)
        # Centre of the square, to one decimal place.
        self.assertAlmostEqual(latitude, -30.2, places=1)
        self.assertAlmostEqual(longitude, 151.2, places=1)

    def test_extract_coordinates_from_unsupported_geometry(self):
        """Test extracting coordinates from unsupported geometry."""
        geometry = MagicMock()
        latitude, longitude = GeoRssDistanceHelper.extract_coordinates(geometry)
        self.assertIsNone(latitude)
        self.assertIsNone(longitude)

    def test_distance_to_point(self):
        """Test calculating distance to point."""
        home = [-31.0, 150.0]
        point = Point(-30.0, 151.0)
        distance = GeoRssDistanceHelper.distance_to_geometry(home, point)
        self.assertAlmostEqual(distance, 146.8, places=1)

    def test_distance_to_polygon(self):
        """Test calculating distance to polygon."""
        home = [-31.0, 150.0]
        corners = [
            (-30.0, 151.0),
            (-30.0, 151.5),
            (-30.5, 151.5),
            (-30.5, 151.0),
            (-30.0, 151.0),
        ]
        polygon = Polygon([Point(lat, lon) for lat, lon in corners])
        distance = GeoRssDistanceHelper.distance_to_geometry(home, polygon)
        self.assertAlmostEqual(distance, 110.6, places=1)

    def test_distance_to_unsupported_geometry(self):
        """Test calculating distance to unsupported geometry."""
        geometry = MagicMock()
        distance = GeoRssDistanceHelper.distance_to_geometry([-31.0, 150.0], geometry)
        # Unknown geometries are reported as infinitely far away.
        self.assertEqual(distance, float("inf"))
class TestFeedEntry(unittest.TestCase):
    """Tests for the generic feed entry wrapper."""
    def test_simple_feed_entry(self):
        """Test feed entry behaviour."""
        # An entry built without a backing RSS element exposes None everywhere.
        feed_entry = FeedEntry(None, None)
        assert repr(feed_entry) == "<FeedEntry(id=None)>"
        self.assertIsNone(feed_entry.geometry)
        self.assertIsNone(feed_entry.coordinates)
        self.assertIsNone(feed_entry.title)
        self.assertIsNone(feed_entry.category)
        self.assertIsNone(feed_entry.attribution)
        self.assertIsNone(feed_entry.description)
        self.assertIsNone(feed_entry.published)
        self.assertIsNone(feed_entry.updated)
        # Regex-based attribute searches also come back empty when there is
        # no underlying entry to search in.
        self.assertIsNone(
            feed_entry._search_in_external_id(r"External ID (?P<custom_attribute>.+)$")
        )
        self.assertIsNone(
            feed_entry._search_in_title(r"Title (?P<custom_attribute>.+)$")
        )
        self.assertIsNone(
            feed_entry._search_in_description(r"Description (?P<custom_attribute>.+)$")
        )
    def test_feed_entry_search_in_attributes(self):
        """Test feed entry behaviour."""
        # Stub the underlying RSS entry: each attribute is provided via a
        # PropertyMock installed on the mock's type.
        rss_entry = mock.MagicMock()
        type(rss_entry).guid = mock.PropertyMock(return_value="Test 123")
        type(rss_entry).title = mock.PropertyMock(return_value="Title 123")
        type(rss_entry).description = mock.PropertyMock(return_value="Description 123")
        type(rss_entry).category = mock.PropertyMock(
            return_value=["Category 1", "Category 2"]
        )
        updated = datetime.datetime(2019, 4, 1, 8, 30, tzinfo=datetime.timezone.utc)
        type(rss_entry).updated_date = mock.PropertyMock(return_value=updated)
        feed_entry = FeedEntry(None, rss_entry)
        assert repr(feed_entry) == "<FeedEntry(id=Test 123)>"
        # Each _search_in_* helper extracts the named regex group.
        assert (
            feed_entry._search_in_external_id(r"Test (?P<custom_attribute>.+)$")
            == "123"
        )
        assert feed_entry._search_in_title(r"Title (?P<custom_attribute>.+)$") == "123"
        assert (
            feed_entry._search_in_description(r"Description (?P<custom_attribute>.+)$")
            == "123"
        )
        # Only the first category of the list is surfaced.
        assert feed_entry.category == "Category 1"
        assert feed_entry.description == "Description 123"
        assert feed_entry.updated == updated
|
[
"tests.MockGeoRssFeed",
"unittest.mock.MagicMock",
"georss_client.GeoRssDistanceHelper.distance_to_geometry",
"unittest.mock.PropertyMock",
"datetime.datetime",
"unittest.mock.patch",
"tests.utils.load_fixture",
"georss_client.GeoRssFeed",
"georss_client.xml_parser.geometry.Point",
"georss_client.GeoRssDistanceHelper.extract_coordinates",
"georss_client.FeedEntry"
] |
[((514, 544), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (524, 544), False, 'from unittest import mock\n'), ((550, 580), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (560, 580), False, 'from unittest import mock\n'), ((2359, 2389), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (2369, 2389), False, 'from unittest import mock\n'), ((2395, 2425), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (2405, 2425), False, 'from unittest import mock\n'), ((3275, 3305), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (3285, 3305), False, 'from unittest import mock\n'), ((3311, 3341), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (3321, 3341), False, 'from unittest import mock\n'), ((4572, 4602), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (4582, 4602), False, 'from unittest import mock\n'), ((4608, 4638), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (4618, 4638), False, 'from unittest import mock\n'), ((5488, 5518), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (5498, 5518), False, 'from unittest import mock\n'), ((5524, 5554), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (5534, 5554), False, 'from unittest import mock\n'), ((6337, 6367), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (6347, 6367), False, 'from unittest import mock\n'), ((6373, 6403), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (6383, 6403), False, 'from unittest import mock\n'), ((7484, 7514), 
'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (7494, 7514), False, 'from unittest import mock\n'), ((7520, 7550), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (7530, 7550), False, 'from unittest import mock\n'), ((7890, 7920), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (7900, 7920), False, 'from unittest import mock\n'), ((7926, 7956), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (7936, 7956), False, 'from unittest import mock\n'), ((8395, 8425), 'unittest.mock.patch', 'mock.patch', (['"""requests.Request"""'], {}), "('requests.Request')\n", (8405, 8425), False, 'from unittest import mock\n'), ((8431, 8461), 'unittest.mock.patch', 'mock.patch', (['"""requests.Session"""'], {}), "('requests.Session')\n", (8441, 8461), False, 'from unittest import mock\n'), ((860, 894), 'tests.utils.load_fixture', 'load_fixture', (['"""generic_feed_1.xml"""'], {}), "('generic_feed_1.xml')\n", (872, 894), False, 'from tests.utils import load_fixture\n'), ((921, 961), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), '(HOME_COORDINATES_1, None)\n', (935, 961), False, 'from tests import MockGeoRssFeed\n'), ((2712, 2746), 'tests.utils.load_fixture', 'load_fixture', (['"""generic_feed_2.xml"""'], {}), "('generic_feed_2.xml')\n", (2724, 2746), False, 'from tests.utils import load_fixture\n'), ((2773, 2813), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), '(HOME_COORDINATES_1, None)\n', (2787, 2813), False, 'from tests import MockGeoRssFeed\n'), ((3628, 3662), 'tests.utils.load_fixture', 'load_fixture', (['"""generic_feed_3.xml"""'], {}), "('generic_feed_3.xml')\n", (3640, 3662), False, 'from tests.utils import load_fixture\n'), ((3689, 3729), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), 
'(HOME_COORDINATES_1, None)\n', (3703, 3729), False, 'from tests import MockGeoRssFeed\n'), ((4925, 4959), 'tests.utils.load_fixture', 'load_fixture', (['"""generic_feed_6.xml"""'], {}), "('generic_feed_6.xml')\n", (4937, 4959), False, 'from tests.utils import load_fixture\n'), ((4986, 5026), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), '(HOME_COORDINATES_1, None)\n', (5000, 5026), False, 'from tests import MockGeoRssFeed\n'), ((5856, 5890), 'tests.utils.load_fixture', 'load_fixture', (['"""generic_feed_1.xml"""'], {}), "('generic_feed_1.xml')\n", (5868, 5890), False, 'from tests.utils import load_fixture\n'), ((5917, 5977), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_2', 'None'], {'filter_radius': '(90.0)'}), '(HOME_COORDINATES_2, None, filter_radius=90.0)\n', (5931, 5977), False, 'from tests import MockGeoRssFeed\n'), ((6732, 6766), 'tests.utils.load_fixture', 'load_fixture', (['"""generic_feed_1.xml"""'], {}), "('generic_feed_1.xml')\n", (6744, 6766), False, 'from tests.utils import load_fixture\n'), ((6793, 6891), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_2', 'None'], {'filter_radius': '(90.0)', 'filter_categories': "['Category 2']"}), "(HOME_COORDINATES_2, None, filter_radius=90.0,\n filter_categories=['Category 2'])\n", (6807, 6891), False, 'from tests import MockGeoRssFeed\n'), ((7178, 7276), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_2', 'None'], {'filter_radius': '(90.0)', 'filter_categories': "['Category 4']"}), "(HOME_COORDINATES_2, None, filter_radius=90.0,\n filter_categories=['Category 4'])\n", (7192, 7276), False, 'from tests import MockGeoRssFeed\n'), ((7765, 7805), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), '(HOME_COORDINATES_1, None)\n', (7779, 7805), False, 'from tests import MockGeoRssFeed\n'), ((8239, 8275), 'georss_client.GeoRssFeed', 'GeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), '(HOME_COORDINATES_1, None)\n', 
(8249, 8275), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((8769, 8805), 'tests.utils.load_fixture', 'load_fixture', (['"""xml_parser_bom_1.xml"""'], {}), "('xml_parser_bom_1.xml')\n", (8781, 8805), False, 'from tests.utils import load_fixture\n'), ((8832, 8872), 'tests.MockGeoRssFeed', 'MockGeoRssFeed', (['HOME_COORDINATES_1', 'None'], {}), '(HOME_COORDINATES_1, None)\n', (8846, 8872), False, 'from tests import MockGeoRssFeed\n'), ((9396, 9415), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.0)'], {}), '(-30.0, 151.0)\n', (9401, 9415), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((9446, 9498), 'georss_client.GeoRssDistanceHelper.extract_coordinates', 'GeoRssDistanceHelper.extract_coordinates', (['mock_point'], {}), '(mock_point)\n', (9486, 9498), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((9961, 10015), 'georss_client.GeoRssDistanceHelper.extract_coordinates', 'GeoRssDistanceHelper.extract_coordinates', (['mock_polygon'], {}), '(mock_polygon)\n', (10001, 10015), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((10291, 10302), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (10300, 10302), False, 'from unittest.mock import MagicMock\n'), ((10333, 10400), 'georss_client.GeoRssDistanceHelper.extract_coordinates', 'GeoRssDistanceHelper.extract_coordinates', (['mock_unsupported_geometry'], {}), '(mock_unsupported_geometry)\n', (10373, 10400), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((10648, 10667), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.0)'], {}), '(-30.0, 151.0)\n', (10653, 10667), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((10687, 10758), 
'georss_client.GeoRssDistanceHelper.distance_to_geometry', 'GeoRssDistanceHelper.distance_to_geometry', (['home_coordinates', 'mock_point'], {}), '(home_coordinates, mock_point)\n', (10728, 10758), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((11239, 11312), 'georss_client.GeoRssDistanceHelper.distance_to_geometry', 'GeoRssDistanceHelper.distance_to_geometry', (['home_coordinates', 'mock_polygon'], {}), '(home_coordinates, mock_polygon)\n', (11280, 11312), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((11583, 11594), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (11592, 11594), False, 'from unittest.mock import MagicMock\n'), ((11614, 11704), 'georss_client.GeoRssDistanceHelper.distance_to_geometry', 'GeoRssDistanceHelper.distance_to_geometry', (['home_coordinates', 'mock_unsupported_geometry'], {}), '(home_coordinates,\n mock_unsupported_geometry)\n', (11655, 11704), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((11905, 11926), 'georss_client.FeedEntry', 'FeedEntry', (['None', 'None'], {}), '(None, None)\n', (11914, 11926), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((12844, 12860), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (12858, 12860), False, 'from unittest import mock\n'), ((12892, 12934), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'return_value': '"""Test 123"""'}), "(return_value='Test 123')\n", (12909, 12934), False, 'from unittest import mock\n'), ((12967, 13010), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'return_value': '"""Title 123"""'}), "(return_value='Title 123')\n", (12984, 13010), False, 'from unittest import mock\n'), ((13049, 13098), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'return_value': '"""Description 123"""'}), 
"(return_value='Description 123')\n", (13066, 13098), False, 'from unittest import mock\n'), ((13134, 13194), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'return_value': "['Category 1', 'Category 2']"}), "(return_value=['Category 1', 'Category 2'])\n", (13151, 13194), False, 'from unittest import mock\n'), ((13235, 13301), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(4)', '(1)', '(8)', '(30)'], {'tzinfo': 'datetime.timezone.utc'}), '(2019, 4, 1, 8, 30, tzinfo=datetime.timezone.utc)\n', (13252, 13301), False, 'import datetime\n'), ((13341, 13380), 'unittest.mock.PropertyMock', 'mock.PropertyMock', ([], {'return_value': 'updated'}), '(return_value=updated)\n', (13358, 13380), False, 'from unittest import mock\n'), ((13403, 13429), 'georss_client.FeedEntry', 'FeedEntry', (['None', 'rss_entry'], {}), '(None, rss_entry)\n', (13412, 13429), False, 'from georss_client import UPDATE_ERROR, UPDATE_OK, FeedEntry, GeoRssDistanceHelper, GeoRssFeed\n'), ((1472, 1509), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(9)', '(23)', '(8)', '(30)'], {}), '(2018, 9, 23, 8, 30)\n', (1489, 1509), False, 'import datetime\n'), ((1547, 1584), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(9)', '(23)', '(8)', '(35)'], {}), '(2018, 9, 23, 8, 35)\n', (1564, 1584), False, 'import datetime\n'), ((9738, 9757), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.0)'], {}), '(-30.0, 151.0)\n', (9743, 9757), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((9775, 9794), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.5)'], {}), '(-30.0, 151.5)\n', (9780, 9794), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((9812, 9831), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.5)', '(151.5)'], {}), '(-30.5, 151.5)\n', (9817, 9831), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((9849, 9868), 'georss_client.xml_parser.geometry.Point', 
'Point', (['(-30.5)', '(151.0)'], {}), '(-30.5, 151.0)\n', (9854, 9868), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((9886, 9905), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.0)'], {}), '(-30.0, 151.0)\n', (9891, 9905), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((11027, 11046), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.0)'], {}), '(-30.0, 151.0)\n', (11032, 11046), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((11064, 11083), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.5)'], {}), '(-30.0, 151.5)\n', (11069, 11083), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((11101, 11120), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.5)', '(151.5)'], {}), '(-30.5, 151.5)\n', (11106, 11120), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((11138, 11157), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.5)', '(151.0)'], {}), '(-30.5, 151.0)\n', (11143, 11157), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n'), ((11175, 11194), 'georss_client.xml_parser.geometry.Point', 'Point', (['(-30.0)', '(151.0)'], {}), '(-30.0, 151.0)\n', (11180, 11194), False, 'from georss_client.xml_parser.geometry import Point, Polygon\n')]
|
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
VERSION = "1.0.1"
setup(name='fast_rl',
version=VERSION,
description='Fastai for computer vision and tabular learning has been amazing. One would wish that this would '
'be the same for RL. The purpose of this repo is to have a framework that is as easy as possible to '
'start, but also designed for testing new agents. ',
url='https://github.com/josiahls/fast-reinforcement-learning',
author='<NAME>',
author_email='<EMAIL>',
python_requires='>=3.6',
long_description=long_description,
long_description_content_type="text/markdown",
license='',
packages=find_packages(),
zip_safe=False,
install_requires=['fastai>=1.0.59', 'gym[box2d, atari]', 'jupyter'],
extras_require={'all': [
'gym-minigrid',
'moviepy'
# 'gym_maze @ git+https://github.com/MattChanTK/gym-maze.git',
# 'pybullet-gym @ git+https://github.com/benelot/pybullet-gym.git'
]},
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
)
|
[
"setuptools.find_packages"
] |
[((768, 783), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (781, 783), False, 'from setuptools import setup, find_packages\n')]
|
import logging
from docserver.api import schemas
from docserver.config import config
from docserver.db import models as db_models
logger = logging.getLogger(__name__)
def delete_package(package: schemas.BasePackage, provided_permissions=None):
    """Delete every stored package matching *package* that the caller may delete.

    Args:
        package: Search parameters identifying the package(s) to remove.
        provided_permissions: Caller's permissions, checked against each
            matched package's 'delete' authorisation.

    Returns:
        list: Names of the packages that were actually deleted.
    """
    db = config.db.local_session()
    # Use a loop name distinct from the ``package`` argument so the
    # parameter is not shadowed inside the loop body.
    matches = db_models.Package.read(params=package.dict(), db=db)
    deleted = []
    for match in matches:
        if match.is_authorised(provided_permissions, 'delete'):
            match.delete(db=db)
            deleted.append(match.name)
    return deleted
def delete_version(documentation_version: schemas.BasePackageVersion, provided_permissions=None):
    """Delete one documentation version from every matching, authorised package."""
    db = config.db.local_session()
    deleted = []
    for pkg in db_models.Package.read(params=documentation_version.dict(), db=db):
        version = pkg.get_version(documentation_version.version)
        if version and pkg.is_authorised(provided_permissions, 'delete'):
            version.delete(db=db)
            deleted.append(f'{pkg.name}-{version.version}')
            db.refresh(pkg)
            # Drop the package record entirely once its last version is gone.
            if not pkg.versions:
                pkg.delete(db=db)
                deleted.append(pkg.name)
    return deleted
|
[
"logging.getLogger",
"docserver.config.config.db.local_session"
] |
[((142, 169), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (159, 169), False, 'import logging\n'), ((258, 283), 'docserver.config.config.db.local_session', 'config.db.local_session', ([], {}), '()\n', (281, 283), False, 'from docserver.config import config\n'), ((667, 692), 'docserver.config.config.db.local_session', 'config.db.local_session', ([], {}), '()\n', (690, 692), False, 'from docserver.config import config\n')]
|
from os import walk, remove
def get_all_files(directory):
    """Return the names of all files under *directory*, searched recursively.

    Args:
        directory: Root directory to walk.

    Returns:
        list: Bare file names (no directory component) from every level of
        the tree. Empty if the directory does not exist or holds no files.
    """
    # os.walk yields (dirpath, dirnames, filenames) per directory level;
    # only the file names are collected.
    return [name for _, _, filenames in walk(directory) for name in filenames]
def remove_file(filename, directory):
    """Remove *filename* from *directory*, silently ignoring a missing file.

    Args:
        filename: Name of the file to delete.
        directory: Directory containing the file.
    """
    # os.path.join is portable, unlike manual '/' concatenation.
    from os.path import join

    try:
        remove(join(directory, filename))
    except FileNotFoundError:
        # Deliberate best-effort delete: a missing file is not an error.
        pass
|
[
"os.remove",
"os.walk"
] |
[((151, 166), 'os.walk', 'walk', (['directory'], {}), '(directory)\n', (155, 166), False, 'from os import walk, remove\n'), ((312, 346), 'os.remove', 'remove', (["(directory + '/' + filename)"], {}), "(directory + '/' + filename)\n", (318, 346), False, 'from os import walk, remove\n')]
|
import re
import argparse
from typing import Optional, List
from dataclasses import dataclass
from lark import Lark, Transformer, v_args
USAGE = "A command line calculator"
@dataclass
class Token:
    """Container pairing a token name with its textual value."""
    # Token kind, e.g. the grammar terminal name.
    name: str
    # The matched text for this token.
    value: str
calc_grammar = """
?start: sum
| NAME "=" sum -> assign_var
?sum: product
| sum "+" product -> add
| sum "-" product -> sub
?product: atom
| product "*" atom -> mul
| product "/" atom -> div
?atom: NUMBER -> number
| "-" atom -> neg
| NAME -> var
| "(" sum ")"
%import common.CNAME -> NAME
%import common.NUMBER
%import common.WS_INLINE
%ignore WS_INLINE
"""
@v_args(inline=True)    # Affects the signatures of the methods
class CalculateTree(Transformer):
    """Lark transformer that evaluates the calculator parse tree inline."""
    # The arithmetic rules map directly onto the operator-module functions;
    # NUMBER tokens are converted with float.
    from operator import add, sub, mul, truediv as div, neg
    number = float
    def __init__(self):
        # Symbol table for variables created via "name = expr".
        self.vars = {}
    def assign_var(self, name, value):
        # Store the value and also return it, so assignments evaluate to it.
        self.vars[name] = value
        return value
    def var(self, name):
        try:
            return self.vars[name]
        except KeyError:
            raise Exception("Variable not found: %s" % name)
def calculate(formula):
    """Parse and evaluate *formula*, returning its numeric result."""
    # The transformer evaluates the tree as it is built by the LALR parser.
    parser = Lark(calc_grammar, parser='lalr', transformer=CalculateTree())
    return parser.parse(formula)
def main():
    """CLI entry point: evaluate the formula given on the command line."""
    arg_parser = argparse.ArgumentParser("calc")
    arg_parser.add_argument("formula", action="store")
    parsed = arg_parser.parse_args()
    result = calculate(parsed.formula)
    if result is not None:
        print(result)
# Run the calculator CLI only when executed as a script.
if __name__ == '__main__':
    main()
|
[
"lark.v_args",
"argparse.ArgumentParser"
] |
[((739, 758), 'lark.v_args', 'v_args', ([], {'inline': '(True)'}), '(inline=True)\n', (745, 758), False, 'from lark import Lark, Transformer, v_args\n'), ((1431, 1462), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""calc"""'], {}), "('calc')\n", (1454, 1462), False, 'import argparse\n')]
|
from django.urls import path
from . import views
from .views import CustomLoginView, RegisterPage
from django.contrib.auth.views import LogoutView
# URL routes for the todo app: authentication views (login/logout/register)
# followed by the list/insert/delete item views.
urlpatterns = [
    path('login/', CustomLoginView.as_view(),name='login'),
    # After logout, redirect straight back to the login page.
    path('logout/', LogoutView.as_view(next_page='login'),name='logout'),
    path('register/', RegisterPage.as_view(),name='register'),
    path('list/',views.list_todo_items, name='items'),
    path('insert_todo/',views.insert_todo_item, name='insert_todo_item'),
    path('delete_todo/<int:todo_id>/',views.delete_todo_item, name='delete_todo_item'),
]
|
[
"django.contrib.auth.views.LogoutView.as_view",
"django.urls.path"
] |
[((376, 426), 'django.urls.path', 'path', (['"""list/"""', 'views.list_todo_items'], {'name': '"""items"""'}), "('list/', views.list_todo_items, name='items')\n", (380, 426), False, 'from django.urls import path\n'), ((432, 501), 'django.urls.path', 'path', (['"""insert_todo/"""', 'views.insert_todo_item'], {'name': '"""insert_todo_item"""'}), "('insert_todo/', views.insert_todo_item, name='insert_todo_item')\n", (436, 501), False, 'from django.urls import path\n'), ((507, 595), 'django.urls.path', 'path', (['"""delete_todo/<int:todo_id>/"""', 'views.delete_todo_item'], {'name': '"""delete_todo_item"""'}), "('delete_todo/<int:todo_id>/', views.delete_todo_item, name=\n 'delete_todo_item')\n", (511, 595), False, 'from django.urls import path\n'), ((251, 288), 'django.contrib.auth.views.LogoutView.as_view', 'LogoutView.as_view', ([], {'next_page': '"""login"""'}), "(next_page='login')\n", (269, 288), False, 'from django.contrib.auth.views import LogoutView\n')]
|
import sklearn.tree
import os
import pandas as pd
import numpy as np
from hydroDL import kPath
from hydroDL.data import usgs, gageII
from hydroDL.post import axplot
import matplotlib.pyplot as plt
# Directory holding the precomputed C-Q (concentration-discharge) results.
dirCQ = os.path.join(kPath.dirWQ, 'C-Q')
# Slope and sample-count tables, both indexed by USGS site number.
dfS = pd.read_csv(os.path.join(dirCQ, 'slope'), dtype={
    'siteNo': str}).set_index('siteNo')
dfN = pd.read_csv(os.path.join(dirCQ, 'nSample'), dtype={
    'siteNo': str}).set_index('siteNo')
siteNoLst = dfS.index.tolist()
codeLst = dfS.columns.tolist()
# Free-text GAGES-II columns that are not useful as model attributes.
dropColLst = ['STANAME', 'WR_REPORT_REMARKS',
              'ADR_CITATION', 'SCREENING_COMMENTS']
dfX = gageII.readData(siteNoLst=siteNoLst).drop(columns=dropColLst)
dfX = gageII.updateCode(dfX)
dfCrd = gageII.readData(varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst)
# Focus on one water-quality code; keep only sites with a defined slope
# and more than 10 samples.
code = '00955'
indValid = np.where((~np.isnan(dfS['00955'].values))
                 & (dfN['00955'].values > 10))[0]
dataAll = dfS[code][indValid]
# Symmetric colour/axis range clipped to the 1st/99th percentile magnitude.
vr = np.max([np.abs(np.percentile(dataAll, 1)),
        np.abs(np.percentile(dataAll, 99))])
vRange = [-vr, vr]
def subTree(indInput):
    """Fit a depth-1 regression tree on gauge attributes and split the sites.

    Args:
        indInput: integer indices (into the module-level tables) of the
            sites belonging to this node.

    Returns:
        tuple: (left-child indices, right-child indices, name of the
        splitting attribute, split threshold).
    """
    x = dfX.values[indInput, :]
    y = dfS[code].values[indInput]
    # Fill missing attribute values with a sentinel so the tree can fit.
    x[np.isnan(x)] = -99
    clf = sklearn.tree.DecisionTreeRegressor(max_depth=1)
    clf = clf.fit(x, y)
    tree = clf.tree_
    # Node 0 is the root; with max_depth=1 it holds the only split.
    feat = dfX.columns[tree.feature[0]]
    th = tree.threshold[0]
    indLeft = np.where(x[:, tree.feature[0]] <= tree.threshold[0])[0]
    indRight = np.where(x[:, tree.feature[0]] > tree.threshold[0])[0]
    # Map local (within-node) positions back to global site indices.
    indLeftG = indInput[indLeft]
    indRightG = indInput[indRight]
    return indLeftG, indRightG, feat, th
def plotCdf(ax, indInput, indLeft, indRight):
    """Plot empirical CDFs of the slope values for a node and its children."""
    groups = [
        ('parent', 'g', indInput),
        ('left', 'b', indLeft),
        ('right', 'r', indRight),
    ]
    for label, color, ind in groups:
        values = dfS[code].values[ind]
        # Empirical CDF: sorted finite values vs. their rank fraction.
        xSort = np.sort(values[~np.isnan(values)])
        yRank = np.arange(1, len(xSort) + 1) / float(len(xSort))
        ax.plot(xSort, yRank, color=color, label=label)
    ax.set_xlim(vRange)
    ax.legend(loc='best', frameon=False)
def plotMap(ax, indInput):
    """Map the selected sites' slope values at their gauge coordinates."""
    lat = dfCrd['LAT_GAGE'][indInput]
    lon = dfCrd['LNG_GAGE'][indInput]
    data = dfS[code][indInput]
    # Shared colour range (vRange) keeps the two child maps comparable.
    axplot.mapPoint(ax, lat, lon, data, vRange=vRange, s=10)
# Perform a single split on all valid sites, then visualise the result:
# first the CDFs of parent/left/right, then a map per child.
indInput = indValid
indLeft, indRight, feat, th = subTree(indInput)
fig, ax = plt.subplots(1, 1)
plotCdf(ax, indInput, indLeft, indRight)
fig.show()
fig, axes = plt.subplots(2, 1)
plotMap(axes[0], indLeft)
plotMap(axes[1], indRight)
fig.show()
|
[
"os.path.join",
"numpy.isnan",
"numpy.percentile",
"numpy.where",
"hydroDL.post.axplot.mapPoint",
"hydroDL.data.gageII.updateCode",
"hydroDL.data.gageII.readData",
"matplotlib.pyplot.subplots"
] |
[((206, 238), 'os.path.join', 'os.path.join', (['kPath.dirWQ', '"""C-Q"""'], {}), "(kPath.dirWQ, 'C-Q')\n", (218, 238), False, 'import os\n'), ((682, 704), 'hydroDL.data.gageII.updateCode', 'gageII.updateCode', (['dfX'], {}), '(dfX)\n', (699, 704), False, 'from hydroDL.data import usgs, gageII\n'), ((713, 782), 'hydroDL.data.gageII.readData', 'gageII.readData', ([], {'varLst': "['LAT_GAGE', 'LNG_GAGE']", 'siteNoLst': 'siteNoLst'}), "(varLst=['LAT_GAGE', 'LNG_GAGE'], siteNoLst=siteNoLst)\n", (728, 782), False, 'from hydroDL.data import usgs, gageII\n'), ((2385, 2403), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (2397, 2403), True, 'import matplotlib.pyplot as plt\n'), ((2469, 2487), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (2481, 2487), True, 'import matplotlib.pyplot as plt\n'), ((2248, 2304), 'hydroDL.post.axplot.mapPoint', 'axplot.mapPoint', (['ax', 'lat', 'lon', 'data'], {'vRange': 'vRange', 's': '(10)'}), '(ax, lat, lon, data, vRange=vRange, s=10)\n', (2263, 2304), False, 'from hydroDL.post import axplot\n'), ((614, 650), 'hydroDL.data.gageII.readData', 'gageII.readData', ([], {'siteNoLst': 'siteNoLst'}), '(siteNoLst=siteNoLst)\n', (629, 650), False, 'from hydroDL.data import usgs, gageII\n'), ((1150, 1161), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (1158, 1161), True, 'import numpy as np\n'), ((1353, 1405), 'numpy.where', 'np.where', (['(x[:, tree.feature[0]] <= tree.threshold[0])'], {}), '(x[:, tree.feature[0]] <= tree.threshold[0])\n', (1361, 1405), True, 'import numpy as np\n'), ((1424, 1475), 'numpy.where', 'np.where', (['(x[:, tree.feature[0]] > tree.threshold[0])'], {}), '(x[:, tree.feature[0]] > tree.threshold[0])\n', (1432, 1475), True, 'import numpy as np\n'), ((257, 285), 'os.path.join', 'os.path.join', (['dirCQ', '"""slope"""'], {}), "(dirCQ, 'slope')\n", (269, 285), False, 'import os\n'), ((353, 383), 'os.path.join', 'os.path.join', (['dirCQ', '"""nSample"""'], {}), 
"(dirCQ, 'nSample')\n", (365, 383), False, 'import os\n'), ((955, 980), 'numpy.percentile', 'np.percentile', (['dataAll', '(1)'], {}), '(dataAll, 1)\n', (968, 980), True, 'import numpy as np\n'), ((1003, 1029), 'numpy.percentile', 'np.percentile', (['dataAll', '(99)'], {}), '(dataAll, 99)\n', (1016, 1029), True, 'import numpy as np\n'), ((821, 850), 'numpy.isnan', 'np.isnan', (["dfS['00955'].values"], {}), "(dfS['00955'].values)\n", (829, 850), True, 'import numpy as np\n'), ((1897, 1911), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (1905, 1911), True, 'import numpy as np\n')]
|
import cv2
from gaze_tracking import GazeTracking
from imutils.video import VideoStream
import imutils
import argparse
import time
# Command line: an optional video file path; the webcam is used otherwise.
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
                help="path to the (optional) video file")
args = vars(ap.parse_args())
gaze = GazeTracking()
# if a video path was not supplied, grab the reference
# to the webcam
if not args.get("video", False):
    vs = VideoStream(src=0).start()
# otherwise, grab a reference to the video file
else:
    vs = cv2.VideoCapture(args["video"])
# allow the camera or video file to warm up
time.sleep(2.0)
count = 0
while True:
    t = time.time()
    frame = vs.read()
    # VideoCapture.read() returns (grabbed, frame); VideoStream returns
    # just the frame, so unpack only in the video-file case.
    frame = frame[1] if args.get("video", False) else frame
    if frame is None:
        break
    gaze.refresh(frame)
    frame = gaze.annotated_frame()
    text = ""
    #if gaze.is_blinking():
    #    text = "Blinking"
    # Overlay labels are Portuguese: right / left / front / looked away.
    if gaze.is_right():
        text = "DIREITA"
    elif gaze.is_left():
        text = "ESQUERDA"
    elif gaze.is_center():
        text = "FRENTE"
    else:
        text = "DEVIOU"
    cv2.putText(frame, text, (850, 60), cv2.FONT_HERSHEY_DUPLEX, 1.4, (147, 58, 31), 2)
    #left_pupil = gaze.pupil_left_coords()
    #right_pupil = gaze.pupil_right_coords()
    #cv2.putText(frame, "Left pupil: " + str(left_pupil), (90, 130), cv2.FONT_HERSHEY_DUPLEX, 0.5, (147, 58, 31), 1)
    #cv2.putText(frame, "Right pupil: " + str(right_pupil), (90, 165), cv2.FONT_HERSHEY_DUPLEX, 0.5, (147, 58, 31), 1)
    # Save every annotated frame to disk for later inspection.
    cv2.imwrite("/home/gabriel/Documentos/blur_face/frame%d.jpg" % count, frame)
    count += 1
    print("Time to process the frame = {}".format(time.time() - t))
# close all windows
cv2.destroyAllWindows()
|
[
"imutils.video.VideoStream",
"cv2.putText",
"argparse.ArgumentParser",
"cv2.imwrite",
"time.sleep",
"cv2.VideoCapture",
"time.time",
"gaze_tracking.GazeTracking",
"cv2.destroyAllWindows"
] |
[((137, 162), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (160, 162), False, 'import argparse\n'), ((276, 290), 'gaze_tracking.GazeTracking', 'GazeTracking', ([], {}), '()\n', (288, 290), False, 'from gaze_tracking import GazeTracking\n'), ((565, 580), 'time.sleep', 'time.sleep', (['(2.0)'], {}), '(2.0)\n', (575, 580), False, 'import time\n'), ((1574, 1597), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1595, 1597), False, 'import cv2\n'), ((489, 520), 'cv2.VideoCapture', 'cv2.VideoCapture', (["args['video']"], {}), "(args['video'])\n", (505, 520), False, 'import cv2\n'), ((611, 622), 'time.time', 'time.time', ([], {}), '()\n', (620, 622), False, 'import time\n'), ((994, 1081), 'cv2.putText', 'cv2.putText', (['frame', 'text', '(850, 60)', 'cv2.FONT_HERSHEY_DUPLEX', '(1.4)', '(147, 58, 31)', '(2)'], {}), '(frame, text, (850, 60), cv2.FONT_HERSHEY_DUPLEX, 1.4, (147, 58,\n 31), 2)\n', (1005, 1081), False, 'import cv2\n'), ((1396, 1472), 'cv2.imwrite', 'cv2.imwrite', (["('/home/gabriel/Documentos/blur_face/frame%d.jpg' % count)", 'frame'], {}), "('/home/gabriel/Documentos/blur_face/frame%d.jpg' % count, frame)\n", (1407, 1472), False, 'import cv2\n'), ((402, 420), 'imutils.video.VideoStream', 'VideoStream', ([], {'src': '(0)'}), '(src=0)\n', (413, 420), False, 'from imutils.video import VideoStream\n'), ((1533, 1544), 'time.time', 'time.time', ([], {}), '()\n', (1542, 1544), False, 'import time\n')]
|
#!/usr/bin/env python3
import decimal
import mock
import wallycore as wally
import garecovery.two_of_three
from garecovery.clargs import DEFAULT_SUBACCOUNT_SEARCH_DEPTH
from gaservices.utils import txutil
from .util import AuthServiceProxy, datafile, get_output, parse_summary, raise_IOError
# Replace bitcoin_config's open() with a helper that raises IOError so the
# tests never read a real bitcoin configuration file.
garecovery.bitcoin_config.open = raise_IOError
# Search depths used throughout the tests below.
sub_depth = DEFAULT_SUBACCOUNT_SEARCH_DEPTH
key_depth = 20
# Testnet address passed as the recovery destination in the tests below.
destination_address = 'mynHfTyTWyGGB76NBFbfUrTnn8YWQkTJVs'
@mock.patch('garecovery.two_of_three.bitcoincore.AuthServiceProxy')
def test_recover_2of3(mock_bitcoincore):
    """Test 2of3 happy path"""
    # Serve canned testnet transactions instead of talking to a real bitcoind.
    mock_bitcoincore.return_value = AuthServiceProxy('testnet_txs')
    estimate = {'blocks': 3, 'feerate': 1, }
    mock_bitcoincore.return_value.estimatesmartfee.return_value = estimate
    args = [
        '--mnemonic-file={}'.format(datafile('mnemonic_6.txt')),
        '--rpcuser=abc',
        '--rpcpassword=<PASSWORD>',
        '2of3',
        '--network=testnet',
        '--recovery-mnemonic-file={}'.format(datafile('mnemonic_7.txt')),
        '--key-search-depth={}'.format(key_depth),
        '--search-subaccounts={}'.format(sub_depth),
        '--destination-address={}'.format(destination_address),
    ]
    # Raw tx: output must match the pre-signed fixture exactly.
    output = get_output(args).strip()
    assert output == open(datafile("signed_2of3_5")).read().strip()
    # Check replace by fee is set
    tx = txutil.from_hex(output)
    assert wally.tx_get_num_inputs(tx) == 1
    # nSequence of 2**32 - 3 (0xFFFFFFFD) opts in to replace-by-fee (BIP 125).
    assert wally.tx_get_input_sequence(tx, 0) == int(32*'1', 2) - 2
    # Summary: re-run with --show-summary and check the parsed table.
    args = ['--show-summary', ] + args
    output = get_output(args)
    summary = parse_summary(output)
    assert len(summary) == 1
    assert summary[0]['destination address'] == destination_address
@mock.patch('garecovery.two_of_three.bitcoincore.AuthServiceProxy')
def test_set_nlocktime(mock_bitcoincore):
    """The recovery transaction's nlocktime must equal the block height
    the node reports at creation time.

    NOTE(review): the original docstring said "current blockheight + 1",
    but the assertion below checks equality with the current height --
    confirm which wording is intended.
    """
    proxy = AuthServiceProxy('testnet_txs')
    mock_bitcoincore.return_value = proxy
    proxy.estimatesmartfee.return_value = {'blocks': 3, 'feerate': 1}

    blockheight = 123
    proxy.getblockcount.return_value = blockheight

    cli_args = [
        '--mnemonic-file={}'.format(datafile('mnemonic_6.txt')),
        '--rpcuser=abc',
        '--rpcpassword=<PASSWORD>',
        '2of3',
        '--network=testnet',
        '--recovery-mnemonic-file={}'.format(datafile('mnemonic_7.txt')),
        '--key-search-depth={}'.format(key_depth),
        '--search-subaccounts={}'.format(sub_depth),
        '--destination-address={}'.format(destination_address),
    ]

    signed_hex = get_output(cli_args).strip()
    recovered_tx = txutil.from_hex(signed_hex)
    assert wally.tx_get_locktime(recovered_tx) == blockheight
@mock.patch('garecovery.two_of_three.bitcoincore.AuthServiceProxy')
def test_recover_2of2_csv(mock_bitcoincore):
    """Test 2of2-csv happy path.

    Exercises both recovery paths against the same signed fixture:
    * the default importmulti/listunspent path, and
    * the scantxoutset path (--ignore-mempool), first with the csv output
      not yet expired (no tx produced) and then expired (tx produced).
    """
    def read_expected():
        # Read the signed fixture, closing the handle deterministically
        # (the original open(...).read() leaked it -- twice).
        with open(datafile("signed_2of2_csv_1")) as fixture:
            return fixture.read().strip()

    mock_bitcoincore.return_value = AuthServiceProxy('testnet_txs')

    estimate = {'blocks': 3, 'feerate': decimal.Decimal('0.00001'), }
    mock_bitcoincore.return_value.estimatesmartfee.return_value = estimate
    mock_bitcoincore.return_value.getnetworkinfo = mock.Mock(return_value={'version': 190100})
    mock_bitcoincore.return_value.getblockcount.return_value = 144

    args = [
        '--mnemonic-file={}'.format(datafile('mnemonic_1.txt')),
        '--rpcuser=abc',
        '--rpcpassword=<PASSWORD>',
        '2of2-csv',
        '--network=testnet',
        '--key-search-depth={}'.format(key_depth),
        '--search-subaccounts={}'.format(sub_depth),
    ]

    # Raw tx must match the signed fixture.
    output = get_output(args).strip()
    assert output == read_expected()

    tx = txutil.from_hex(output)
    assert wally.tx_get_num_inputs(tx) == 1

    # Summary mode.
    args = ['--show-summary', ] + args
    output = get_output(args)
    summary = parse_summary(output)
    assert len(summary) == 1

    # Use scantxoutset instead of importmulti + listunspent
    scantxoutset_result = {
        'success': True,
        'unspents': [{
            'txid': '0ab5d70ef25a601de455155fdcb8c492d21a9b3063211dc8a969568d9d0fe15b',
            'vout': 0,
            'scriptPubKey': 'a91458ce12e1773dd078940a9dc855b94c3c9a343b8587',
            'desc': 'addr(2N1LnKRLTCWr8H9UdwoREazuFDXHMEgZj9g)#ztm9gzsm',
            'amount': 0.001,
            'height': 0,
        }],
    }
    mock_bitcoincore.return_value.scantxoutset = mock.Mock(return_value=scantxoutset_result)

    # Output not expired yet: recovery must produce no transaction.
    mock_bitcoincore.return_value.getblockcount.return_value = 143

    args = [
        '--mnemonic-file={}'.format(datafile('mnemonic_1.txt')),
        '--rpcuser=abc',
        '--rpcpassword=<PASSWORD>',
        '2of2-csv',
        '--network=testnet',
        '--key-search-depth={}'.format(key_depth),
        '--search-subaccounts={}'.format(sub_depth),
        '--ignore-mempool',
    ]

    raw_tx = get_output(args).strip()
    assert raw_tx == ''

    # Output expired: recovery now matches the fixture again.
    mock_bitcoincore.return_value.getblockcount.return_value = 144

    output = get_output(args).strip()
    assert output == read_expected()

    tx = txutil.from_hex(output)
    assert wally.tx_get_num_inputs(tx) == 1

    # Summary mode for the scantxoutset path.
    args = ['--show-summary', ] + args
    output = get_output(args)
    summary = parse_summary(output)
    assert len(summary) == 1
|
[
"wallycore.tx_get_input_sequence",
"wallycore.tx_get_num_inputs",
"decimal.Decimal",
"mock.patch",
"gaservices.utils.txutil.from_hex",
"mock.Mock",
"wallycore.tx_get_locktime"
] |
[((464, 530), 'mock.patch', 'mock.patch', (['"""garecovery.two_of_three.bitcoincore.AuthServiceProxy"""'], {}), "('garecovery.two_of_three.bitcoincore.AuthServiceProxy')\n", (474, 530), False, 'import mock\n'), ((1745, 1811), 'mock.patch', 'mock.patch', (['"""garecovery.two_of_three.bitcoincore.AuthServiceProxy"""'], {}), "('garecovery.two_of_three.bitcoincore.AuthServiceProxy')\n", (1755, 1811), False, 'import mock\n'), ((2821, 2887), 'mock.patch', 'mock.patch', (['"""garecovery.two_of_three.bitcoincore.AuthServiceProxy"""'], {}), "('garecovery.two_of_three.bitcoincore.AuthServiceProxy')\n", (2831, 2887), False, 'import mock\n'), ((1389, 1412), 'gaservices.utils.txutil.from_hex', 'txutil.from_hex', (['output'], {}), '(output)\n', (1404, 1412), False, 'from gaservices.utils import txutil\n'), ((2734, 2757), 'gaservices.utils.txutil.from_hex', 'txutil.from_hex', (['output'], {}), '(output)\n', (2749, 2757), False, 'from gaservices.utils import txutil\n'), ((3233, 3276), 'mock.Mock', 'mock.Mock', ([], {'return_value': "{'version': 190100}"}), "(return_value={'version': 190100})\n", (3242, 3276), False, 'import mock\n'), ((3777, 3800), 'gaservices.utils.txutil.from_hex', 'txutil.from_hex', (['output'], {}), '(output)\n', (3792, 3800), False, 'from gaservices.utils import txutil\n'), ((4515, 4558), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'scantxoutset_result'}), '(return_value=scantxoutset_result)\n', (4524, 4558), False, 'import mock\n'), ((5315, 5338), 'gaservices.utils.txutil.from_hex', 'txutil.from_hex', (['output'], {}), '(output)\n', (5330, 5338), False, 'from gaservices.utils import txutil\n'), ((1424, 1451), 'wallycore.tx_get_num_inputs', 'wally.tx_get_num_inputs', (['tx'], {}), '(tx)\n', (1447, 1451), True, 'import wallycore as wally\n'), ((1468, 1502), 'wallycore.tx_get_input_sequence', 'wally.tx_get_input_sequence', (['tx', '(0)'], {}), '(tx, 0)\n', (1495, 1502), True, 'import wallycore as wally\n'), ((2769, 2794), 'wallycore.tx_get_locktime', 
'wally.tx_get_locktime', (['tx'], {}), '(tx)\n', (2790, 2794), True, 'import wallycore as wally\n'), ((3077, 3103), 'decimal.Decimal', 'decimal.Decimal', (['"""0.00001"""'], {}), "('0.00001')\n", (3092, 3103), False, 'import decimal\n'), ((3812, 3839), 'wallycore.tx_get_num_inputs', 'wally.tx_get_num_inputs', (['tx'], {}), '(tx)\n', (3835, 3839), True, 'import wallycore as wally\n'), ((5350, 5377), 'wallycore.tx_get_num_inputs', 'wally.tx_get_num_inputs', (['tx'], {}), '(tx)\n', (5373, 5377), True, 'import wallycore as wally\n')]
|
import sys
import textwrap
from src.csvdiff2 import csvdiff
def test_show_difference(lhs, rhs, capfd):
    """-dv mode: the report lists only missing (</>) and differing (!) rows.

    ``lhs``/``rhs`` are presumably tmp-file fixtures backing left.csv and
    right.csv (those names appear in the report header) -- confirm in
    conftest.
    """
    # Two CSV inputs sharing a header row; '-k0:4,2' presumably selects the
    # key columns used to pair rows -- confirm against csvdiff's CLI help.
    lhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035904, value4-3
        102, value1-4, key2-1, 1004, 20210924T180521, value4-e
        1003, value1-5, key2-1, 1005, 20210924T180528, value4-5
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-7, key2-3, 1007, 20210923T143258, value4-7
        1003, value1-e, key2-4, 1008, 20210923T143259, value4-8
        ''').strip())
    rhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-1, key2-1, 1001, 20210921T035901, value4-1
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035903, value4-3
        102, value1-4e, key2-1, 1044, 20210924T180529, value4-4
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-8, key2-4, 1008, 20210923T143257, value4-e
        ''').strip())
    # Drive the CLI entry point directly through a patched argv.
    sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-k0:4,2', '-dv']
    csvdiff.main()
    # stderr must stay silent; stdout must match the report verbatim.
    # '! @ [...]' lists the zero-based indices of the differing columns.
    out, err = capfd.readouterr()
    assert err == ''
    assert out == textwrap.dedent('''
        ============ Report ============
        * Differences
        --------------------------------------------------------------------------------
        L left.csv
        R right.csv
        --------------------------------------------------------------------------------
        >  R    2 ['1', 'value1-1', 'key2-1', '1001', '20210921T035901', 'value4-1']
        !  @ [4]
           L    3 ['1', 'value1-3', 'key2-3', '1003', '20210921T035904', 'value4-3']
           R    4 ['1', 'value1-3', 'key2-3', '1003', '20210921T035903', 'value4-3']
        !  @ [1, 3, 4, 5]
           L    4 ['102', 'value1-4', 'key2-1', '1004', '20210924T180521', 'value4-e']
           R    5 ['102', 'value1-4e', 'key2-1', '1044', '20210924T180529', 'value4-4']
        <  L    5 ['1003', 'value1-5', 'key2-1', '1005', '20210924T180528', 'value4-5']
        <  L    7 ['1003', 'value1-7', 'key2-3', '1007', '20210923T143258', 'value4-7']
        !  @ [1, 4, 5]
           L    8 ['1003', 'value1-e', 'key2-4', '1008', '20210923T143259', 'value4-8']
           R    7 ['1003', 'value1-8', 'key2-4', '1008', '20210923T143257', 'value4-e']
        ''')
def test_show_difference_and_number_of_cases(lhs, rhs, capfd):
    """-dvc mode: same difference listing as -dv plus a count section.

    Adding '-c' appends the "Count & Row number" summary (totals and the
    row numbers / row-number pairs involved).
    """
    # Same CSV inputs as test_show_difference.
    lhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035904, value4-3
        102, value1-4, key2-1, 1004, 20210924T180521, value4-e
        1003, value1-5, key2-1, 1005, 20210924T180528, value4-5
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-7, key2-3, 1007, 20210923T143258, value4-7
        1003, value1-e, key2-4, 1008, 20210923T143259, value4-8
        ''').strip())
    rhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-1, key2-1, 1001, 20210921T035901, value4-1
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035903, value4-3
        102, value1-4e, key2-1, 1044, 20210924T180529, value4-4
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-8, key2-4, 1008, 20210923T143257, value4-e
        ''').strip())
    # Drive the CLI entry point directly through a patched argv.
    sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-k0:4,2', '-dvc']
    csvdiff.main()
    # stderr must stay silent; stdout must match the report verbatim.
    out, err = capfd.readouterr()
    assert err == ''
    assert out == textwrap.dedent('''
        ============ Report ============
        * Differences
        --------------------------------------------------------------------------------
        L left.csv
        R right.csv
        --------------------------------------------------------------------------------
        >  R    2 ['1', 'value1-1', 'key2-1', '1001', '20210921T035901', 'value4-1']
        !  @ [4]
           L    3 ['1', 'value1-3', 'key2-3', '1003', '20210921T035904', 'value4-3']
           R    4 ['1', 'value1-3', 'key2-3', '1003', '20210921T035903', 'value4-3']
        !  @ [1, 3, 4, 5]
           L    4 ['102', 'value1-4', 'key2-1', '1004', '20210924T180521', 'value4-e']
           R    5 ['102', 'value1-4e', 'key2-1', '1044', '20210924T180529', 'value4-4']
        <  L    5 ['1003', 'value1-5', 'key2-1', '1005', '20210924T180528', 'value4-5']
        <  L    7 ['1003', 'value1-7', 'key2-3', '1007', '20210923T143258', 'value4-7']
        !  @ [1, 4, 5]
           L    8 ['1003', 'value1-e', 'key2-4', '1008', '20210923T143259', 'value4-8']
           R    7 ['1003', 'value1-8', 'key2-4', '1008', '20210923T143257', 'value4-e']
        * Count & Row number
          same lines          : 2
          left side only  (<): 2 :-- Row Numbers -->: [5, 7]
          right side only (>): 1 :-- Row Numbers -->: [2]
          with differences(!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]
        ''')
def test_show_all_and_number_of_cases(lhs, rhs, capfd):
    """-avc mode: '-a' reports every row, including matching ones.

    Matching row pairs appear under an '=' marker (they are omitted in
    -d mode); the '-c' count section is unchanged.
    """
    # Same CSV inputs as test_show_difference.
    lhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035904, value4-3
        102, value1-4, key2-1, 1004, 20210924T180521, value4-e
        1003, value1-5, key2-1, 1005, 20210924T180528, value4-5
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-7, key2-3, 1007, 20210923T143258, value4-7
        1003, value1-e, key2-4, 1008, 20210923T143259, value4-8
        ''').strip())
    rhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-1, key2-1, 1001, 20210921T035901, value4-1
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035903, value4-3
        102, value1-4e, key2-1, 1044, 20210924T180529, value4-4
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-8, key2-4, 1008, 20210923T143257, value4-e
        ''').strip())
    # Drive the CLI entry point directly through a patched argv.
    sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-k0:4,2', '-avc']
    csvdiff.main()
    # stderr must stay silent; stdout must match the report verbatim.
    out, err = capfd.readouterr()
    assert err == ''
    assert out == textwrap.dedent('''
        ============ Report ============
        * All
        --------------------------------------------------------------------------------
        L left.csv
        R right.csv
        --------------------------------------------------------------------------------
        >  R    2 ['1', 'value1-1', 'key2-1', '1001', '20210921T035901', 'value4-1']
        =
           L    2 ['1', 'value1-2', 'key2-2', '1002', '20210921T035902', 'value4-2']
           R    3 ['1', 'value1-2', 'key2-2', '1002', '20210921T035902', 'value4-2']
        !  @ [4]
           L    3 ['1', 'value1-3', 'key2-3', '1003', '20210921T035904', 'value4-3']
           R    4 ['1', 'value1-3', 'key2-3', '1003', '20210921T035903', 'value4-3']
        !  @ [1, 3, 4, 5]
           L    4 ['102', 'value1-4', 'key2-1', '1004', '20210924T180521', 'value4-e']
           R    5 ['102', 'value1-4e', 'key2-1', '1044', '20210924T180529', 'value4-4']
        <  L    5 ['1003', 'value1-5', 'key2-1', '1005', '20210924T180528', 'value4-5']
        =
           L    6 ['1003', 'value1-6', 'key2-2', '1006', '20210923T143259', 'value4-6']
           R    6 ['1003', 'value1-6', 'key2-2', '1006', '20210923T143259', 'value4-6']
        <  L    7 ['1003', 'value1-7', 'key2-3', '1007', '20210923T143258', 'value4-7']
        !  @ [1, 4, 5]
           L    8 ['1003', 'value1-e', 'key2-4', '1008', '20210923T143259', 'value4-8']
           R    7 ['1003', 'value1-8', 'key2-4', '1008', '20210923T143257', 'value4-e']
        * Count & Row number
          same lines          : 2
          left side only  (<): 2 :-- Row Numbers -->: [5, 7]
          right side only (>): 1 :-- Row Numbers -->: [2]
          with differences(!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]
        ''')
def test_show_all_and_number_of_cases_with_ignore_column(lhs, rhs, capfd):
    """-avc with '-i1': column 1 is excluded from the comparison.

    Compared to the plain -avc run, index 1 disappears from every
    '! @ [...]' list; rows still differ in other columns, so the counts
    are unchanged.
    """
    # Same CSV inputs as test_show_difference.
    lhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035904, value4-3
        102, value1-4, key2-1, 1004, 20210924T180521, value4-e
        1003, value1-5, key2-1, 1005, 20210924T180528, value4-5
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-7, key2-3, 1007, 20210923T143258, value4-7
        1003, value1-e, key2-4, 1008, 20210923T143259, value4-8
        ''').strip())
    rhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-1, key2-1, 1001, 20210921T035901, value4-1
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035903, value4-3
        102, value1-4e, key2-1, 1044, 20210924T180529, value4-4
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-8, key2-4, 1008, 20210923T143257, value4-e
        ''').strip())
    # Drive the CLI entry point directly through a patched argv.
    sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-k0:4,2', '-avc', '-i1']
    csvdiff.main()
    # stderr must stay silent; stdout must match the report verbatim.
    out, err = capfd.readouterr()
    assert err == ''
    assert out == textwrap.dedent('''
        ============ Report ============
        * All
        --------------------------------------------------------------------------------
        L left.csv
        R right.csv
        --------------------------------------------------------------------------------
        >  R    2 ['1', 'value1-1', 'key2-1', '1001', '20210921T035901', 'value4-1']
        =
           L    2 ['1', 'value1-2', 'key2-2', '1002', '20210921T035902', 'value4-2']
           R    3 ['1', 'value1-2', 'key2-2', '1002', '20210921T035902', 'value4-2']
        !  @ [4]
           L    3 ['1', 'value1-3', 'key2-3', '1003', '20210921T035904', 'value4-3']
           R    4 ['1', 'value1-3', 'key2-3', '1003', '20210921T035903', 'value4-3']
        !  @ [3, 4, 5]
           L    4 ['102', 'value1-4', 'key2-1', '1004', '20210924T180521', 'value4-e']
           R    5 ['102', 'value1-4e', 'key2-1', '1044', '20210924T180529', 'value4-4']
        <  L    5 ['1003', 'value1-5', 'key2-1', '1005', '20210924T180528', 'value4-5']
        =
           L    6 ['1003', 'value1-6', 'key2-2', '1006', '20210923T143259', 'value4-6']
           R    6 ['1003', 'value1-6', 'key2-2', '1006', '20210923T143259', 'value4-6']
        <  L    7 ['1003', 'value1-7', 'key2-3', '1007', '20210923T143258', 'value4-7']
        !  @ [4, 5]
           L    8 ['1003', 'value1-e', 'key2-4', '1008', '20210923T143259', 'value4-8']
           R    7 ['1003', 'value1-8', 'key2-4', '1008', '20210923T143257', 'value4-e']
        * Count & Row number
          same lines          : 2
          left side only  (<): 2 :-- Row Numbers -->: [5, 7]
          right side only (>): 1 :-- Row Numbers -->: [2]
          with differences(!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]
        ''')
def test_show_all_and_number_of_cases_with_ignore_columns(lhs, rhs, capfd):
    """-avc with '-i1,4': columns 1 and 4 are excluded from the comparison.

    The row pair that differed only in column 4 is now reported as '='
    and the counts change accordingly (3 same, 2 with differences).
    """
    # Same CSV inputs as test_show_difference.
    lhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035904, value4-3
        102, value1-4, key2-1, 1004, 20210924T180521, value4-e
        1003, value1-5, key2-1, 1005, 20210924T180528, value4-5
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-7, key2-3, 1007, 20210923T143258, value4-7
        1003, value1-e, key2-4, 1008, 20210923T143259, value4-8
        ''').strip())
    rhs.write(textwrap.dedent('''
        head1, head2, head3, head4, head5, head6
        1, value1-1, key2-1, 1001, 20210921T035901, value4-1
        1, value1-2, key2-2, 1002, 20210921T035902, value4-2
        1, value1-3, key2-3, 1003, 20210921T035903, value4-3
        102, value1-4e, key2-1, 1044, 20210924T180529, value4-4
        1003, value1-6, key2-2, 1006, 20210923T143259, value4-6
        1003, value1-8, key2-4, 1008, 20210923T143257, value4-e
        ''').strip())
    # Drive the CLI entry point directly through a patched argv.
    sys.argv = ['csvdiff.py', lhs.strpath, rhs.strpath, '-k0:4,2', '-avc', '-i1,4']
    csvdiff.main()
    # stderr must stay silent; stdout must match the report verbatim.
    out, err = capfd.readouterr()
    assert err == ''
    assert out == textwrap.dedent('''
        ============ Report ============
        * All
        --------------------------------------------------------------------------------
        L left.csv
        R right.csv
        --------------------------------------------------------------------------------
        >  R    2 ['1', 'value1-1', 'key2-1', '1001', '20210921T035901', 'value4-1']
        =
           L    2 ['1', 'value1-2', 'key2-2', '1002', '20210921T035902', 'value4-2']
           R    3 ['1', 'value1-2', 'key2-2', '1002', '20210921T035902', 'value4-2']
        =
           L    3 ['1', 'value1-3', 'key2-3', '1003', '20210921T035904', 'value4-3']
           R    4 ['1', 'value1-3', 'key2-3', '1003', '20210921T035903', 'value4-3']
        !  @ [3, 5]
           L    4 ['102', 'value1-4', 'key2-1', '1004', '20210924T180521', 'value4-e']
           R    5 ['102', 'value1-4e', 'key2-1', '1044', '20210924T180529', 'value4-4']
        <  L    5 ['1003', 'value1-5', 'key2-1', '1005', '20210924T180528', 'value4-5']
        =
           L    6 ['1003', 'value1-6', 'key2-2', '1006', '20210923T143259', 'value4-6']
           R    6 ['1003', 'value1-6', 'key2-2', '1006', '20210923T143259', 'value4-6']
        <  L    7 ['1003', 'value1-7', 'key2-3', '1007', '20210923T143258', 'value4-7']
        !  @ [5]
           L    8 ['1003', 'value1-e', 'key2-4', '1008', '20210923T143259', 'value4-8']
           R    7 ['1003', 'value1-8', 'key2-4', '1008', '20210923T143257', 'value4-e']
        * Count & Row number
          same lines          : 3
          left side only  (<): 2 :-- Row Numbers -->: [5, 7]
          right side only (>): 1 :-- Row Numbers -->: [2]
          with differences(!): 2 :-- Row Number Pairs -->: [(4, 5), (8, 7)]
        ''')
|
[
"textwrap.dedent",
"src.csvdiff2.csvdiff.main"
] |
[((1204, 1218), 'src.csvdiff2.csvdiff.main', 'csvdiff.main', ([], {}), '()\n', (1216, 1218), False, 'from src.csvdiff2 import csvdiff\n'), ((3577, 3591), 'src.csvdiff2.csvdiff.main', 'csvdiff.main', ([], {}), '()\n', (3589, 3591), False, 'from src.csvdiff2 import csvdiff\n'), ((6225, 6239), 'src.csvdiff2.csvdiff.main', 'csvdiff.main', ([], {}), '()\n', (6237, 6239), False, 'from src.csvdiff2 import csvdiff\n'), ((9241, 9255), 'src.csvdiff2.csvdiff.main', 'csvdiff.main', ([], {}), '()\n', (9253, 9255), False, 'from src.csvdiff2 import csvdiff\n'), ((12255, 12269), 'src.csvdiff2.csvdiff.main', 'csvdiff.main', ([], {}), '()\n', (12267, 12269), False, 'from src.csvdiff2 import csvdiff\n'), ((1293, 2422), 'textwrap.dedent', 'textwrap.dedent', (['"""\n ============ Report ============\n\n * Differences\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [1, 3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [1, 4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n\n """'], {}), '(\n """\n ============ Report ============\n\n * Differences\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [1, 3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! @ [1, 4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n\n """\n )\n', (1308, 2422), False, 'import textwrap\n'), ((3666, 5078), 'textwrap.dedent', 'textwrap.dedent', (['"""\n ============ Report ============\n\n * Differences\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! 
@ [1, 3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! @ [1, 4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 2\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]\n """'], {}), '(\n """\n ============ Report ============\n\n * Differences\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [1, 3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [1, 4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 2\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]\n """\n )\n', (3681, 5078), False, 'import textwrap\n'), ((6314, 8068), 'textwrap.dedent', 'textwrap.dedent', (['"""\n ============ Report ============\n\n * All\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n =\n L 2 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n R 3 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [1, 3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n =\n L 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n R 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [1, 4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 2\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]\n """'], {}), '(\n """\n ============ Report ============\n\n * All\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n =\n L 2 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n R 3 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [1, 3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n =\n L 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n R 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [1, 4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 2\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]\n """\n )\n', (6329, 8068), False, 'import textwrap\n'), ((9330, 11078), 'textwrap.dedent', 'textwrap.dedent', (['"""\n ============ Report ============\n\n * All\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n =\n L 2 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n R 3 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n =\n L 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n R 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 2\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]\n """'], {}), '(\n """\n ============ Report ============\n\n * All\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n =\n L 2 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n R 3 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n ! @ [4]\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [3, 4, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n =\n L 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n R 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [4, 5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 2\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 3 :-- Row Number Pairs -->: [(3, 4), (4, 5), (8, 7)]\n """\n )\n', (9345, 11078), False, 'import textwrap\n'), ((12344, 14072), 'textwrap.dedent', 'textwrap.dedent', (['"""\n ============ Report ============\n\n * All\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n =\n L 2 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n R 3 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n =\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [3, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n =\n L 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n R 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 3\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 2 :-- Row Number Pairs -->: [(4, 5), (8, 7)]\n """'], {}), '(\n """\n ============ Report ============\n\n * All\n --------------------------------------------------------------------------------\n L left.csv\n R right.csv\n --------------------------------------------------------------------------------\n > R 2 [\'1\', \'value1-1\', \'key2-1\', \'1001\', \'20210921T035901\', \'value4-1\']\n =\n L 2 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n R 3 [\'1\', \'value1-2\', \'key2-2\', \'1002\', \'20210921T035902\', \'value4-2\']\n =\n L 3 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035904\', \'value4-3\']\n R 4 [\'1\', \'value1-3\', \'key2-3\', \'1003\', \'20210921T035903\', \'value4-3\']\n ! @ [3, 5]\n L 4 [\'102\', \'value1-4\', \'key2-1\', \'1004\', \'20210924T180521\', \'value4-e\']\n R 5 [\'102\', \'value1-4e\', \'key2-1\', \'1044\', \'20210924T180529\', \'value4-4\']\n < L 5 [\'1003\', \'value1-5\', \'key2-1\', \'1005\', \'20210924T180528\', \'value4-5\']\n =\n L 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n R 6 [\'1003\', \'value1-6\', \'key2-2\', \'1006\', \'20210923T143259\', \'value4-6\']\n < L 7 [\'1003\', \'value1-7\', \'key2-3\', \'1007\', \'20210923T143258\', \'value4-7\']\n ! 
@ [5]\n L 8 [\'1003\', \'value1-e\', \'key2-4\', \'1008\', \'20210923T143259\', \'value4-8\']\n R 7 [\'1003\', \'value1-8\', \'key2-4\', \'1008\', \'20210923T143257\', \'value4-e\']\n \n * Count & Row number\n same lines : 3\n left side only (<): 2 :-- Row Numbers -->: [5, 7]\n right side only (>): 1 :-- Row Numbers -->: [2]\n with differences (!): 2 :-- Row Number Pairs -->: [(4, 5), (8, 7)]\n """\n )\n', (12359, 14072), False, 'import textwrap\n'), ((121, 649), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """\n )\n', (136, 649), False, 'import textwrap\n'), ((663, 1125), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-1, 
key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """\n )\n', (678, 1125), False, 'import textwrap\n'), ((2493, 3021), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """\n )\n', (2508, 3021), False, 'import textwrap\n'), ((3035, 3497), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 
20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """\n )\n', (3050, 3497), False, 'import textwrap\n'), ((5141, 5669), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """\n )\n', (5156, 5669), False, 'import textwrap\n'), ((5683, 6145), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, 
value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """\n )\n', (5698, 6145), False, 'import textwrap\n'), ((8150, 8678), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """\n )\n', (8165, 8678), False, 'import textwrap\n'), ((8692, 9154), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 
1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """\n )\n', (8707, 9154), False, 'import textwrap\n'), ((11162, 11690), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035904, value4-3\n 102, value1-4, key2-1, 1004, 20210924T180521, value4-e\n 1003, value1-5, key2-1, 1005, 20210924T180528, value4-5\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-7, key2-3, 1007, 20210923T143258, value4-7\n 1003, value1-e, key2-4, 1008, 20210923T143259, value4-8\n """\n )\n', (11177, 11690), False, 'import textwrap\n'), ((11704, 12166), 'textwrap.dedent', 'textwrap.dedent', (['"""\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, value1-8, key2-4, 1008, 20210923T143257, value4-e\n """'], {}), '(\n """\n head1, head2, head3, head4, head5, head6\n 1, value1-1, key2-1, 1001, 20210921T035901, value4-1\n 1, value1-2, key2-2, 1002, 20210921T035902, value4-2\n 1, value1-3, key2-3, 1003, 20210921T035903, value4-3\n 102, value1-4e, key2-1, 1044, 20210924T180529, value4-4\n 1003, value1-6, key2-2, 1006, 20210923T143259, value4-6\n 1003, 
value1-8, key2-4, 1008, 20210923T143257, value4-e\n """\n )\n', (11719, 12166), False, 'import textwrap\n')]
|
from sklearn import svm
import numpy as np
X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
y = np.array([1, 1, 2, 2])
model = svm.SVC(kernel='linear',C=1,gamma=1)
model.fit(X,y)
print(model.predict([[-0.8,-1]]))
|
[
"numpy.array",
"sklearn.svm.SVC"
] |
[((48, 94), 'numpy.array', 'np.array', (['[[-1, -1], [-2, -1], [1, 1], [2, 1]]'], {}), '([[-1, -1], [-2, -1], [1, 1], [2, 1]])\n', (56, 94), True, 'import numpy as np\n'), ((99, 121), 'numpy.array', 'np.array', (['[1, 1, 2, 2]'], {}), '([1, 1, 2, 2])\n', (107, 121), True, 'import numpy as np\n'), ((132, 170), 'sklearn.svm.SVC', 'svm.SVC', ([], {'kernel': '"""linear"""', 'C': '(1)', 'gamma': '(1)'}), "(kernel='linear', C=1, gamma=1)\n", (139, 170), False, 'from sklearn import svm\n')]
|
from types import SimpleNamespace
class Page:
def __init__(self):
self.root = None
self.ui = SimpleNamespace()
|
[
"types.SimpleNamespace"
] |
[((115, 132), 'types.SimpleNamespace', 'SimpleNamespace', ([], {}), '()\n', (130, 132), False, 'from types import SimpleNamespace\n')]
|
'''
Faster brute-force adapter matcher
'''
import time
VERBOSE = False
BASES = ('A', 'C', 'G', 'T')
def addNewAdapterToSet(ad, adSet):
adSet.add(ad)
if VERBOSE:
print(f'Adding {ad}')
time.sleep(.1)
return adSet
def makeAdapters(adapter):
adapters = set()
adapters.add(adapter[:-8])
for i, x in enumerate(adapter):
for j in BASES:
ad = adapter[:i] + j + adapter[i+1:]
adapters.add(ad[:-8])
for l in BASES:
ad = adapter[:i] + l + adapter[i:]
adapters.add(ad[:-8])
ad = adapter[:i] + adapter[i+1:]
adapters.add(ad[:-8])
ad = adapter[:i] + adapter[i+2:]
adapters.add(ad[:-8])
return adapters
def build(adapter, _):
'''
Build a brute force adapter macher with no parameters
'''
adapters = sorted(makeAdapters(adapter))
def match(line):
for adapter in adapters:
if adapter in line:
return line.find(adapter)
return match
|
[
"time.sleep"
] |
[((210, 225), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (220, 225), False, 'import time\n')]
|
"""
The purpose of this file is to install autograd and its dependencies and to
provide utility functions that are used when it is used in conjunction with
Qubiter.
When using autograd, one declares np to be the alias to module
numpy.autograd. If another file later declares np to be alias to numpy,
all sorts of error messages start cropping up. What I've done to avoid this
is to change the statements `import numpy as np` in some (not all, just the
ones called while using autograd) files by
import sys
if 'autograd.numpy' not in sys.modules:
import numpy as np
else:
import autograd.numpy as np
References
----------
1. https://github.com/HIPS/autograd/blob/master/docs/tutorial.md
2. https://github.com/HIPS/autograd/blob/master/docs/updateguide.md
"""
import autograd.numpy as np
from autograd import grad, jacobian
from autograd.extend import primitive, defvjp
import sys
print('np installed?', 'np' in sys.modules) # False
print('numpy installed?', 'numpy' in sys.modules) # True
print('autograd.numpy installed?', 'autograd.numpy' in sys.modules) # True
def sig_all():
"""
This method returns a numpy array of shape=(2, 3, 3) which contains the
3 Pauli matrices in it. sigx = sig_all[:, :, 0], sigy = sig_all[:, :,
1], sigz = sig_all[:, :, 2],
Returns
-------
np.ndarray
shape = (2, 2, 3)
"""
sigx = np.array([[0, 1], [1, 0]])
sigy = np.array([[0, -1j], [1j, 0]])
sigz = np.array([[1, 0], [0, -1]])
all_paulis = np.vstack([sigx, sigy, sigz])
all_paulis = np.reshape(all_paulis, (3, 2, 2)).transpose(1, 2, 0)
return all_paulis
def u2_alt(*tlist):
"""
An alternative to OneBitGates.u2(). Both should return identical 2-dim
matrices for identical arguments.
Parameters
----------
tlist : list[float]
tlist = [rads0, rads1, rads2, rads3]
Returns
-------
np.ndarray
shape = (2, 2)
"""
assert len(tlist) == 4
t = np.sqrt(tlist[1]**2 + tlist[2]**2 + tlist[3]**2)
if abs(t) < 1e-6:
return np.exp(1j*tlist[0])*np.eye(2)
tvec = np.array([tlist[1], tlist[2], tlist[3]])/t
out = np.eye(2)*np.cos(t) + 1j*np.dot(sig_all(), tvec)*np.sin(t)
return np.exp(1j*tlist[0])*out
def d_u2(dwrt, *tlist):
"""
tlist is a list of 4 floats, and dwrt (which stands for "derivative with
respect to") is in range(4). This method returns the analytical (not
numerical, in terms of closed functions) derivative of u2(*tlist) with
respect to tlist[dwrt].
The output of this method has been verified by comparing it to same
derivatives calculated numerically with autograd.
Parameters
----------
dwrt : int
tlist : list[float]
Returns
-------
np.ndarray
shape = (2, 2)
"""
assert dwrt in range(4)
assert len(tlist) == 4
if dwrt == 0:
return 1j*u2_alt(*tlist)
dwrt -= 1
t = np.sqrt(tlist[1]**2 + tlist[2]**2 + tlist[3]**2)
if abs(t) < 1e-6:
# we already know dwrt !=0
return np.zeros((2, 2), dtype=complex)
tvec = np.array([tlist[1], tlist[2], tlist[3]])/t
dotted_vec = tvec*tvec[dwrt]*np.cos(t) +\
(np.sin(t)/t)*(-tvec*tvec[dwrt] + np.eye(3)[dwrt, :])
out = -np.sin(t)*tvec[dwrt]*np.eye(2) +\
1j*np.dot(sig_all(), dotted_vec)
return np.exp(1j*tlist[0])*out
def d_auto_u2(dwrt, *tlist):
"""
Returns the automatic (computed by backprop) derivative of 2-dim matrix
UnitaryMat.u2_alt. UnitaryMat.u2_alt is an alternative to
OneBitGates.u2. Both functions return same answer for identical input (
input is 4 real parameters in tlist).
Parameters
----------
dwrt : int
stands for 'derivative with respect to'. int in range(4)
tlist : list[float]
len = 4
Returns
-------
np.ndarray
shape=(2,2)
"""
def u2r(*tlist1):
return np.real(u2_alt(*tlist1))
def u2i(*tlist1):
return np.imag(u2_alt(*tlist1))
return jacobian(u2r, dwrt)(*tlist) + 1j*jacobian(u2i, dwrt)(*tlist)
@primitive
def pu2r(*tlist):
"""
Returns real part of u2, and registers it as being primitive.
Primitive means that its derivative will be provided in a defvjp (
def of vector-jacobian-product) so no need for autograd to calculate it
from the u2 definition.
Parameters
----------
tlist : list[float]
len = 4
Returns
-------
np.ndarray
shape=(2,2)
"""
return np.real(u2_alt(*tlist))
@primitive
def pu2i(*tlist):
"""
Returns imaginary part of u2, and registers it as being primitive.
Primitive means that its derivative will be provided in a defvjp (
def of vector-jacobian-product) so no need for autograd to calculate it
from the u2 definition.
Parameters
----------
tlist : list[float]
len = 4
Returns
-------
np.ndarray
shape=(2,2)
"""
return np.imag(u2_alt(*tlist))
def pu2(*tlist):
"""
Returns primitive u2 as (primitive real part of u2) + j*(primtive
imaginary part of u2).
Parameters
----------
tlist : list[float]
len = 4
Returns
-------
np.ndarray
shape=(2,2)
"""
# print('mmbbvv, pu2', pu2r(*tlist) +1j* pu2r(*tlist))
return pu2r(*tlist) + 1j*pu2i(*tlist)
defvjp(pu2r,
# defines vector-jacobian-product of pu2r
# g.shape == pu2r.shape
lambda ans, *tlist: lambda g: np.sum(
g*np.real(d_u2(0, *tlist))),
lambda ans, *tlist: lambda g: np.sum(
g*np.real(d_u2(1, *tlist))),
lambda ans, *tlist: lambda g: np.sum(
g*np.real(d_u2(2, *tlist))),
lambda ans, *tlist: lambda g: np.sum(
g*np.real(d_u2(3, *tlist))),
argnums=range(4))
defvjp(pu2i,
# defines vector-jacobian-product of pu2i
# g.shape == pu2i.shape
lambda ans, *tlist: lambda g: np.sum(
g*np.imag(d_u2(0, *tlist))),
lambda ans, *tlist: lambda g: np.sum(
g*np.imag(d_u2(1, *tlist))),
lambda ans, *tlist: lambda g: np.sum(
g*np.imag(d_u2(2, *tlist))),
lambda ans, *tlist: lambda g: np.sum(
g*np.imag(d_u2(3, *tlist))),
argnums=range(4))
def d_auto_pu2(dwrt, *tlist):
"""
Returns the automatic derivative of pu2. We have defined things so that
this derivative is stipulated analytically a priori rather than being
calculated by autograd from def of u2.
Parameters
----------
dwrt : int
stands for 'derivative with respect to'. int in range(4)
tlist : list[float]
len = 4
Returns
-------
np.ndarray
shape=(2,2)
"""
assert dwrt in range(4)
return jacobian(pu2r, dwrt)(*tlist) + 1j*jacobian(pu2i, dwrt)(*tlist)
if __name__ == "__main__":
from qubiter.OneBitGates import *
def main():
print("\nu2_alt example-------------")
ex = np.array([1, 0, 0])
ey = np.array([0, 1, 0])
ez = np.array([0, 0, 1])
all_paulis = sig_all()
sigx_ = np.dot(all_paulis, ex)
sigy_ = np.dot(all_paulis, ey)
sigz_ = np.dot(all_paulis, ez)
print('sigx_=\n', sigx_)
print('sigy_=\n', sigy_)
print('sigz_=\n', sigz_)
rads_list = [.1, .2, .3, .4]
err = np.linalg.norm(OneBitGates.u2(*rads_list) -
u2_alt(*rads_list))
print('err=', err)
tlist = [.3, 1.1, .7, .5]
for dwrt in range(4):
print('err=', np.linalg.norm(
d_auto_u2(dwrt, *tlist) - d_u2(dwrt, *tlist)))
for dwrt in range(4):
print('err=', np.linalg.norm(
d_auto_pu2(dwrt, *tlist) - d_u2(dwrt, *tlist)))
main()
|
[
"autograd.numpy.sqrt",
"autograd.numpy.dot",
"autograd.numpy.cos",
"autograd.numpy.vstack",
"autograd.numpy.array",
"autograd.numpy.exp",
"autograd.numpy.zeros",
"autograd.numpy.reshape",
"autograd.numpy.eye",
"autograd.numpy.sin",
"autograd.jacobian"
] |
[((1374, 1400), 'autograd.numpy.array', 'np.array', (['[[0, 1], [1, 0]]'], {}), '([[0, 1], [1, 0]])\n', (1382, 1400), True, 'import autograd.numpy as np\n'), ((1412, 1445), 'autograd.numpy.array', 'np.array', (['[[0, -1.0j], [1.0j, 0]]'], {}), '([[0, -1.0j], [1.0j, 0]])\n', (1420, 1445), True, 'import autograd.numpy as np\n'), ((1453, 1480), 'autograd.numpy.array', 'np.array', (['[[1, 0], [0, -1]]'], {}), '([[1, 0], [0, -1]])\n', (1461, 1480), True, 'import autograd.numpy as np\n'), ((1498, 1527), 'autograd.numpy.vstack', 'np.vstack', (['[sigx, sigy, sigz]'], {}), '([sigx, sigy, sigz])\n', (1507, 1527), True, 'import autograd.numpy as np\n'), ((1970, 2024), 'autograd.numpy.sqrt', 'np.sqrt', (['(tlist[1] ** 2 + tlist[2] ** 2 + tlist[3] ** 2)'], {}), '(tlist[1] ** 2 + tlist[2] ** 2 + tlist[3] ** 2)\n', (1977, 2024), True, 'import autograd.numpy as np\n'), ((2928, 2982), 'autograd.numpy.sqrt', 'np.sqrt', (['(tlist[1] ** 2 + tlist[2] ** 2 + tlist[3] ** 2)'], {}), '(tlist[1] ** 2 + tlist[2] ** 2 + tlist[3] ** 2)\n', (2935, 2982), True, 'import autograd.numpy as np\n'), ((2097, 2137), 'autograd.numpy.array', 'np.array', (['[tlist[1], tlist[2], tlist[3]]'], {}), '([tlist[1], tlist[2], tlist[3]])\n', (2105, 2137), True, 'import autograd.numpy as np\n'), ((2220, 2243), 'autograd.numpy.exp', 'np.exp', (['(1.0j * tlist[0])'], {}), '(1.0j * tlist[0])\n', (2226, 2243), True, 'import autograd.numpy as np\n'), ((3049, 3080), 'autograd.numpy.zeros', 'np.zeros', (['(2, 2)'], {'dtype': 'complex'}), '((2, 2), dtype=complex)\n', (3057, 3080), True, 'import autograd.numpy as np\n'), ((3092, 3132), 'autograd.numpy.array', 'np.array', (['[tlist[1], tlist[2], tlist[3]]'], {}), '([tlist[1], tlist[2], tlist[3]])\n', (3100, 3132), True, 'import autograd.numpy as np\n'), ((3351, 3374), 'autograd.numpy.exp', 'np.exp', (['(1.0j * tlist[0])'], {}), '(1.0j * tlist[0])\n', (3357, 3374), True, 'import autograd.numpy as np\n'), ((6986, 7005), 'autograd.numpy.array', 'np.array', (['[1, 0, 0]'], {}), 
'([1, 0, 0])\n', (6994, 7005), True, 'import autograd.numpy as np\n'), ((7019, 7038), 'autograd.numpy.array', 'np.array', (['[0, 1, 0]'], {}), '([0, 1, 0])\n', (7027, 7038), True, 'import autograd.numpy as np\n'), ((7052, 7071), 'autograd.numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (7060, 7071), True, 'import autograd.numpy as np\n'), ((7119, 7141), 'autograd.numpy.dot', 'np.dot', (['all_paulis', 'ex'], {}), '(all_paulis, ex)\n', (7125, 7141), True, 'import autograd.numpy as np\n'), ((7158, 7180), 'autograd.numpy.dot', 'np.dot', (['all_paulis', 'ey'], {}), '(all_paulis, ey)\n', (7164, 7180), True, 'import autograd.numpy as np\n'), ((7197, 7219), 'autograd.numpy.dot', 'np.dot', (['all_paulis', 'ez'], {}), '(all_paulis, ez)\n', (7203, 7219), True, 'import autograd.numpy as np\n'), ((1545, 1578), 'autograd.numpy.reshape', 'np.reshape', (['all_paulis', '(3, 2, 2)'], {}), '(all_paulis, (3, 2, 2))\n', (1555, 1578), True, 'import autograd.numpy as np\n'), ((2056, 2079), 'autograd.numpy.exp', 'np.exp', (['(1.0j * tlist[0])'], {}), '(1.0j * tlist[0])\n', (2062, 2079), True, 'import autograd.numpy as np\n'), ((2076, 2085), 'autograd.numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (2082, 2085), True, 'import autograd.numpy as np\n'), ((2150, 2159), 'autograd.numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (2156, 2159), True, 'import autograd.numpy as np\n'), ((2160, 2169), 'autograd.numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (2166, 2169), True, 'import autograd.numpy as np\n'), ((2199, 2208), 'autograd.numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (2205, 2208), True, 'import autograd.numpy as np\n'), ((3168, 3177), 'autograd.numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (3174, 3177), True, 'import autograd.numpy as np\n'), ((3284, 3293), 'autograd.numpy.eye', 'np.eye', (['(2)'], {}), '(2)\n', (3290, 3293), True, 'import autograd.numpy as np\n'), ((4027, 4046), 'autograd.jacobian', 'jacobian', (['u2r', 'dwrt'], {}), '(u2r, dwrt)\n', (4035, 4046), False, 'from 
autograd import grad, jacobian\n'), ((6780, 6800), 'autograd.jacobian', 'jacobian', (['pu2r', 'dwrt'], {}), '(pu2r, dwrt)\n', (6788, 6800), False, 'from autograd import grad, jacobian\n'), ((3199, 3208), 'autograd.numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (3205, 3208), True, 'import autograd.numpy as np\n'), ((4060, 4079), 'autograd.jacobian', 'jacobian', (['u2i', 'dwrt'], {}), '(u2i, dwrt)\n', (4068, 4079), False, 'from autograd import grad, jacobian\n'), ((6814, 6834), 'autograd.jacobian', 'jacobian', (['pu2i', 'dwrt'], {}), '(pu2i, dwrt)\n', (6822, 6834), False, 'from autograd import grad, jacobian\n'), ((3232, 3241), 'autograd.numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (3238, 3241), True, 'import autograd.numpy as np\n'), ((3263, 3272), 'autograd.numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (3269, 3272), True, 'import autograd.numpy as np\n')]
|
#################################
# CSI function
#################################
#########################################################
# import libraries
import scipy.spatial.distance as ssd
import numpy as np
import scipy.io as sio
#########################################################
# Function definition
###############################
# Load CSI
def load_csi(num_UAV, location, pthH, SaveFile):
"""
This function generate the CSI parameters based on the LOS propagation model and the location of nodes at the
beginning of the problem.
:param num_UAV: Number of UAVs.
:param location: A dictionary including all location.
:param pthH: The directory to save the CSI parameters on a file.
:param SaveFile: A Flag(True, False) to save or load data.
:return: Returns a Numpy array including CSI parameters.
"""
if SaveFile:
X_U = location.get('X_U')
X_S = location.get('X_S')
X_F = location.get('X_F')
X_GT = location.get('X_GT')
X_GR = location.get('X_GR')
Y_U = location.get('Y_U')
Y_S = location.get('Y_S')
Y_F = location.get('Y_F')
Y_GT = location.get('Y_GT')
Y_GR = location.get('Y_GR')
Z_U = location.get('Z_U')
Z_S = location.get('Z_S')
Z_F = location.get('Z_F')
Z_GT = location.get('Z_GT')
Z_GR = location.get('Z_GR')
dist_S_uav = [ssd.euclidean([X_S, Y_S, Z_S], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_S_uav = np.asarray(dist_S_uav)
dist_uav_F = [ssd.euclidean([X_F, Y_F, Z_F], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_uav_F = np.asarray(dist_uav_F)
dist_GT_uav = [ssd.euclidean([X_GT, Y_GT, Z_GT], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_GT_uav = np.asarray(dist_GT_uav)
dist_uav_GR = [ssd.euclidean([X_GR, Y_GR, Z_GR], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_uav_GR = np.asarray(dist_uav_GR)
dist_S_uav_Norm = dist_S_uav/min(dist_S_uav)
dist_uav_F_Norm = dist_uav_F/min(dist_uav_F)
dist_GT_uav_Norm = dist_GT_uav/min(dist_GT_uav)
dist_uav_GR_Norm = dist_uav_GR/min(dist_uav_GR)
h_S_uav = np.multiply(1/(dist_S_uav_Norm**2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_S_uav = h_S_uav.T
h_uav_F = np.multiply(1/(dist_uav_F_Norm**2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_uav_F = h_uav_F.T
h_GT_uav = np.multiply(1/(dist_GT_uav_Norm**2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_GT_uav = h_GT_uav.T
h_uav_GR = np.multiply(1/(dist_uav_GR_Norm**2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_uav_GR = h_uav_GR.T
csi_h = np.zeros([num_UAV, 4, 1], dtype=complex)
csi_h[:, 0, :] = h_S_uav
csi_h[:, 1, :] = h_uav_F
csi_h[:, 2, :] = h_GT_uav
csi_h[:, 3, :] = h_uav_GR
sio.savemat(pthH, {'csi_h': csi_h})
else:
csi_h_dict = sio.loadmat(pthH)
csi_h = csi_h_dict.get('csi_h')
return csi_h
###############################
# GET CSI
def get_csi(num_UAV, location, x_u, y_u):
"""
This function updates the CSI location based on the changed location of drones.
:param num_UAV: Number of UAVs.
:param location: The initial location of drones and the fixed nodes.
:param x_u: The updated longitude of UAVs.
:param y_u: The updated latitude of UAVs.
:return: It returns an update numpy array for the CSI parameters.
"""
source_uav = 0
uav_fusion = 1
gtuser_uav = 2
uav_gruser = 3
X_U = x_u
X_S = location.get('X_S')
X_F = location.get('X_F')
X_GT = location.get('X_GT')
X_GR = location.get('X_GR')
Y_U = y_u
Y_S = location.get('Y_S')
Y_F = location.get('Y_F')
Y_GT = location.get('Y_GT')
Y_GR = location.get('Y_GR')
Z_U = location.get('Z_U')
Z_S = location.get('Z_S')
Z_F = location.get('Z_F')
Z_GT = location.get('Z_GT')
Z_GR = location.get('Z_GR')
dist_S_uav = [ssd.euclidean([X_S, Y_S, Z_S], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_S_uav = np.asarray(dist_S_uav)
dist_uav_F = [ssd.euclidean([X_F, Y_F, Z_F], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_uav_F = np.asarray(dist_uav_F)
dist_GT_uav = [ssd.euclidean([X_GT, Y_GT, Z_GT], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_GT_uav = np.asarray(dist_GT_uav)
dist_uav_GR = [ssd.euclidean([X_GR, Y_GR, Z_GR], [i, j, k]) for i, j, k in zip(X_U, Y_U, Z_U)]
dist_uav_GR = np.asarray(dist_uav_GR)
dist_S_uav_Norm = dist_S_uav
dist_uav_F_Norm = dist_uav_F
dist_GT_uav_Norm = dist_GT_uav
dist_uav_GR_Norm = dist_uav_GR
h_S_uav = np.multiply(1 / (dist_S_uav_Norm ** 2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_S_uav = h_S_uav.T
h_uav_F = np.multiply(1 / (dist_uav_F_Norm ** 2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_uav_F = h_uav_F.T
h_GT_uav = np.multiply(1 / (dist_GT_uav_Norm ** 2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_GT_uav = h_GT_uav.T
h_uav_GR = np.multiply(1 / (dist_uav_GR_Norm ** 2), (np.ones([num_UAV, 1]) + 1j * np.ones([num_UAV, 1])).T)
h_uav_GR = h_uav_GR.T
csi_h = np.zeros([num_UAV, 4], dtype=complex)
csi_h[:, source_uav] = np.squeeze(h_S_uav)
csi_h[:, uav_fusion] = np.squeeze(h_uav_F)
csi_h[:, gtuser_uav] = np.squeeze(h_GT_uav)
csi_h[:, uav_gruser] = np.squeeze(h_uav_GR)
return csi_h
|
[
"scipy.spatial.distance.euclidean",
"scipy.io.loadmat",
"numpy.asarray",
"numpy.zeros",
"scipy.io.savemat",
"numpy.ones",
"numpy.squeeze"
] |
[((4331, 4353), 'numpy.asarray', 'np.asarray', (['dist_S_uav'], {}), '(dist_S_uav)\n', (4341, 4353), True, 'import numpy as np\n'), ((4470, 4492), 'numpy.asarray', 'np.asarray', (['dist_uav_F'], {}), '(dist_uav_F)\n', (4480, 4492), True, 'import numpy as np\n'), ((4614, 4637), 'numpy.asarray', 'np.asarray', (['dist_GT_uav'], {}), '(dist_GT_uav)\n', (4624, 4637), True, 'import numpy as np\n'), ((4759, 4782), 'numpy.asarray', 'np.asarray', (['dist_uav_GR'], {}), '(dist_uav_GR)\n', (4769, 4782), True, 'import numpy as np\n'), ((5500, 5537), 'numpy.zeros', 'np.zeros', (['[num_UAV, 4]'], {'dtype': 'complex'}), '([num_UAV, 4], dtype=complex)\n', (5508, 5537), True, 'import numpy as np\n'), ((5566, 5585), 'numpy.squeeze', 'np.squeeze', (['h_S_uav'], {}), '(h_S_uav)\n', (5576, 5585), True, 'import numpy as np\n'), ((5614, 5633), 'numpy.squeeze', 'np.squeeze', (['h_uav_F'], {}), '(h_uav_F)\n', (5624, 5633), True, 'import numpy as np\n'), ((5662, 5682), 'numpy.squeeze', 'np.squeeze', (['h_GT_uav'], {}), '(h_GT_uav)\n', (5672, 5682), True, 'import numpy as np\n'), ((5711, 5731), 'numpy.squeeze', 'np.squeeze', (['h_uav_GR'], {}), '(h_uav_GR)\n', (5721, 5731), True, 'import numpy as np\n'), ((1576, 1598), 'numpy.asarray', 'np.asarray', (['dist_S_uav'], {}), '(dist_S_uav)\n', (1586, 1598), True, 'import numpy as np\n'), ((1723, 1745), 'numpy.asarray', 'np.asarray', (['dist_uav_F'], {}), '(dist_uav_F)\n', (1733, 1745), True, 'import numpy as np\n'), ((1875, 1898), 'numpy.asarray', 'np.asarray', (['dist_GT_uav'], {}), '(dist_GT_uav)\n', (1885, 1898), True, 'import numpy as np\n'), ((2028, 2051), 'numpy.asarray', 'np.asarray', (['dist_uav_GR'], {}), '(dist_uav_GR)\n', (2038, 2051), True, 'import numpy as np\n'), ((2871, 2911), 'numpy.zeros', 'np.zeros', (['[num_UAV, 4, 1]'], {'dtype': 'complex'}), '([num_UAV, 4, 1], dtype=complex)\n', (2879, 2911), True, 'import numpy as np\n'), ((3063, 3098), 'scipy.io.savemat', 'sio.savemat', (['pthH', "{'csi_h': csi_h}"], {}), "(pthH, {'csi_h': 
csi_h})\n", (3074, 3098), True, 'import scipy.io as sio\n'), ((3132, 3149), 'scipy.io.loadmat', 'sio.loadmat', (['pthH'], {}), '(pthH)\n', (3143, 3149), True, 'import scipy.io as sio\n'), ((4236, 4277), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_S, Y_S, Z_S]', '[i, j, k]'], {}), '([X_S, Y_S, Z_S], [i, j, k])\n', (4249, 4277), True, 'import scipy.spatial.distance as ssd\n'), ((4375, 4416), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_F, Y_F, Z_F]', '[i, j, k]'], {}), '([X_F, Y_F, Z_F], [i, j, k])\n', (4388, 4416), True, 'import scipy.spatial.distance as ssd\n'), ((4515, 4559), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_GT, Y_GT, Z_GT]', '[i, j, k]'], {}), '([X_GT, Y_GT, Z_GT], [i, j, k])\n', (4528, 4559), True, 'import scipy.spatial.distance as ssd\n'), ((4660, 4704), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_GR, Y_GR, Z_GR]', '[i, j, k]'], {}), '([X_GR, Y_GR, Z_GR], [i, j, k])\n', (4673, 4704), True, 'import scipy.spatial.distance as ssd\n'), ((1477, 1518), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_S, Y_S, Z_S]', '[i, j, k]'], {}), '([X_S, Y_S, Z_S], [i, j, k])\n', (1490, 1518), True, 'import scipy.spatial.distance as ssd\n'), ((1624, 1665), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_F, Y_F, Z_F]', '[i, j, k]'], {}), '([X_F, Y_F, Z_F], [i, j, k])\n', (1637, 1665), True, 'import scipy.spatial.distance as ssd\n'), ((1772, 1816), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_GT, Y_GT, Z_GT]', '[i, j, k]'], {}), '([X_GT, Y_GT, Z_GT], [i, j, k])\n', (1785, 1816), True, 'import scipy.spatial.distance as ssd\n'), ((1925, 1969), 'scipy.spatial.distance.euclidean', 'ssd.euclidean', (['[X_GR, Y_GR, Z_GR]', '[i, j, k]'], {}), '([X_GR, Y_GR, Z_GR], [i, j, k])\n', (1938, 1969), True, 'import scipy.spatial.distance as ssd\n'), ((4983, 5004), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (4990, 5004), True, 'import numpy as np\n'), ((5121, 5142), 
'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5128, 5142), True, 'import numpy as np\n'), ((5261, 5282), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5268, 5282), True, 'import numpy as np\n'), ((5403, 5424), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5410, 5424), True, 'import numpy as np\n'), ((2334, 2355), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2341, 2355), True, 'import numpy as np\n'), ((2476, 2497), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2483, 2497), True, 'import numpy as np\n'), ((2620, 2641), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2627, 2641), True, 'import numpy as np\n'), ((2766, 2787), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2773, 2787), True, 'import numpy as np\n'), ((5012, 5033), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5019, 5033), True, 'import numpy as np\n'), ((5150, 5171), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5157, 5171), True, 'import numpy as np\n'), ((5290, 5311), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5297, 5311), True, 'import numpy as np\n'), ((5432, 5453), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (5439, 5453), True, 'import numpy as np\n'), ((2363, 2384), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2370, 2384), True, 'import numpy as np\n'), ((2505, 2526), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2512, 2526), True, 'import numpy as np\n'), ((2649, 2670), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2656, 2670), True, 'import numpy as np\n'), ((2795, 2816), 'numpy.ones', 'np.ones', (['[num_UAV, 1]'], {}), '([num_UAV, 1])\n', (2802, 2816), True, 'import numpy as np\n')]
|
# Copyright 2021 <NAME>, BYU CCL
# please see the BYU CCl SpyDrNet license file for terms of usage.
from spydrnet.parsers.verilog.tokenizer import VerilogTokenizer
import spydrnet.parsers.verilog.verilog_tokens as vt
from spydrnet.ir import Netlist, Library, Definition, Port, Cable, Instance, OuterPin
from spydrnet.plugins import namespace_manager
import spydrnet as sdn
from functools import reduce
import re
class VerilogParser:
'''
Parse verilog files into spydrnet.
Higher level functions will always peek when deciding what lower level function to call.
within your own function call next instead to keep the flow moving.
the first token to expect in a function will be the token that starts that construct.
'''
#########################################################
# Note to contributors
#########################################################
# I have tried to follow the convention that each function
# parses all of the construct it is designed to parse
# for example the parse module function will parse the
# module keyword and then will call the parse
# instance function. It will not consume any of the tokens
# that belong to the instance instantations including the
# semi colon. (it just uses peek)
#
# I would suggest following this convention even on constructs
# where the first word is always the same.
# the small overhead of using the peek function to not consume
# the token has been well worth making it easier to maintain.
# --Dallin
#########################################################
# helper classes
#########################################################
class BlackboxHolder:
'''this is an internal class that helps manage
modules that are instanced before they are declared'''
def __init__(self):
self.name_lookup = dict()
self.defined = set()
def get_blackbox(self, name):
'''creates or returns the black box based on the name'''
if name in self.name_lookup:
return self.name_lookup[name]
else:
definition = sdn.Definition()
definition.name = name
self.name_lookup[name] = definition
return definition
def define(self, name):
'''adds the name to the defined set'''
self.defined.add(self.name_lookup[name])
def get_undefined_blackboxes(self):
'''return an iterable of all undefined blackboxes'''
undef = set()
for v in self.name_lookup.values():
if v not in self.defined:
undef.add(v)
return undef
#######################################################
# setup functions
#######################################################
@staticmethod
def from_filename(filename):
parser = VerilogParser()
parser.filename = filename
return parser
@staticmethod
def from_file_handle(file_handle):
parser = VerilogParser()
parser.filename = file_handle
return parser
def __init__(self):
self.filename = None
self.tokenizer = None
self.netlist = None
self.current_library = None
self.current_definition = None
self.current_instance = None
self.primitives = None
self.work = None
self.assigns = None
self.assignment_count = 0
self.blackbox_holder = self.BlackboxHolder()
    def parse(self):
        ''' parse a verilog netlist represented by verilog file

        verilog_file can be a filename or stream

        Returns the constructed spydrnet Netlist.
        '''
        self.initialize_tokenizer()
        # NOTE(review): namespace policy is temporarily switched to "DEFAULT"
        # for the duration of the parse and restored afterwards — presumably so
        # raw verilog (escaped) names are accepted; confirm against the
        # namespace_manager plugin documentation.
        ns_default = namespace_manager.default
        namespace_manager.default = "DEFAULT"
        self.parse_verilog()
        namespace_manager.default = ns_default
        # explicitly release the tokenizer's underlying resources
        self.tokenizer.__del__()
        return self.netlist
    def initialize_tokenizer(self):
        """Construct the tokenizer from ``self.filename`` (a path or an open stream)."""
        self.tokenizer = VerilogTokenizer(self.filename)
    def peek_token(self):
        """Peek the next significant token without consuming it.

        Comments are skipped; an `ifdef region is consumed up to its `endif
        on the assumption that no macros are defined; `define is rejected.
        """
        token = self.peek_token_remove_comments()
        if token[0] == '`':
            # compiler directives arrive as one token; split off the keyword
            t_split = token.split(maxsplit=1)
            if len(t_split) > 1 and t_split[0] in [vt.IFDEF]:
                # macro assumed undefined: discard everything up to `endif
                while t_split[0] != vt.ENDIF:
                    token = self.next_token_remove_comments()
                    t_split = token.split(maxsplit=1)
                token = self.peek_token_remove_comments()
            if len(t_split) > 1 and t_split[0] == vt.DEFINE:
                assert False, self.error_string(
                    "define not supported", "assumes all macros are undefined", vt.DEFINE)
        return token
    def next_token(self):
        """Consume and return the next significant token.

        Same directive handling as ``peek_token``: comments are skipped,
        `ifdef regions are discarded (macros assumed undefined), and
        `define aborts the parse.
        """
        token = self.next_token_remove_comments()
        if token[0] == '`':
            # compiler directives arrive as one token; split off the keyword
            t_split = token.split(maxsplit=1)
            if len(t_split) > 1 and t_split[0] in [vt.IFDEF]:
                # macro assumed undefined: discard everything up to `endif
                while t_split[0] != vt.ENDIF:
                    token = self.next_token_remove_comments()
                    t_split = token.split(maxsplit=1)
                token = self.next_token_remove_comments()
            if len(t_split) > 1 and t_split[0] == vt.DEFINE:
                assert False, self.error_string(
                    "define not supported", "assumes all macros are undefined", vt.DEFINE)
        return token
def peek_token_remove_comments(self):
'''peeks from the tokenizer this wrapper function exists to skip comment tokens'''
token = self.tokenizer.peek()
while len(token) >= 2 and token[0] == "/" and (token[1] == "/" or token[1] == "*"):
# this is a comment token skip it
self.tokenizer.next()
token = self.tokenizer.peek()
return token
def next_token_remove_comments(self):
'''peeks from the tokenizer this wrapper function exists to skip comment tokens'''
token = self.tokenizer.next()
while len(token) >= 2 and (token[0:2] == vt.OPEN_LINE_COMMENT or token[0:2] == vt.OPEN_BLOCK_COMMENT):
# this is a comment token, skip it
token = self.tokenizer.next()
return token
#######################################################
# parsing functions
#######################################################
    def parse_verilog(self):
        """Top level parse loop.

        Builds the netlist with its ``work`` and primitives libraries, then
        dispatches on each top-of-file construct: `celldefine regions,
        ``module``/``primitive`` definitions, preprocessor directives,
        `timescale, and ``(* ... *)`` attribute annotations.
        """
        self.netlist = sdn.Netlist()
        self.netlist.name = "SDN_VERILOG_NETLIST"
        self.work = self.netlist.create_library("work")
        self.primitives = self.netlist.create_library("SDN.verilog_primitives")
        self.current_library = self.work
        preprocessor_defines = set()
        star_properties = dict()
        time_scale = None
        primitive_cell = False
        while self.tokenizer.has_next():
            token = self.peek_token()
            if token.split(maxsplit=1)[0] == vt.CELL_DEFINE:
                # `celldefine ... `endcelldefine wraps leaf cells; modules
                # inside it are routed into the primitives library
                primitive_cell = True
                self.current_library = self.primitives
                #token = token.split(maxsplit = 1)[1]
                token = self.next_token()
            elif token.split(maxsplit=1)[0] == vt.END_CELL_DEFINE:
                primitive_cell = False
                self.current_library = self.work
                #token = token.split(maxsplit = 1)[1]
                token = self.next_token()
            elif token == vt.MODULE:
                if primitive_cell:
                    self.parse_primitive()
                else:
                    self.parse_module()
                # go ahead and set the extra metadata that we collected to this point
                if time_scale is not None:
                    self.current_definition["VERILOG.TimeScale"] = time_scale
                if len(star_properties.keys()) > 0:
                    self.current_definition["VERILOG.InlineConstraints"] = star_properties
                    star_properties = dict()
            elif token == vt.PRIMITIVE:
                # primitive ... endprimitive blocks are skipped wholesale;
                # any star properties collected for them are discarded too
                # self.parse_primitive()
                # if time_scale is not None:
                #     self.current_definition["VERILOG.TimeScale"] = time_scale
                # if len(star_properties.keys()) > 0:
                #     self.current_definition["VERILOG.InlineConstraints"] = star_properties
                #     star_properties = dict()
                star_properties = dict()
                while token != vt.END_PRIMITIVE:
                    token = self.next_token()
            elif token == vt.DEFINE:
                assert False, "Currently `define is not supported"
            elif token == vt.IFDEF:
                # macros are assumed undefined: skip to the matching `endif
                token = self.next_token()
                token = self.next_token()
                if token not in preprocessor_defines:
                    while token != vt.ENDIF:
                        token = self.next_token()
            elif token == vt.OPEN_PARENTHESIS:
                # (* ... *) attributes accumulate and attach to the next module
                stars = self.parse_star_property()
                for k, v in stars.items():
                    star_properties[k] = v
            elif token.split(maxsplit=1)[0] == vt.TIMESCALE:
                token = self.next_token()
                time_scale = token.split(maxsplit=1)[1]
            else:
                pass
                assert False, self.error_string(
                    "something at the top level of the file", "got unexpected token", token)
        self.add_blackbox_definitions()
        return self.netlist
def add_blackbox_definitions(self):
self.current_library = self.primitives
for d in self.blackbox_holder.get_undefined_blackboxes():
d["VERILOG.primitive"] = True
self.current_library.add_definition(d)
    def parse_primitive(self):
        '''similar to parse module but it will only look for the inputs and outputs to get an idea of how those things look'''
        token = self.next_token()
        assert token == vt.MODULE or token == vt.PRIMITIVE, self.error_string(
            vt.MODULE, "to begin module statement", token)
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "identifier", "not a valid module name", token)
        name = token
        # register the definition (creating the blackbox if it was instanced
        # earlier) and make it the current definition
        definition = self.blackbox_holder.get_blackbox(name)
        self.blackbox_holder.define(name)
        self.current_library.add_definition(definition)
        self.current_definition = definition
        # uses the same header parser because the primitives and regular cells have the same header.
        self.parse_module_header()
        self.parse_primitive_body()
    def parse_primitive_body(self):
        ''' just look for port information, skip tasks and functions to help out.

        Consumes everything up to endmodule/endprimitive, only acting on port
        direction declarations.'''
        token = self.peek_token()
        while token != vt.END_MODULE and token != vt.END_PRIMITIVE:
            token = self.peek_token()
            if token == vt.FUNCTION: # these constructs may contain input output or inout
                while token != vt.END_FUNCTION:
                    token = self.next_token()
            elif token == vt.TASK: # these constructs may contain input output or inout
                while token != vt.END_TASK:
                    token = self.next_token()
            elif token in vt.PORT_DIRECTIONS:
                self.parse_port_declaration(dict())
            else:
                # anything else in a primitive body is ignored
                token = self.next_token()
    def parse_module(self):
        """Parse a ``module ... endmodule`` definition into the current
        library; the first module parsed becomes the netlist's top instance."""
        token = self.next_token()
        assert token == vt.MODULE, self.error_string(
            vt.MODULE, "to begin module statement", token)
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "identifier", "not a valid module name", token)
        name = token
        # creating/fetching through the blackbox holder resolves forward references
        definition = self.blackbox_holder.get_blackbox(name)
        self.blackbox_holder.define(name)
        self.current_library.add_definition(definition)
        self.current_definition = definition
        self.assignment_count = 0
        if self.netlist.top_instance is None:
            # first module seen is taken to be the top of the design
            self.netlist.top_instance = sdn.Instance()
            self.netlist.top_instance.name = definition.name + "_top"
            self.netlist.top_instance.reference = definition
            self.netlist.name = "SDN_VERILOG_NETLIST_" + definition.name
        self.parse_module_header()
        self.parse_module_body()
    def parse_module_header(self):
        '''parse a module header and add the parameter dictionary and port list to the current_definition'''
        token = self.peek_token()
        if token == "#":
            # an optional #(parameter ...) block precedes the port list
            self.parse_module_header_parameters()
        token = self.peek_token()
        assert token == "(", self.error_string("(", "for port mapping", token)
        self.parse_module_header_ports()
        token = self.next_token()
        assert token == vt.CLOSE_PARENTHESIS, self.error_string(
            vt.CLOSE_PARENTHESIS, "to end the module ports in the header", token)
        token = self.next_token()
        assert token == vt.SEMI_COLON, self.error_string(
            vt.SEMI_COLON, "to end the module header section", token)
    def parse_module_header_parameters(self):
        '''parse a parameter block in a module header, add all parameters to the current definition

        Each dictionary key is the parameter name, optionally prefixed with its
        "[left:right] " range and/or the "integer" keyword as written.'''
        token = self.next_token()
        assert token == vt.OCTOTHORP, self.error_string(
            vt.OCTOTHORP, "to begin parameter map", token)
        token = self.next_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "to begin parameter map", token)
        token = self.next_token()
        parameter_dictionary = dict()
        while token != ")":
            # this is happening twice for all but the first one.. could simplify
            assert token == vt.PARAMETER, self.error_string(
                vt.PARAMETER, "parameter declaration", token)
            key = ""
            token = self.peek_token()
            if token == vt.OPEN_BRACKET:
                # the range is folded into the key so it round-trips on output
                left, right = self.parse_brackets()
                if right != None:
                    key = "[" + str(left) + ":" + str(right) + "] "
                else:
                    key = "[" + str(left) + "] "
            token = self.next_token()
            assert vt.is_valid_identifier(token), self.error_string(
                'identifer', "in parameter list", token)
            key += token
            token = self.next_token()
            if key == vt.INTEGER:
                # "parameter integer NAME = value": fold the keyword into the key
                key += " " + token
                token = self.next_token()
            assert token == vt.EQUAL, self.error_string(
                vt.EQUAL, "in parameter list", token)
            token = self.next_token()
            # not really sure what to assert here.
            value = token
            parameter_dictionary[key] = value
            token = self.next_token()
            if token == vt.COMMA: # just keep going
                token = self.next_token()
                assert token == vt.PARAMETER, self.error_string(
                    vt.PARAMETER, "after comma in parameter map", token)
            else:
                assert token == vt.CLOSE_PARENTHESIS, self.error_string(
                    vt.CLOSE_PARENTHESIS, "to end parameter declarations", token)
        self.set_definition_parameters(
            self.current_definition, parameter_dictionary)
    def parse_module_header_ports(self):
        '''parse port declarations in the module header and add them to the definition

        Leaves the closing parenthesis unconsumed for the caller.'''
        token = self.next_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "to begin port declarations", token)
        token = self.peek_token()
        while token != ")":
            # the first token could be a name or input output or inout
            if token == ".":
                # .port({cables}) style alias entry
                self.parse_module_header_port_alias()
            else:
                self.parse_module_header_port()
            token = self.peek_token()
            if token != vt.CLOSE_PARENTHESIS:
                assert token == vt.COMMA, self.error_string(
                    vt.COMMA, "to separate port declarations", token)
                token = self.next_token() # consume the comma token
                token = self.peek_token() # setup the next token
    def parse_module_header_port_alias(self):
        '''parse the port alias portion of the module header
        this parses the port alias section so that the port name is only a port and the mapped wires are the cables names that connect to that port.
        this requires that the cables names be kept in a dictionary to allow for setting the direction when the direction is given to the internal port names.
        example syntax
        .canale({\\canale[3] ,\\canale[2] ,\\canale[1] ,\\canale[0] }),'''
        token = self.next_token()
        assert token == vt.DOT, self.error_string(
            vt.DOT, "for port aliasing", token)
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "identifier", "for port in port aliasing", token)
        name = token
        token = self.next_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "parethesis to enclose port aliasing", token)
        token = self.peek_token()
        if token == vt.OPEN_BRACE:
            # {a, b, c} concatenation of several cables
            wires = self.parse_cable_concatenation()
        else:
            cable, left, right = self.parse_variable_instantiation()
            wires = self.get_wires_from_cable(cable, left, right)
        token = self.next_token()
        assert token == vt.CLOSE_PARENTHESIS, self.error_string(
            vt.CLOSE_PARENTHESIS, "parethesis to end port aliasing construct", token)
        # the port spans exactly the aliased wires, indexed [len(wires)-1:0]
        port = self.create_or_update_port(
            name, left_index=len(wires)-1, right_index=0)
        # connect the wires to the pins
        assert len(port.pins) == len(
            wires), "Internal Error: the pins in a created port and the number of wires the aliased cable do not match up"
        for i in range(len(port.pins)):
            wires[i].connect_pin(port.pins[i])
def parse_cable_concatenation(self):
'''parse a concatenation structure of cables, create the cables mentioned, and deal with indicies
return a list of ordered wires that represents the cable concatenation
example syntax
{wire1, wire2, wire3, wire4}'''
token = self.next_token()
assert token == vt.OPEN_BRACE, self.error_string(
vt.OPEN_BRACE, "to start cable concatenation", token)
token = self.peek_token()
wires = []
while token != vt.CLOSE_BRACE:
cable, left, right = self.parse_variable_instantiation()
wires_temp = self.get_wires_from_cable(cable, left, right)
for w in wires_temp:
wires.append(w)
token = self.next_token()
if token != vt.COMMA:
assert token == vt.CLOSE_BRACE, self.error_string(
vt.CLOSE_BRACE, "to end cable concatenation", token)
return wires
    def parse_module_header_port(self):
        '''parse the port declaration in the module header

        Handles both ANSI style (direction given in the header) and
        non-ANSI style (name only; direction comes later in the body).'''
        token = self.peek_token()
        direction = None
        defining = False
        if token in vt.PORT_DIRECTIONS:
            token = self.next_token()
            direction = vt.string_to_port_direction(token)
            token = self.peek_token()
            defining = True
        left = None
        right = None
        if token == vt.OPEN_BRACKET:
            left, right = self.parse_brackets()
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "identifier", "for port declaration", token)
        name = token
        port = self.create_or_update_port(
            name, left_index=left, right_index=right, direction=direction, defining=defining)
        # get the left and right out of the port (in case we got more information out of an instance?)
        if left == None and right == None:
            left = port.lower_index + len(port.pins) - 1
            right = port.lower_index
            if not port.is_downto:
                temp = left
                left = right
                right = temp
        cable = self.create_or_update_cable(
            name, left_index=left, right_index=right, defining=defining)
        # wire together the cables and the port
        assert len(port.pins) == len(cable.wires), self.error_string(
            "the pins in a created port and the number of wires in it's cable do not match up", "wires: " + str(len(cable.wires)), "pins: " + str(len(port.pins)))
        for i in range(len(port.pins)):
            cable.wires[i].connect_pin(port.pins[i])
    def parse_module_body(self):
        '''
        parse through a module body
        module bodies consist of port declarations,
        wire and reg declarations
        and instantiations
        expects port declarations to start with the direction and then include the cable type if provided
        '''
        direction_tokens = [vt.INPUT, vt.OUTPUT, vt.INOUT]
        variable_tokens = [vt.WIRE, vt.REG, vt.TRI0, vt.TRI1]
        token = self.peek_token()
        # (* ... *) attributes accumulate in params and attach to the next
        # declaration or instantiation encountered
        params = dict()
        while token != vt.END_MODULE:
            if token in direction_tokens:
                self.parse_port_declaration(params)
                params = dict()
            elif token in variable_tokens:
                self.parse_cable_declaration(params)
                params = dict()
            elif token == vt.ASSIGN:
                o_cable, o_left, o_right, i_cable, i_left, i_right = self.parse_assign()
                self.connect_wires_for_assign(
                    o_cable, o_left, o_right, i_cable, i_left, i_right)
            elif token == vt.DEFPARAM:
                self.parse_defparam_parameters()
            elif vt.is_valid_identifier(token):
                # any other identifier starts a module instantiation
                self.parse_instantiation(params)
                params = dict()
            elif token == vt.OPEN_PARENTHESIS:
                stars = self.parse_star_property()
                for k, v in stars.items():
                    params[k] = v
            else:
                assert False, self.error_string(
                    "direction, reg, wire, star_properties, or instance identifier", "in module body", token)
            token = self.peek_token()
        token = self.next_token()
        assert token == vt.END_MODULE, self.error_string(
            vt.END_MODULE, "to end the module body", token)
    def parse_port_declaration(self, properties):
        '''parse the port declaration post port list.

        *properties* is a dictionary of (* ... *) attributes collected just
        before the declaration; it is attached to the port(s) involved.'''
        token = self.next_token()
        assert token in vt.PORT_DIRECTIONS, self.error_string(
            "direction keyword", "to define port", token)
        direction = vt.string_to_port_direction(token)
        token = self.peek_token()
        if token in [vt.REG, vt.WIRE]:
            var_type = token
            token = self.next_token()
        else:
            var_type = None
        token = self.peek_token()
        if token == vt.OPEN_BRACKET:
            left, right = self.parse_brackets()
        else:
            left = None
            right = None
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "port identifier", "identify port", token)
        # a single declaration may cover several comma separated names
        names = []
        names.append(token)
        token = self.next_token()
        while token == vt.COMMA:
            token = self.next_token()
            names.append(token)
            token = self.next_token()
        assert token == vt.SEMI_COLON, self.error_string(
            vt.SEMI_COLON, "to end port declaration", token)
        for name in names:
            cable = self.create_or_update_cable(
                name, left_index=left, right_index=right, var_type=var_type, defining=True)
            # find the header-declared port(s) this cable is wired to
            port_list = self.get_all_ports_from_wires(
                self.get_wires_from_cable(cable, left, right))
            assert len(port_list) > 0, self.error_string(
                "port name defined in the module header", "to declare a port", cable.name)
            if len(port_list) > 1:
                # aliased cable: apply the direction to each touched port
                for p in port_list:
                    port = self.create_or_update_port(
                        p.name, direction=direction)
                    port["VERILOG.InlineConstraints"] = properties
            else:
                port = self.create_or_update_port(port_list.pop(
                ).name, left_index=left, right_index=right, direction=direction, defining=True)
                if len(cable.wires) > 1:
                    self.connect_resized_port_cable(cable, port)
def parse_cable_declaration(self, properties):
token = self.next_token()
assert token in [vt.REG, vt.WIRE, vt.TRI0, vt.TRI1], self.error_string(
"reg, tri1, tri0, or wire", "for cable declaration", token)
var_type = token
token = self.peek_token()
if token == vt.OPEN_BRACKET:
left, right = self.parse_brackets()
else:
left = None
right = None
token = self.next_token()
assert vt.is_valid_identifier(token), self.error_string(
"valid cable identifier", "identify the cable", token)
name = token
cable = self.create_or_update_cable(
name, left_index=left, right_index=right, var_type=var_type)
cable["VERILOG.InlineConstraints"] = properties
token = self.next_token()
assert token == vt.SEMI_COLON, self.error_string(
vt.SEMI_COLON, "to end cable declaration", token)
    def parse_instantiation(self, properties):
        """Parse ``def_name #(params) instance_name (port_map);`` and add the
        instance as a child of the current definition.

        *properties* holds (* ... *) attributes gathered beforehand."""
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "module identifier", "for instantiation", token)
        def_name = token
        parameter_dict = dict()
        token = self.peek_token()
        if token == vt.OCTOTHORP:
            # optional #( .KEY(value), ... ) parameter override block
            parameter_dict = self.parse_parameter_mapping()
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "instance name", "for instantiation", token)
        name = token
        token = self.peek_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "to start port to cable mapping", token)
        instance = self.current_definition.create_child()
        self.current_instance = instance
        instance.name = name
        # the reference may be a blackbox that is defined later in the file
        instance.reference = self.blackbox_holder.get_blackbox(def_name)
        instance["VERILOG.InlineConstraints"] = properties
        self.parse_port_mapping()
        self.set_instance_parameters(instance, parameter_dict)
        token = self.next_token()
        assert token == vt.SEMI_COLON, self.error_string(
            vt.SEMI_COLON, "to end instatiation", token)
def parse_defparam_parameters(self):
'''parse a defparam structure and add the parameters to the associated instance
this looks like:
defparam \\avs_s1_readdata[12]~output .bus_hold = "false"; //single backslash to escape name
and must come after the associated instance (I'm not sure this is the verilog spec but
it is the way quartus wrote my example and is much simpler)
'''
params = dict()
token = self.next_token()
assert token == vt.DEFPARAM, self.error_string(vt.DEFPARAM, "to being defparam statement", token)
token = self.next_token()
assert vt.is_valid_identifier(token), self.error_string("valid identifier", "of an instance to apply the defparam to", token)
instance_name = token
if self.current_instance.name == instance_name:
instance = self.current_instance
else:
instance = next(self.current_definition.get_instances(instance_name), None)
assert instance is not None, self.error_string("identifer of existing instance", "within the current definition", instance_name)
token = self.next_token()
assert token == vt.DOT, self.error_string(vt.DOT, "give separate parameter key from the instance name", token)
token = self.next_token()
key = token
token = self.next_token()
assert token == vt.EQUAL, self.error_string(vt.EQUAL, "separate the key from the value in a defparam statement", token)
token = self.next_token()
value = token
token = self.next_token()
assert token == vt.SEMI_COLON, self.error_string(vt.SEMI_COLON, "to end the defparam statement", token)
self.set_instance_parameters(instance, params)
    def parse_parameter_mapping(self):
        """Parse a ``#( .KEY(value), ... )`` parameter override block and
        return the collected key/value pairs as a dictionary."""
        params = dict()
        token = self.next_token()
        assert token == vt.OCTOTHORP, self.error_string(
            vt.OCTOTHORP, "to begin parameter mapping", token)
        token = self.next_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "after # to begin parameter mapping", token)
        while token != vt.CLOSE_PARENTHESIS:
            k, v = self.parse_parameter_map_single()
            params[k] = v
            # each entry is followed by a comma or the closing parenthesis
            token = self.next_token()
            assert token in [vt.CLOSE_PARENTHESIS, vt.COMMA], self.error_string(
                vt.COMMA + " or " + vt.CLOSE_PARENTHESIS, "to separate parameters or end parameter mapping", token)
        assert token == vt.CLOSE_PARENTHESIS, self.error_string(
            vt.CLOSE_PARENTHESIS, "to terminate ", token)
        return params
def parse_parameter_map_single(self):
# syntax looks like .identifier(value)
token = self.next_token()
assert token == vt.DOT, self.error_string(
vt.DOT, "to begin parameter mapping", token)
token = self.next_token()
assert vt.is_valid_identifier(token), self.error_string(
"valid parameter identifier", "in parameter mapping", token)
k = token
token = self.next_token()
assert token == vt.OPEN_PARENTHESIS, self.error_string(
vt.OPEN_PARENTHESIS, "after identifier in parameter mapping", token)
token = self.next_token()
v = token
token = self.next_token()
assert token == vt.CLOSE_PARENTHESIS, self.error_string(
vt.CLOSE_PARENTHESIS, "to close the parameter mapping value", token)
return k, v
def parse_port_mapping(self):
token = self.next_token()
assert token == vt.OPEN_PARENTHESIS, self.error_string(
vt.OPEN_PARENTHESIS, "to start the port mapping", token)
while token != vt.CLOSE_PARENTHESIS:
self.parse_port_map_single()
token = self.next_token()
assert token in [vt.COMMA, vt.CLOSE_PARENTHESIS], self.error_string(
vt.COMMA + " or " + vt.CLOSE_PARENTHESIS, "between port mapping elements or to end the port mapping", token)
    def parse_port_map_single(self):
        '''acutally does the mapping of the pins

        Parses one ``.port(cable)`` entry, creating the port on the instance's
        reference as needed and connecting the cable's wires to its pins.
        An empty ``.port()`` entry leaves the port unconnected.'''
        token = self.next_token()
        assert token == vt.DOT, self.error_string(
            vt.DOT, "to start a port mapping instance", token)
        token = self.next_token()
        assert vt.is_valid_identifier(token), self.error_string(
            "valid port identifier", "for port in instantiation port map", token)
        port_name = token
        token = self.next_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "to encapsulate cable name in port mapping", token)
        token = self.peek_token()
        if token != vt.CLOSE_PARENTHESIS:
            if token == vt.OPEN_BRACE:
                # concatenated cables: .port({a, b, c})
                wires = self.parse_cable_concatenation()
            else:
                cable, left, right = self.parse_variable_instantiation()
                wires = self.get_wires_from_cable(cable, left, right)
            pins = self.create_or_update_port_on_instance(
                port_name, len(wires))
            assert len(pins) >= len(wires), self.error_string(
                "pins length to match or exceed cable.wires length", "INTERNAL ERROR", str(len(pins)) + "!=" + str(len(wires)))
            # there can be unconnected pins at the end of the port.
            for i in range(len(wires)):
                wires[i].connect_pin(pins[i])
            token = self.next_token()
        else:
            # consume the )
            token = self.next_token()
            # the port is intentionally left unconnected.
            self.create_or_update_port_on_instance(port_name, 1)
        assert token == vt.CLOSE_PARENTHESIS, self.error_string(
            vt.CLOSE_PARENTHESIS, "to end cable name in port mapping", token)
def parse_assign(self):
token = self.next_token()
assert token == vt.ASSIGN, self.error_string(
vt.ASSIGN, "to begin assignment statement", token)
l_cable, l_left, l_right = self.parse_variable_instantiation()
token = self.next_token()
assert token == vt.EQUAL, self.error_string(
vt.EQUAL, "in assigment statment", token)
r_cable, r_left, r_right = self.parse_variable_instantiation()
token = self.next_token()
assert token == vt.SEMI_COLON, self.error_string(
vt.SEMI_COLON, "to terminate assign statement", token)
return l_cable, l_left, l_right, r_cable, r_left, r_right
def parse_variable_instantiation(self):
'''parse the cable name and its indicies if any
if we are in Intel land then 2 other things can happen.
the "cable" is a constant,
attach it to the \\<const0> or \\<const1> cable.
the cable is inverted,
create a new cable and an inverter block similar to the assign but with an inversion in the block
'''
token = self.next_token()
if token[0] == "1":
assert token[1] == vt.SINGLE_QUOTE, self.error_string(vt.SINGLE_QUOTE, "in the constant", token)
assert token[2] == 'b', self.error_string('b', "in the constant", token)
assert token[3] in ["0", "1", "x", "X", "z", "Z"], self.error_string("one of 0, 1, x, X, z, Z", "represent the constant value after '", token)
name = "\\<const" + token[2] + "> "
elif vt.is_numeric(token[0]):
assert False, self.error_string("single bit constant", "multibit constants not supported", token)
else:
name = token
assert vt.is_valid_identifier(name), self.error_string(
"valid port identifier", "for port in instantiation port map", name)
token = self.peek_token()
left = None
right = None
if token == vt.OPEN_BRACKET:
left, right = self.parse_brackets()
cable = self.create_or_update_cable(
name, left_index=left, right_index=right)
return cable, left, right
def parse_brackets(self):
'''returns 2 integer values or 1 integer value and none'''
token = self.next_token()
assert token == vt.OPEN_BRACKET, self.error_string(
"[", "to begin array slice", token)
token = self.next_token()
assert self.is_numeric(token), self.error_string(
"number", "after [", token)
left = int(token)
token = self.next_token()
if token == "]":
return left, None
else:
assert(token == vt.COLON), self.error_string(
"] or :", "in array slice", token)
token = self.next_token()
assert(self.is_numeric(token)), self.error_string(
"number", "after : in array slice", token)
right = int(token)
token = self.next_token()
assert token == vt.CLOSE_BRACKET, self.error_string(
"]", "to terminate array slice", token)
return left, right
    def parse_star_property(self):
        '''parse a "(* key = value, ... *)" attribute annotation into a dict.

        A bare key (no "= value") maps to None. Returns the dict.
        '''
        token = self.next_token()
        assert token == vt.OPEN_PARENTHESIS, self.error_string(
            vt.OPEN_PARENTHESIS, "to begin star property", token)
        token = self.next_token()
        assert token == vt.STAR, self.error_string(
            vt.STAR, "to begin star property", token)
        properties_dict = dict()
        token = self.next_token()
        while token != vt.STAR:
            assert vt.is_valid_identifier(token)
            key = token
            token = self.next_token()
            assert token in [vt.EQUAL, vt.STAR, vt.COMMA], self.error_string(
                vt.EQUAL + " or " + vt.STAR + " or " + vt.COMMA, "to set a star parameter", token)
            if token == vt.EQUAL:
                # accumulate value tokens until the entry or the list ends
                token = self.next_token()
                value = ""
                while token != vt.STAR and token != vt.COMMA:
                    value += token
                    token = self.next_token()
            else:
                value = None
            properties_dict[key] = value
            # a COMMA separates entries; advance to the next key
            if token != vt.STAR:
                token = self.next_token()
        assert token == vt.STAR, self.error_string(
            vt.STAR, "to start the ending of a star property", token)
        token = self.next_token()
        assert token == vt.CLOSE_PARENTHESIS, self.error_string(
            vt.CLOSE_PARENTHESIS, "to end the star property", token)
        return properties_dict
#######################################################
# assignment helpers
#######################################################
def get_assignment_library(self):
'''create or return a previously created assignment library'''
if self.assigns == None:
self.assigns = self.netlist.create_library(
name="SDN_VERILOG_ASSIGNMENT")
return self.assigns
    def get_assignment_definition(self, width):
        '''get the definition of the specified width for assignments'''
        proposed_name = "SDN_VERILOG_ASSIGNMENT_" + str(width)
        library = self.get_assignment_library()
        definition = next(library.get_definitions(proposed_name), None)
        if definition == None:
            # lazily build an "i" -> "o" pass-through block of the given
            # width: wire i of the "through" cable ties input pin i to
            # output pin i
            definition = library.create_definition(name=proposed_name)
            in_port = definition.create_port("i")
            out_port = definition.create_port("o")
            in_port.create_pins(width)
            out_port.create_pins(width)
            in_port.direction = sdn.Port.Direction.IN
            out_port.direction = sdn.Port.Direction.OUT
            cable = definition.create_cable("through")
            cable.create_wires(width)
            for i in range(width):
                cable.wires[i].connect_pin(in_port.pins[i])
                cable.wires[i].connect_pin(out_port.pins[i])
        return definition
def create_assignment_instance(self, width):
'''create a new assign instance of the specified width on the current definition'''
definition = self.get_assignment_definition(width)
instance_name = definition.name + "_" + str(self.assignment_count)
self.assignment_count += 1
instance = self.current_definition.create_child(instance_name)
instance.reference = definition
return instance
    def connect_wires_for_assign(self, l_cable, l_left, l_right, r_cable, r_left, r_right):
        '''connect the wires in r_left to the wires in l_left'''
        out_wires = self.get_wires_from_cable(l_cable, l_left, l_right)
        in_wires = self.get_wires_from_cable(r_cable, r_left, r_right)
        # min because we don't need extra pins since only what can will assign.
        width = min(len(out_wires), len(in_wires))
        instance = self.create_assignment_instance(width)
        in_port = next(instance.reference.get_ports('i'), None)
        out_port = next(instance.reference.get_ports('o'), None)
        in_pins = self.get_pins_by_port_from_instance(instance, in_port)
        out_pins = self.get_pins_by_port_from_instance(instance, out_port)
        # RHS wire i feeds input pin i of the pass-through block; output
        # pin i drives LHS wire i
        for i in range(width):
            out_wires[i].connect_pin(out_pins[i])
            in_wires[i].connect_pin(in_pins[i])
#######################################################
# helper functions
#######################################################
def get_pins_by_port_from_instance(self, instance, port):
pin_lookup = instance.pins
pins_out = []
for p in port.pins:
pins_out.append(pin_lookup[p])
return pins_out
def set_instance_parameters(self, instance, params):
for k, v in params.items():
#self.set_single_parameter(instance.reference, k, None)
self.set_single_parameter(instance, k, v)
def set_definition_parameters(self, definition, params):
for k, v in params.items():
self.set_single_parameter(definition, k, v)
def set_single_parameter(self, var, k, v):
if "VERILOG.Parameters" not in var:
var["VERILOG.Parameters"] = dict()
if k not in var["VERILOG.Parameters"] or var["VERILOG.Parameters"][k] is None:
var["VERILOG.Parameters"][k] = v
def get_all_ports_from_wires(self, wires):
'''gets all ports associated with a set of wires'''
ports = set()
for w in wires:
for p in w.pins:
if isinstance(p, sdn.InnerPin):
ports.add(p.port)
return ports
def get_wires_from_cable(self, cable, left, right):
wires = []
if left != None and right != None:
left = left - cable.lower_index
right = right - cable.lower_index
temp_wires = cable.wires[min(left, right): max(left, right) + 1]
if left > right:
temp_wires = reversed(temp_wires)
for w in temp_wires:
wires.append(w)
elif left != None or right != None:
if left != None:
index = left - cable.lower_index
else:
index = right - cable.lower_index
wires.append(cable.wires[index])
else:
for w in cable.wires:
wires.append(w)
return wires
def convert_string_to_port_direction(self, token):
if token == vt.INPUT:
return sdn.Port.Direction.IN
if token == vt.INOUT:
return sdn.Port.Direction.INOUT
if token == vt.OUTPUT:
return sdn.Port.Direction.OUT
else:
return sdn.Port.Direction.UNDEFINED
    ########################################################################################
    # Port and cable creation and update management
    ########################################################################################
'''I'm handed a few different possible senarios
module name(port1, port2,...);
input [3:0] port1
output [3:0] port2
endmodule
or
module name
(
input [3:0] port1,
output[3:0] port2,
...
);
additionally i need to be aware of the possibility that something like this happens
module name(.port1({cable1, cable2}));
input [1:0] cable1;
output [1:0] cable2;
'''
    def connect_resized_port_cable(self, resized_cable, resized_port):
        '''One to one connector. Don't use with alias statements. this expects that the given cable should completely fill the port...
        after a cable has been updated that is attached to a port it may need to update the port and reconnect the'''
        assert len(resized_cable.wires) == len(resized_port.pins), self.error_string("cable and port to have same size",
                                                                                     "to reconnect expanded cables and ports", "wires: " + str(len(resized_cable.wires)) + " pins: " + str(len(resized_port.pins)))
        for i in range(len(resized_port.pins)):
            # I think these should be lined up right?
            # only connect pins not already attached to wire i (avoids
            # double connections after a resize)
            if resized_port.pins[i] not in resized_cable.wires[i].pins:
                resized_cable.wires[i].connect_pin(resized_port.pins[i])
    def create_or_update_cable(self, name, left_index=None, right_index=None, var_type=None, defining=False):
        """Return the named cable of the current definition, creating it if
        absent and widening it so it covers [left_index, right_index]."""
        cable_generator = self.current_definition.get_cables(name)
        cable = next(cable_generator, None)
        if cable == None:
            cable = self.current_definition.create_cable()
            self.populate_new_cable(
                cable, name, left_index, right_index, var_type)
            return cable
        assert cable.name == name
        cable_lower = cable.lower_index
        # -1 so that it is the same number if the width is 1
        cable_upper = cable.lower_index + len(cable.wires) - 1
        # normalize the requested bounds so in_lower <= in_upper (or both None)
        if left_index is not None and right_index is not None:
            in_lower = min(left_index, right_index)
            in_upper = max(left_index, right_index)
        elif left_index is not None:
            in_lower = left_index
            in_upper = left_index
        elif right_index is not None:
            in_upper = right_index
            in_lower = right_index
        else:
            in_upper = None
            in_lower = None
        if defining and in_lower is not None: # if the cable width is being defined then recenter the cable
            cable.lower_index = in_lower
            cable_lower = cable.lower_index
            cable_upper = cable.lower_index + len(cable.wires) - 1
        # widen the existing cable at either end to cover the requested span
        if in_upper is not None and in_lower is not None:
            if in_lower < cable_lower:
                prepend = cable_lower - in_lower
                self.prepend_wires(cable, prepend)
            if in_upper > cable_upper:
                postpend = in_upper - cable_upper
                self.postpend_wires(cable, postpend)
        if var_type is not None:
            cable["VERILOG.CableType"] = var_type
        return cable
def populate_new_cable(self, cable, name, left_index, right_index, var_type):
cable.name = name
if left_index is not None and right_index is not None:
cable.is_downto = right_index <= left_index
cable.create_wires(max(left_index, right_index) -
min(left_index, right_index) + 1)
cable.lower_index = min(left_index, right_index)
elif left_index is not None:
cable.lower_index = left_index
cable.create_wire()
elif right_index is not None:
cable.lower_index = right_index
cable.create_wire()
else:
cable.lower_index = 0
cable.create_wire()
if var_type:
cable["VERILOG.CableType"] = var_type
return cable
def prepend_wires(self, cable, count):
orig_count = len(cable.wires)
cable.create_wires(count)
cable.wires = cable.wires[orig_count:] + cable.wires[:orig_count]
cable.lower_index = cable.lower_index - count
    def postpend_wires(self, cable, count):
        # grow the cable at the high end; lower_index is unchanged
        cable.create_wires(count)
def create_or_update_port_on_instance(self, name, width):
'''returns the set of pins associated with the port on the instance'''
pins = []
port = self.create_or_update_port(
name, left_index=width - 1, right_index=0, definition=self.current_instance.reference)
for pin in self.current_instance.pins:
if pin.inner_pin in port.pins:
pins.append(pin)
return pins
    def create_or_update_port(self, name, left_index=None, right_index=None, direction=None, definition=None, defining=False):
        """Return the named port of *definition* (default: current
        definition), creating it if absent and widening it so it covers
        [left_index, right_index]."""
        if definition == None:
            definition = self.current_definition
        port_generator = definition.get_ports(name)
        port = next(port_generator, None)
        if port == None:
            port = definition.create_port()
            self.populate_new_port(
                port, name, left_index, right_index, direction)
            return port
        assert port.name == name
        # figure out what we need to do with the indicies
        port_lower = port.lower_index
        # -1 so that it is the same number if the width is 1
        port_upper = port.lower_index + len(port.pins) - 1
        # normalize the requested bounds so in_lower <= in_upper (or both None)
        if left_index is not None and right_index is not None:
            in_lower = min(left_index, right_index)
            in_upper = max(left_index, right_index)
        elif left_index is not None:
            in_lower = left_index
            in_upper = left_index
        elif right_index is not None:
            in_upper = right_index
            in_lower = right_index
        else:
            in_upper = None
            in_lower = None
        if defining and in_lower is not None: # if the cable width is being defined then recenter the cable
            port.lower_index = in_lower
            port_lower = port.lower_index
            port_upper = port.lower_index + len(port.pins) - 1
        # widen the existing port at either end to cover the requested span
        if in_upper is not None and in_lower is not None:
            if in_lower < port_lower:
                prepend = port_lower - in_lower
                self.prepend_pins(port, prepend)
            if in_upper > port_upper:
                postpend = in_upper - port_upper
                self.postpend_pins(port, postpend)
        if direction is not None:
            port.direction = direction
        return port
def populate_new_port(self, port, name, left_index, right_index, direction):
port.name = name
if left_index is not None and right_index is not None:
port.is_downto = right_index <= left_index
port.create_pins(max(left_index, right_index) -
min(left_index, right_index) + 1)
port.lower_index = min(left_index, right_index)
elif left_index is not None:
port.lower_index = left_index
port.create_pin()
elif right_index is not None:
port.lower_index = right_index
port.create_pin()
else:
port.lower_index = 0
port.create_pin()
if direction is not None:
port.direction = direction
return port
def prepend_pins(self, port, count):
orig_count = len(port.pins)
port.create_pins(count)
port.pins = port.pins[orig_count:] + port.pins[:orig_count]
port.lower_index = port.lower_index - count
    def postpend_pins(self, port, count):
        # grow the port at the high end; lower_index is unchanged
        port.create_pins(count)
def is_numeric(self, token):
first = True
for c in token:
if first:
first = False
if c == "-":
continue
if c not in vt.NUMBERS:
return False
return True
def is_alphanumeric(self, token):
for c in token:
if c not in vt.NUMBERS and c not in vt.LETTERS:
return False
return True
def error_string(self, expected, why, result):
'''put in the expectation and then the reason or location and the actual result'''
return "expected " + str(expected) + " " + why + " but got " + str(result) + " Line: " + str(self.tokenizer.line_number)
|
[
"spydrnet.Instance",
"spydrnet.parsers.verilog.verilog_tokens.is_numeric",
"spydrnet.Definition",
"spydrnet.parsers.verilog.tokenizer.VerilogTokenizer",
"spydrnet.Netlist",
"spydrnet.parsers.verilog.verilog_tokens.string_to_port_direction",
"spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier"
] |
[((4072, 4103), 'spydrnet.parsers.verilog.tokenizer.VerilogTokenizer', 'VerilogTokenizer', (['self.filename'], {}), '(self.filename)\n', (4088, 4103), False, 'from spydrnet.parsers.verilog.tokenizer import VerilogTokenizer\n'), ((6412, 6425), 'spydrnet.Netlist', 'sdn.Netlist', ([], {}), '()\n', (6423, 6425), True, 'import spydrnet as sdn\n'), ((10069, 10098), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (10091, 10098), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((11591, 11620), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (11613, 11620), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((16980, 17009), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (17002, 17009), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((19688, 19717), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (19710, 19717), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((22884, 22918), 'spydrnet.parsers.verilog.verilog_tokens.string_to_port_direction', 'vt.string_to_port_direction', (['token'], {}), '(token)\n', (22911, 22918), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((23335, 23364), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (23357, 23364), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((25242, 25271), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (25264, 25271), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((25807, 25836), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), 
'(token)\n', (25829, 25836), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((26154, 26183), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (26176, 26183), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((27613, 27642), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (27635, 27642), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((29917, 29946), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (29939, 29946), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((31314, 31343), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (31336, 31343), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((34590, 34618), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['name'], {}), '(name)\n', (34612, 34618), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((12047, 12061), 'spydrnet.Instance', 'sdn.Instance', ([], {}), '()\n', (12059, 12061), True, 'import spydrnet as sdn\n'), ((14246, 14275), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (14268, 14275), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((19410, 19444), 'spydrnet.parsers.verilog.verilog_tokens.string_to_port_direction', 'vt.string_to_port_direction', (['token'], {}), '(token)\n', (19437, 19444), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((34401, 34424), 'spydrnet.parsers.verilog.verilog_tokens.is_numeric', 'vt.is_numeric', (['token[0]'], {}), '(token[0])\n', (34414, 34424), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((36464, 36493), 
'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (36486, 36493), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n'), ((2196, 2212), 'spydrnet.Definition', 'sdn.Definition', ([], {}), '()\n', (2210, 2212), True, 'import spydrnet as sdn\n'), ((21945, 21974), 'spydrnet.parsers.verilog.verilog_tokens.is_valid_identifier', 'vt.is_valid_identifier', (['token'], {}), '(token)\n', (21967, 21974), True, 'import spydrnet.parsers.verilog.verilog_tokens as vt\n')]
|
#!/usr/bin/python3
import os
from brownie import VRFConsumer, accounts, config
STATIC_SEED = 123
def main():
    """Request a random number from the most recently deployed VRFConsumer."""
    # NOTE(review): the config value is passed through os.getenv, so
    # config['wallets']['from_key'] is expected to be the *name* of an
    # environment variable holding the private key -- confirm against
    # brownie-config.yaml.
    dev = accounts.add(os.getenv(config['wallets']['from_key']))
    # Get the most recent PriceFeed Object
    vrf_contract = VRFConsumer[len(VRFConsumer) - 1]
    vrf_contract.getRandomNumber(STATIC_SEED, {'from': dev})
|
[
"os.getenv"
] |
[((135, 175), 'os.getenv', 'os.getenv', (["config['wallets']['from_key']"], {}), "(config['wallets']['from_key'])\n", (144, 175), False, 'import os\n')]
|
"""
Controller Class
-----------------
This class contains the controller logic for the application. This takes
input from the table and other interface objects and then manages change of
state for the database. It will also pass the state changes to the table
objects.
"""
__author__ = 'krishnab'
__version__ = '0.1.0'
from pydhs.Database import DatabasePsycopg2
from pydhs.Database import DatabaseSqlalchemy
## Initialize Constants
TABLENAMES = ["union_table", "intersection_table"]
class Controller_stored_procedures():
    """Controller that installs stored procedures into the DHS database.

    Maintains both a psycopg2 and a sqlalchemy connection to the same
    database so state changes can go through either interface.
    """

    def __init__(self, dbname, user='krishnab', password='3kl4vx71',
                 host='localhost', port=5433):
        """Open database connections.

        Credentials default to the historical hard-coded values so existing
        callers keep working, but can now be overridden per instance.
        SECURITY: a real password is embedded as a default here (as in the
        original code) -- move it to an environment variable or a config
        file outside version control.
        """
        ## create a database object inside the controller to manage state changes
        # to the database.
        self.db = DatabasePsycopg2(dbname, user, password, host, port)
        self.conn_sqlalchemy = DatabaseSqlalchemy(dbname, user, password,
                                                  host, port)
        self.database_table_fields = {}

    def add_four_digit_function(self):
        """Install four_digit_date(dt TEXT): expand 2-digit years to 4 digits.

        Values already > 1000 pass through unchanged; years < 18 map into
        20xx and years > 18 into 19xx.
        NOTE(review): a year exactly equal to 18 is returned unchanged
        (still two digits) -- confirm that is intended.
        """
        query = """
        create or replace function four_digit_date(dt TEXT)
        returns TEXT
        as
        $$
        DECLARE
            intDate INT;
            newDate TEXT;
        BEGIN
            intDate = dt::INT;
            IF (intDate > 1000) THEN
                RETURN dt;
            END IF;
            IF (intDate < 18) THEN
                intDate = intDate + 2000;
            ELSIF (intDate > 18) THEN
                intDate = intDate + 1900;
            END IF;
            RETURN intDate::TEXT;
        END;
        $$
        LANGUAGE plpgsql;
        """
        self.db.get_regular_cursor_query_no_return(query)

    def add_wealth_v190_recode_function(self):
        """Install wealth_v190_recode(val TEXT): normalize wealth-quintile
        labels ('lowest quintile'...'highest quintile', '1.0'...'5.0') to
        the digit strings '1'..'5'; anything else passes through."""
        query = """
        create or replace function wealth_v190_recode(val TEXT)
        returns TEXT
        as
        $$
        BEGIN
            CASE val
                WHEN '1.0', '2.0', '3.0', '4.0', '5.0' THEN
                    RETURN CAST(CAST(CAST(val as FLOAT )as INT) as TEXT);
                WHEN 'lowest quintile' THEN
                    RETURN '1';
                WHEN 'second quintile' THEN
                    RETURN '2';
                WHEN 'middle quintile' THEN
                    RETURN '3';
                WHEN 'fourth quintile' THEN
                    RETURN '4';
                WHEN 'highest quintile' THEN
                    RETURN '5';
                WHEN '1', '2', '3', '4', '5' THEN
                    RETURN val;
                ELSE
                    RETURN val;
            END CASE;
        END;
        $$
        LANGUAGE plpgsql;
        """
        self.db.get_regular_cursor_query_no_return(query)
|
[
"pydhs.Database.DatabaseSqlalchemy",
"pydhs.Database.DatabasePsycopg2"
] |
[((695, 762), 'pydhs.Database.DatabasePsycopg2', 'DatabasePsycopg2', (['dbname', '"""krishnab"""', '"""3kl4vx71"""', '"""localhost"""', '(5433)'], {}), "(dbname, 'krishnab', '3kl4vx71', 'localhost', 5433)\n", (711, 762), False, 'from pydhs.Database import DatabasePsycopg2\n'), ((935, 1004), 'pydhs.Database.DatabaseSqlalchemy', 'DatabaseSqlalchemy', (['dbname', '"""krishnab"""', '"""3kl4vx71"""', '"""localhost"""', '(5433)'], {}), "(dbname, 'krishnab', '3kl4vx71', 'localhost', 5433)\n", (953, 1004), False, 'from pydhs.Database import DatabaseSqlalchemy\n')]
|
#!/usr/bin/python
# Copyright (C) <NAME> 2003. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
# Regression test: when directory of project root contained regex metacharacters,
# Boost.Build failed to work. Bug reported by <NAME>
from BoostBuild import Tester, List
t = Tester()
# The Jamfiles only need to exist; the regression is purely about the
# project directory name containing regex metacharacters ("[abc]").
t.write("bad[abc]dirname/Jamfile", """
""")
t.write("bad[abc]dirname/project-root.jam", """
""")
# The build must succeed even though the path would break a naive regex.
t.run_build_system(subdir="bad[abc]dirname")
t.cleanup()
|
[
"BoostBuild.Tester"
] |
[((499, 507), 'BoostBuild.Tester', 'Tester', ([], {}), '()\n', (505, 507), False, 'from BoostBuild import Tester, List\n')]
|
#
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
from itertools import chain
from itertools import combinations
from unittest.mock import Mock
from django.test import TestCase
from api.common.permissions.openshift_all_access import OpenshiftAllAccessPermission
from api.iam.models import User
from api.provider.models import Provider
# Wildcard read-access entries keyed by provider type; the test below
# unions these over every non-empty subset of providers.
ACCESS_KEYS = {
    Provider.PROVIDER_AWS: {"aws.account": {"read": ["*"]}},
    Provider.PROVIDER_AZURE: {"azure.subscription_guid": {"read": ["*"]}},
    Provider.PROVIDER_OCP: {"openshift.cluster": {"read": ["*"]}},
}
class OCPAllAccessPermissionTest(TestCase):
    """Test the OCP-on-All access permissions."""
    def test_has_perm_with_access_on_get(self):
        """Test that a user with at least 1 access can execute."""
        accessPerm = OpenshiftAllAccessPermission()
        s = ACCESS_KEYS.keys()
        # iterate every non-empty subset of providers (powerset minus {})
        for key in chain.from_iterable(combinations(s, r) for r in range(1, len(s) + 1)):
            with self.subTest(permission=key):
                access = {}
                for k in key:
                    access.update(ACCESS_KEYS[k])
                # non-admin user whose access map is the union of the subset
                user = Mock(spec=User, access=access, admin=False)
                req = Mock(user=user, method="GET")
                result = accessPerm.has_permission(request=req, view=None)
                self.assertTrue(result)
|
[
"itertools.combinations",
"unittest.mock.Mock",
"api.common.permissions.openshift_all_access.OpenshiftAllAccessPermission"
] |
[((814, 844), 'api.common.permissions.openshift_all_access.OpenshiftAllAccessPermission', 'OpenshiftAllAccessPermission', ([], {}), '()\n', (842, 844), False, 'from api.common.permissions.openshift_all_access import OpenshiftAllAccessPermission\n'), ((915, 933), 'itertools.combinations', 'combinations', (['s', 'r'], {}), '(s, r)\n', (927, 933), False, 'from itertools import combinations\n'), ((1144, 1187), 'unittest.mock.Mock', 'Mock', ([], {'spec': 'User', 'access': 'access', 'admin': '(False)'}), '(spec=User, access=access, admin=False)\n', (1148, 1187), False, 'from unittest.mock import Mock\n'), ((1210, 1239), 'unittest.mock.Mock', 'Mock', ([], {'user': 'user', 'method': '"""GET"""'}), "(user=user, method='GET')\n", (1214, 1239), False, 'from unittest.mock import Mock\n')]
|
# Machine Learning/Data Science Precourse Work
# ###
# LAMBDA SCHOOL
# ###
# MIT LICENSE
# ###
# Free example function definition
# This function passes one of the 11 tests contained inside of test.py. Write the rest, defined in README.md, here,
# and execute python test.py to test. Passing this precourse work will greatly increase your odds of acceptance
# into the program.
import math
import numpy as np
def f(x):
    """Return x squared."""
    return x * x
def f_2(x):
    """Return x cubed."""
    return x * x * x
def f_3(x):
    """Return x**3 + 5*x."""
    return x ** 3 + 5 * x
def d_f(x):
    """Derivative of f: 2x."""
    return x + x
def d_f_2(x):
    """Derivative of f_2: 3x^2."""
    return 3 * x * x
def d_f_3(x):
    """Derivative of f_3: 3x^2 + 5."""
    return 3 * x * x + 5
# for all values of x, return x + y
def vector_sum(x, y):
    """Element-wise sum of two equal-length vectors.

    Fixes the original bug where the function returned inside the loop,
    so only the first element's sum was ever produced.
    """
    return [a + b for a, b in zip(x, y)]
# for all values of x, return x - y
def vector_less(x, y):
    """Element-wise difference x - y of two equal-length vectors.

    Fixes the original bug where the function returned inside the loop,
    so only the first element's difference was ever produced.
    """
    return [a - b for a, b in zip(x, y)]
def vector_magnitude(v):
    """Return the Euclidean (L2) norm of vector v."""
    return math.sqrt(sum(component ** 2 for component in v))
def vec5():
    """Return the 5-dimensional all-ones integer vector."""
    return np.array([1] * 5)
def vec3():
    """Return the 3-dimensional zero integer vector."""
    return np.array([0] * 3)
def vec2_1():
    """Return the 2-d unit basis vector along the first axis."""
    return np.array((1, 0))
def vec2_2():
    """Return the 2-d unit basis vector along the second axis."""
    return np.array((0, 1))
def matrix_multiply(vec, matrix):
    """Return the dot product of vec and matrix (delegates to numpy)."""
    product = np.dot(vec, matrix)
    return product
|
[
"numpy.dot",
"numpy.array",
"math.sqrt"
] |
[((968, 987), 'math.sqrt', 'math.sqrt', (['sqvector'], {}), '(sqvector)\n', (977, 987), False, 'import math\n'), ((1013, 1038), 'numpy.array', 'np.array', (['[1, 1, 1, 1, 1]'], {}), '([1, 1, 1, 1, 1])\n', (1021, 1038), True, 'import numpy as np\n'), ((1064, 1083), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (1072, 1083), True, 'import numpy as np\n'), ((1111, 1127), 'numpy.array', 'np.array', (['[1, 0]'], {}), '([1, 0])\n', (1119, 1127), True, 'import numpy as np\n'), ((1155, 1171), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (1163, 1171), True, 'import numpy as np\n'), ((1219, 1238), 'numpy.dot', 'np.dot', (['vec', 'matrix'], {}), '(vec, matrix)\n', (1225, 1238), True, 'import numpy as np\n')]
|
import tweepy
import pandas as pd
import re
import json
import os
import datetime
import stripe
import time
from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
from fastapi.responses import RedirectResponse
from typing import Optional
from sqlalchemy import create_engine
## Configs
# dev mode runs the app via uvicorn directly (imported lazily here)
if os.environ['MODE'] == 'dev':
    import uvicorn
# pick the live vs test Stripe key and the matching Price id
if os.environ['STRIPE_MODE'] == 'prod':
    stripe.api_key = os.environ['STRIPE_KEY_PROD']
    price = "price_1L0We3CsKWtKuHp02UYDbhBF"
else:
    stripe.api_key = os.environ['STRIPE_KEY_DEV']
    price = "price_1KeQ1PCsKWtKuHp0PIYQ1AnH"
# if os.environ['PAY_MODE'] == 'pay':
#     return_path = "create-checkout-session"
# else:
#     return_path = 'free_mode'
def HtmlIntake(path):
    """Read the file at *path* and return its full contents as one string."""
    with open(path) as handle:
        return handle.read()
def loadWords(mode):
    """Load the profane-word list and build an alternation pattern.

    *mode* is accepted for interface compatibility but is unused.
    Returns (pattern, words): the words joined by " | ", and the raw list.
    """
    # context manager fixes the original's leaked file handle
    with open("references/profane_words.json", 'r') as f:
        bad_words = json.load(f)
    bad_words_pattern = ' | '.join(bad_words)
    return bad_words_pattern, bad_words
def flagDFProces(df):
    """Annotate a tweets DataFrame in place with profanity-match columns.

    NOTE(review): relies on the module-level bad_words_pattern global being
    initialised (via loadWords) before this is called.
    """
    df['Profane Words'] = df['Text'].apply(lambda x: ' , '.join(re.findall(bad_words_pattern, x)))
    # 1 when at least one flagged word matched, else 0
    df['occurance'] = df['Profane Words'].apply(lambda x: 1 if len(x) > 0 else 0)
    # calendar date extracted from the full timestamp
    df['Date'] = df['date_full'].apply(lambda x: datetime.datetime.date(x))
    return df
def inituserOauth(basepath):
    """Build the tweepy OAuth2 user handler for the Twitter login flow.

    The redirect URI points at this app's /return-get route; client id and
    secret come from the CLIENT_ID / CLIENT_SECRET environment variables.
    """
    oauth2_user_handler = tweepy.OAuth2UserHandler(
        client_id=os.getenv('CLIENT_ID'),
        redirect_uri=f'{basepath}/return-get',
        scope=["tweet.read", "tweet.write", "users.read"],
        # Client Secret is only necessary if using a confidential client
        client_secret=os.getenv('CLIENT_SECRET'))
    return oauth2_user_handler
def setBasePath(mode):
    """Return the app's base URL for the given run mode (case-insensitive).

    'dev' also relaxes oauthlib's HTTPS requirement so the local HTTP
    callback works. Raises ValueError for an unknown mode (the original
    raised an obscure NameError from an unassigned variable).
    """
    mode = mode.lower()
    if mode == 'dev':
        basepath = 'http://0.0.0.0:4242'
        os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
    elif mode == 'prod':
        basepath = "https://www.cleanmytweets.com"
        # basepath = 'https://cleanmytweets.herokuapp.com'
    else:
        raise ValueError(f"unknown mode: {mode!r}")
    return basepath
def getTweets(user_id, client, username):
    """Scrape a user's timeline, flag profane tweets, persist to the DB.

    Runs as a FastAPI background task. Writes flagged rows to the 'tweets'
    table and an audit row to 'users' via the module-level db_engine.
    """
    # Collect user timeline
    twitter_client = client
    tweets_out = []
    for tweet in tweepy.Paginator(twitter_client.get_users_tweets, id=user_id,
                                  tweet_fields=['id', 'text', 'created_at'], max_results=100).flatten(limit=3000):
        tweets_out.append([tweet.id, tweet.text, tweet.created_at])
    timeline_df = pd.DataFrame(tweets_out, columns=['Delete?', 'Text', 'date_full'])
    # Run scan for flag words
    out_df = flagDFProces(timeline_df)
    total_count = out_df.shape[0]
    prof_df = pd.DataFrame(out_df[out_df['occurance'] == 1])
    # encode text as bytes so awkward characters survive the round trip
    prof_df['Text'] = prof_df['Text'].apply(lambda x: x.encode('utf-8', 'ignore'))
    prof_df['username'] = username
    prof_df['total_count'] = total_count
    # Check length of prof_df
    # with no flagged tweets, insert a single congratulatory placeholder row
    if len(prof_df) == 0:
        prof_df.loc[1] = [0, "Great work, we've found no controversial tweets in your timeline!",
                          datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S+00:00'), ' ', 1,
                          datetime.datetime.now().strftime('%Y-%m-%d'), username, 0]
    user_df = pd.DataFrame([[username, datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S+00:00')]],
                           columns=['Name', 'Insert_DT'])
    # write to sql
    prof_df.to_sql('tweets', con=db_engine, if_exists='append')  # 'replace'
    user_df.to_sql('users', con=db_engine, if_exists='append')
    print('Processing Complete')
# initialization
mode = os.environ['MODE']
bad_words_pattern, bad_words = loadWords(mode)
# init DB
db_engine = create_engine(os.environ['DB_URL'], echo=False)
# FastAPI app, OAuth2 handler and template loader are module-level
# singletons shared by every route below
app = FastAPI()
basepath = setBasePath(mode)
oauth2_handler = inituserOauth(basepath)
app.auth = oauth2_handler
templates = Jinja2Templates(directory='templates/jinja')
@app.get("/")
async def home(request: Request):
try:
authorization_url = app.auth.get_authorization_url()
return templates.TemplateResponse('index_j.html', {"request": request, "user_auth_link": authorization_url})
except:
return templates.TemplateResponse('error.html', {"request": request})
@app.get('/return-get', response_class=RedirectResponse)
async def results(request: Request, background_tasks: BackgroundTasks):
    """OAuth callback: exchange the code for a token, kick off the scrape.

    On success, stores username and access token in cookies and redirects
    to /return-get_2 while getTweets runs in the background.
    """
    try:
        access_token = app.auth.fetch_token(str(request.url))
        client = tweepy.Client(access_token['access_token'])
    except Exception as e:
        print(e)
        print(request.url)
        return templates.TemplateResponse('auth_failed.html', {"request": request})
    user = client.get_me(user_auth=False)
    username = user.data.username
    user_id = user.data.id
    # response.set_cookie(key="user_id", value=user_id)
    response = RedirectResponse(url="/return-get_2")
    response.set_cookie("username", str(username))
    response.set_cookie(key="access_token", value=access_token['access_token'])
    # Begin Timeline scrape
    print(f'beginning scrape: {username}')
    background_tasks.add_task(getTweets, user_id=user_id, client=client, username=username)
    return response
@app.get('/return-get_2')
# NOTE(review): this shadows the /return-get handler's function name
# "results"; FastAPI routes by decorator so it still works, but renaming
# would aid debugging/tracebacks.
async def results(request: Request, username: Optional[str] = Cookie(None)):
    return templates.TemplateResponse('account_val.html', {"request": request, "user": username,
                                                           "pc_msg": ''})
@app.post("/checkout")
async def userInput(request: Request, username: Optional[str] = Cookie(None)):
try:
# Collect User Input
body = await request.body()
inputPC = body.decode('UTF-8').split('=')[1].strip()
approvedPCs = os.environ['PROMO_CODES'].split(',')
# Check if promocode entered
if len(inputPC) > 0:
if inputPC in approvedPCs:
return templates.TemplateResponse('payment_val.html', {"request": request, "user": Cookie('user')})
else:
return templates.TemplateResponse('account_val.html', {"request": request, "user": username,
"pc_msg": 'Incorrect promocode. Please try again.'})
# If no promocode, then stripe checkout
else:
checkout_session = stripe.checkout.Session.create(
success_url=basepath + "/success?session_id={CHECKOUT_SESSION_ID}",
cancel_url=basepath,
payment_method_types=["card"],
mode="payment",
line_items=[{
"price": price,
"quantity": 1
}], )
return RedirectResponse(checkout_session.url, status_code=303)
except Exception as e:
print(e)
return templates.TemplateResponse('error.html', {"request": request})
@app.get("/success")
async def success(request: Request):
return templates.TemplateResponse('payment_val.html', {"request": request, "user": Cookie('user')})
@app.get("/free_mode")
async def success(request: Request):
return templates.TemplateResponse('free_mode.html', {"request": request})
@app.get("/learn_more")
async def read(request: Request, response: Response, ):
return templates.TemplateResponse('learn_more.html', {"request": request})
@app.get('/create-checkout-session')
async def create_checkout_session(request: Request):
    """Redirect straight to a Stripe checkout session (no promo-code path)."""
    checkout_session = stripe.checkout.Session.create(
        # {CHECKOUT_SESSION_ID} is a literal Stripe template placeholder
        success_url=basepath + "/success?session_id={CHECKOUT_SESSION_ID}",
        cancel_url=basepath,
        payment_method_types=["card"],
        mode="payment",
        line_items=[{
            "price": price,
            "quantity": 1
        }],
    )
    return RedirectResponse(checkout_session.url, status_code=303)
@app.get("/scan_tweets")
async def scan_tweets(request: Request, username: Optional[str] = Cookie(None)):
    """Render the scan-results page for the signed-in user.

    Pulls the user's queued tweets from the DB, clears the queue, and returns
    an HTML table with a checkbox per tweet ID.

    BUG FIXES vs. previous version: the first ``return`` made everything
    after it unreachable dead code and crashed with IndexError on an empty
    result set (``df['total_count'].values[0]``); the template also received
    ``Cookie('user')`` instead of the actual username.
    """
    # SECURITY: username comes from a client cookie; interpolating it into SQL
    # is injection-prone. Prefer a parameterized query (driver-aware change).
    query = (f"""
    SELECT *
    FROM tweets
    WHERE username = '{username}'""")
    df = pd.read_sql_query(query, db_engine)
    # Remove the fetched rows so a re-scan starts from a clean queue.
    db_engine.execute(f"DELETE FROM tweets WHERE username = '{username}'")
    try:
        # Tweet text is stored hex-encoded ("0x..."); decode back to UTF-8.
        df['Text'] = df['Text'].apply(lambda x: bytes.fromhex(x[2:]).decode('utf-8'))
    except ValueError:
        pass
    df = df.drop_duplicates()
    check_box = r"""<input type="checkbox" id="\1" name="tweet_id" value="\1">
              <label for="\1"> </label><br>"""
    # Replace each tweet ID (18-19 digits) with a checkbox, and restyle the
    # pandas-generated table for the page's CSS.
    out_table_html = str(re.sub(r'(\d{18,19})', check_box,
                         df.drop(columns=['date_full', 'occurance', 'username', 'total_count', 'index'],
                                 axis=1).to_html(index=False).replace(
                             '<td>', '<td align="center">').replace(
                             '<tr style="text-align: right;">', '<tr style="text-align: center;">').replace(
                             '<table border="1" class="dataframe">', '<table class="table">')))
    # Empty result set -> no total_count column value; fall back to zero.
    try:
        tc = str(df['total_count'].values[0])
    except Exception:
        tc = str(0)
    try:
        return templates.TemplateResponse('returnPage_j.html', {"request": request,
                                                            "p_count": str(df.shape[0]),
                                                            'table': out_table_html,
                                                            'total_count': tc,
                                                            'user': username})
    except Exception:
        return templates.TemplateResponse('error.html', {"request": request})
@app.post('/selectTweets')
async def selectTweets(request: Request, access_token: Optional[str] = Cookie(None)):
    """Delete the tweets whose IDs were checked on the results page.

    Expects a form body of ``tweet_id=<id>,<id>,...``. Up to 16 tweets are
    deleted per request; 17 or more renders the over-limit page instead.
    """
    try:
        client = tweepy.Client(access_token)
        body = await request.body()
        values = body.decode("utf-8").replace('tweet_id=', '').split(',')
        if values == [""]:
            # NOTE(review): no tweets selected falls through and returns None
            # (empty response body) — presumably a dedicated page was
            # intended here; confirm desired behavior.
            pass
        elif len(values) < 17:
            delete_failed_flag = False
            for v in values:
                try:
                    twitter_client = client
                    twitter_client.delete_tweet(v, user_auth=False)
                except:
                    # Best-effort: remember that at least one delete failed.
                    delete_failed_flag = True
            if delete_failed_flag:
                return templates.TemplateResponse('delete_failed.html', {'request': request})
            else:
                return templates.TemplateResponse('Tweets_deleted.html', {'request': request,
                                                                    'count': str(len(values))})
        elif len(values) >= 17:
            return templates.TemplateResponse('over_15.html', {'request': request})
    except:
        # Any unexpected failure (bad token, network, decode) shows the
        # generic error page rather than a 500.
        return templates.TemplateResponse('error.html', {"request": request})
if __name__ == '__main__':
    # Serve locally with uvicorn only in dev mode; production is expected to
    # run behind an external ASGI server.
    if os.environ['MODE'] == 'dev':
        uvicorn.run(app, host='0.0.0.0', port=4242)
|
[
"pandas.DataFrame",
"tweepy.Paginator",
"json.load",
"stripe.checkout.Session.create",
"fastapi.Cookie",
"tweepy.Client",
"fastapi.templating.Jinja2Templates",
"re.findall",
"uvicorn.run",
"datetime.datetime.date",
"pandas.read_sql_query",
"sqlalchemy.create_engine",
"fastapi.responses.RedirectResponse",
"datetime.datetime.now",
"os.getenv",
"fastapi.FastAPI"
] |
[((3701, 3748), 'sqlalchemy.create_engine', 'create_engine', (["os.environ['DB_URL']"], {'echo': '(False)'}), "(os.environ['DB_URL'], echo=False)\n", (3714, 3748), False, 'from sqlalchemy import create_engine\n'), ((3756, 3765), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (3763, 3765), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((3874, 3918), 'fastapi.templating.Jinja2Templates', 'Jinja2Templates', ([], {'directory': '"""templates/jinja"""'}), "(directory='templates/jinja')\n", (3889, 3918), False, 'from fastapi.templating import Jinja2Templates\n'), ((1006, 1018), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1015, 1018), False, 'import json\n'), ((2503, 2569), 'pandas.DataFrame', 'pd.DataFrame', (['tweets_out'], {'columns': "['Delete?', 'Text', 'date_full']"}), "(tweets_out, columns=['Delete?', 'Text', 'date_full'])\n", (2515, 2569), True, 'import pandas as pd\n'), ((2689, 2735), 'pandas.DataFrame', 'pd.DataFrame', (["out_df[out_df['occurance'] == 1]"], {}), "(out_df[out_df['occurance'] == 1])\n", (2701, 2735), True, 'import pandas as pd\n'), ((4840, 4877), 'fastapi.responses.RedirectResponse', 'RedirectResponse', ([], {'url': '"""/return-get_2"""'}), "(url='/return-get_2')\n", (4856, 4877), False, 'from fastapi.responses import RedirectResponse\n'), ((5284, 5296), 'fastapi.Cookie', 'Cookie', (['None'], {}), '(None)\n', (5290, 5296), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((5559, 5571), 'fastapi.Cookie', 'Cookie', (['None'], {}), '(None)\n', (5565, 5571), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((7466, 7690), 'stripe.checkout.Session.create', 'stripe.checkout.Session.create', ([], {'success_url': "(basepath + '/success?session_id={CHECKOUT_SESSION_ID}')", 'cancel_url': 'basepath', 'payment_method_types': "['card']", 'mode': '"""payment"""', 'line_items': "[{'price': price, 'quantity': 1}]"}), "(success_url=basepath +\n 
'/success?session_id={CHECKOUT_SESSION_ID}', cancel_url=basepath,\n payment_method_types=['card'], mode='payment', line_items=[{'price':\n price, 'quantity': 1}])\n", (7496, 7690), False, 'import stripe\n'), ((7771, 7826), 'fastapi.responses.RedirectResponse', 'RedirectResponse', (['checkout_session.url'], {'status_code': '(303)'}), '(checkout_session.url, status_code=303)\n', (7787, 7826), False, 'from fastapi.responses import RedirectResponse\n'), ((7920, 7932), 'fastapi.Cookie', 'Cookie', (['None'], {}), '(None)\n', (7926, 7932), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((8071, 8106), 'pandas.read_sql_query', 'pd.read_sql_query', (['query', 'db_engine'], {}), '(query, db_engine)\n', (8088, 8106), True, 'import pandas as pd\n'), ((10220, 10232), 'fastapi.Cookie', 'Cookie', (['None'], {}), '(None)\n', (10226, 10232), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((4466, 4509), 'tweepy.Client', 'tweepy.Client', (["access_token['access_token']"], {}), "(access_token['access_token'])\n", (4479, 4509), False, 'import tweepy\n'), ((10261, 10288), 'tweepy.Client', 'tweepy.Client', (['access_token'], {}), '(access_token)\n', (10274, 10288), False, 'import tweepy\n'), ((11368, 11411), 'uvicorn.run', 'uvicorn.run', (['app'], {'port': '(4242)', 'host': '"""0.0.0.0"""'}), "(app, port=4242, host='0.0.0.0')\n", (11379, 11411), False, 'import uvicorn\n'), ((1359, 1384), 'datetime.datetime.date', 'datetime.datetime.date', (['x'], {}), '(x)\n', (1381, 1384), False, 'import datetime\n'), ((1501, 1523), 'os.getenv', 'os.getenv', (['"""CLIENT_ID"""'], {}), "('CLIENT_ID')\n", (1510, 1523), False, 'import os\n'), ((1726, 1752), 'os.getenv', 'os.getenv', (['"""CLIENT_SECRET"""'], {}), "('CLIENT_SECRET')\n", (1735, 1752), False, 'import os\n'), ((2239, 2365), 'tweepy.Paginator', 'tweepy.Paginator', (['twitter_client.get_users_tweets'], {'id': 'user_id', 'tweet_fields': "['id', 'text', 'created_at']", 
'max_results': '(100)'}), "(twitter_client.get_users_tweets, id=user_id, tweet_fields=\n ['id', 'text', 'created_at'], max_results=100)\n", (2255, 2365), False, 'import tweepy\n'), ((6335, 6559), 'stripe.checkout.Session.create', 'stripe.checkout.Session.create', ([], {'success_url': "(basepath + '/success?session_id={CHECKOUT_SESSION_ID}')", 'cancel_url': 'basepath', 'payment_method_types': "['card']", 'mode': '"""payment"""', 'line_items': "[{'price': price, 'quantity': 1}]"}), "(success_url=basepath +\n '/success?session_id={CHECKOUT_SESSION_ID}', cancel_url=basepath,\n payment_method_types=['card'], mode='payment', line_items=[{'price':\n price, 'quantity': 1}])\n", (6365, 6559), False, 'import stripe\n'), ((6708, 6763), 'fastapi.responses.RedirectResponse', 'RedirectResponse', (['checkout_session.url'], {'status_code': '(303)'}), '(checkout_session.url, status_code=303)\n', (6724, 6763), False, 'from fastapi.responses import RedirectResponse\n'), ((7033, 7047), 'fastapi.Cookie', 'Cookie', (['"""user"""'], {}), "('user')\n", (7039, 7047), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((1193, 1225), 're.findall', 're.findall', (['bad_words_pattern', 'x'], {}), '(bad_words_pattern, x)\n', (1203, 1225), False, 'import re\n'), ((10013, 10027), 'fastapi.Cookie', 'Cookie', (['"""user"""'], {}), "('user')\n", (10019, 10027), False, 'from fastapi import FastAPI, Request, BackgroundTasks, Response, Cookie\n'), ((3077, 3100), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3098, 3100), False, 'import datetime\n'), ((3172, 3195), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3193, 3195), False, 'import datetime\n'), ((3271, 3294), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3292, 3294), False, 'import datetime\n'), ((5973, 5987), 'fastapi.Cookie', 'Cookie', (['"""user"""'], {}), "('user')\n", (5979, 5987), False, 'from fastapi import FastAPI, Request, 
BackgroundTasks, Response, Cookie\n')]
|
# August 21st 2018
# Author: <NAME>
# University of Guelph Masters Graduate
# This module is an OpenSim tool created for Static optimization and Computed Muscle Control data to achieve Joint Reaction forces and loads in model
def run(setup,resultsDirectory):
import os
import re
import shutil
import opensim as osim
import directories
allDir = list(directories.main(directories))
paramsDir = allDir[1]
subID = allDir[4]
subResultsDir = allDir[5]
# ikResultsDir = allDir[6]
# idResultsDir = allDir[7]
# soResultsDir = allDir[8]
# cmcResultsDir = allDir[10]
# jrResultsDir = allDir[11]
# # actuatorFile = paramsDir + "/soActuators.xml"
# # genericSetupSO = paramsDir + "/" + "setupSO.xml"
# ikFileName = "subject01_walk1_ik.mot"
# ikFile = ikResultsDir + "/" + ikFileName
# # soForces = soResultsDir + "/" + "subject01_walk1_StaticOptimization_force.sto"
# if os.path.exists(jrResultsDir):
# shutil.rmtree(jrResultsDir, ignore_errors=True)
# if not os.path.exists(jrResultsDir):
# os.mkdir(jrResultsDir)
# # Load Model
aModel = osim.Model(subResultsDir + "/" + subID + ".osim")
# # initialize system
aModel.initSystem()
# # Initialize External Loads File from Generic File
# extLoads = idResultsDir + "/subject01_walk1_extLoads.xml"
# # Get .mot data to determine time range
# motCoordsData = osim.Storage(ikFile)
# # Get initial and final time
# initial_time = motCoordsData.getFirstTime()
# final_time = motCoordsData.getLastTime()
# Analyze Tool Setup for Static Optimization
analyzeTool = osim.AnalyzeTool(setup)
analyzeTool.setModel(aModel)
analyzeTool.setResultsDir(resultsDirectory)
analyzeTool.run()
# analyzeTool = osim.AnalyzeTool(cmcJrSetup)
# analyzeTool.setExternalLoadsFileName(extLoads)
# analyzeTool.setInitialTime(initial_time)
# analyzeTool.setFinalTime(final_time)
# analyzeTool.setLowpassCutoffFrequency(6)
# analyzeTool.setOutputPrecision(20)
# myForceSet = osim.ForceSet(aModel, actuatorFile)
# for i in range(myForceSet.getSize()):
# aModel.updForceSet().append(myForceSet.get(i))
# print(aModel.getForceSet().getSize())
# analysisSet = analyzeTool.getAnalysisSet()
# myForceSetArray = analyzeTool.getForceSetFiles()
# myForceSetArray.set(0, "")
# analyzeTool.setReplaceForceSet(False)
# analyzeTool.setForceSetFiles(myForceSetArray)
# # Joint Reaction Analysis
# jrTool = osim.JointReaction(jrSetup)
# analysisSet.cloneAndAppend(jrTool)
# # Set coordinates
# coordtype = "mot"
# if coordtype == "mot":
# analyzeTool.setStatesFileName("")
# analyzeTool.setCoordinatesFileName(ikFile)
# elif coordtype == "states":
# analyzeTool.setStatesFileName(ikFile)
# analyzeTool.setCoordinatesFileName("")
# analyzeTool.verifyControlsStates()
# analyzeTool.setResultsDir(jrResultsDir)
# # analyzeTool.printToXML(paramsDir +"/setupJR.xml")
# analyzeTool.run()
return()
os.system('cls' if os.name == 'nt' else 'clear')
|
[
"opensim.AnalyzeTool",
"directories.main",
"os.system",
"opensim.Model"
] |
[((1171, 1220), 'opensim.Model', 'osim.Model', (["(subResultsDir + '/' + subID + '.osim')"], {}), "(subResultsDir + '/' + subID + '.osim')\n", (1181, 1220), True, 'import opensim as osim\n'), ((1693, 1716), 'opensim.AnalyzeTool', 'osim.AnalyzeTool', (['setup'], {}), '(setup)\n', (1709, 1716), True, 'import opensim as osim\n'), ((3187, 3235), 'os.system', 'os.system', (["('cls' if os.name == 'nt' else 'clear')"], {}), "('cls' if os.name == 'nt' else 'clear')\n", (3196, 3235), False, 'import os\n'), ((387, 416), 'directories.main', 'directories.main', (['directories'], {}), '(directories)\n', (403, 416), False, 'import directories\n')]
|
from pathlib import Path
from alembic.command import upgrade
from alembic.config import Config
def make_config(dir_: Path, url_: str, config_='alembic.ini'):
    """Build an Alembic ``Config`` pointed at the given migrations directory.

    :param dir_: migrations script directory
    :param url_: sqlalchemy database url
    :param config_: name of the ini file inside ``dir_``
    :return: configured :class:`alembic.config.Config`
    """
    cfg = Config(file_=dir_ / config_)
    cfg.set_main_option('script_location', str(dir_))
    cfg.set_main_option('sqlalchemy.url', url_)
    # Let the host application own logging setup instead of alembic.ini.
    cfg.attributes['configure_logger'] = False
    return cfg
def migrate(url: str):
    """Upgrade the database at ``url`` to the latest ('head') revision."""
    migrations_dir = Path(__file__).parent
    upgrade(make_config(migrations_dir, url, 'alembic.ini'), 'head')
|
[
"alembic.config.Config",
"alembic.command.upgrade",
"pathlib.Path"
] |
[((383, 408), 'alembic.config.Config', 'Config', ([], {'file_': 'config_file'}), '(file_=config_file)\n', (389, 408), False, 'from alembic.config import Config\n'), ((735, 758), 'alembic.command.upgrade', 'upgrade', (['config', '"""head"""'], {}), "(config, 'head')\n", (742, 758), False, 'from alembic.command import upgrade\n'), ((637, 651), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (641, 651), False, 'from pathlib import Path\n')]
|
import json
import os
from freezegun import freeze_time
import pytest
import responses
import time
from nightfall.api import Nightfall, NightfallUserError
from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, \
WordList, MatchType, RedactionConfig, MaskConfig, Regex
from nightfall.findings import Finding, Range
@pytest.fixture
def nightfall():
    # Yield a client authenticated with the NIGHTFALL_API_KEY env var
    # (used by the integration tests below).
    yield Nightfall(os.environ['NIGHTFALL_API_KEY'])
@pytest.mark.integration
def test_scan_text_detection_rules_v3(nightfall):
    """Integration: scan_text with inline detection rules (context, exclusion
    and redaction configs) returns masked/substituted findings for a credit
    card number and an SSN, plus the redacted payload."""
    result, redactions = nightfall.scan_text(
        ["4916-6734-7572-5015 is my credit card number, 489-36-8350 ssn"],
        detection_rules=[
            DetectionRule(logical_op=LogicalOp.ANY, detectors=[
                Detector(min_confidence=Confidence.LIKELY,
                         min_num_findings=1,
                         display_name="Credit Card Number",
                         nightfall_detector="CREDIT_CARD_NUMBER",
                         context_rules=[ContextRule(regex=Regex("fake regex", is_case_sensitive=False),
                                                    window_before=10, window_after=10,
                                                    fixed_confidence=Confidence.VERY_UNLIKELY)],
                         exclusion_rules=[ExclusionRule(MatchType.FULL,
                                                        word_list=WordList(["never", "match"],
                                                                           is_case_sensitive=True))],
                         redaction_config=RedactionConfig(remove_finding=False,
                                                          mask_config=MaskConfig(masking_char='👀',
                                                                                num_chars_to_leave_unmasked=3,
                                                                                chars_to_ignore=["-"])),
                         ),
                Detector(min_confidence=Confidence.LIKELY, nightfall_detector="US_SOCIAL_SECURITY_NUMBER")])],
        context_bytes=10,
        default_redaction_config=RedactionConfig(remove_finding=False, substitution_phrase="[REDACTED]")
    )
    assert len(result) == 1
    assert len(result[0]) == 2
    # Findings may come back in any order; sort by start offset before comparing.
    def finding_orderer(f):
        return f.codepoint_range.start
    result[0].sort(key=finding_orderer)
    assert result[0][0] == Finding(
        "4916-6734-7572-5015",
        "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀",
        None, " is my cre",
        "Credit Card Number",
        result[0][0].detector_uuid,
        Confidence.VERY_LIKELY,
        Range(0, 19), Range(0, 19), "",
        [], ["Inline Detection Rule #1"])
    assert result[0][1] == Finding(
        "489-36-8350",
        "[REDACTED]",
        "d number, ", " ssn",
        "",
        result[0][1].detector_uuid,
        Confidence.VERY_LIKELY,
        Range(46, 57), Range(46, 57), "",
        [], ["Inline Detection Rule #1"])
    assert len(redactions) == 1
    assert redactions[0] == "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn"
@pytest.mark.filetest
@pytest.mark.integration
def test_scan_file_detection_rules(nightfall, tmpdir):
    """Integration: scan_file uploads a temp file with an inline detection
    rule and reports that the asynchronous scan was initiated (results go
    to the WEBHOOK_ENDPOINT env var URL)."""
    file = tmpdir.mkdir("test_data").join("file.txt")
    file.write("4916-6734-7572-5015 is my credit card number")
    id, message = nightfall.scan_file(
        file,
        os.environ['WEBHOOK_ENDPOINT'],
        detection_rules=[DetectionRule(logical_op=LogicalOp.ANY, detectors=[
            Detector(min_confidence=Confidence.LIKELY, min_num_findings=1,
                     display_name="Credit Card Number", nightfall_detector="CREDIT_CARD_NUMBER")])]
    )
    assert id is not None
    assert message == 'scan initiated'
@responses.activate
def test_scan_text():
    """Unit test: scan_text against a mocked /v3/scan response.

    Verifies (a) the exact JSON request body built from inline detection
    rules, and (b) the parsing of the mocked response into Finding objects
    and the redacted payload."""
    nightfall = Nightfall("NF-NOT_REAL")
    # Mocked API response: one credit-card finding (masked) and one SSN
    # finding (substituted), plus the fully redacted payload.
    responses.add(responses.POST, 'https://api.nightfall.ai/v3/scan',
                  json={
                      "findings":
                      [
                          [
                              {
                                  "finding": "4916-6734-7572-5015",
                                  "redactedFinding": "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀",
                                  "afterContext": " is my cre",
                                  "detector":
                                  {
                                      "name": "Credit Card Number",
                                      "uuid": "74c1815e-c0c3-4df5-8b1e-6cf98864a454"
                                  },
                                  "confidence": "VERY_LIKELY",
                                  "location":
                                  {
                                      "byteRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      },
                                      "codepointRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      }
                                  },
                                  "redactedLocation":
                                  {
                                      "byteRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      },
                                      "codepointRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      }
                                  },
                                  "matchedDetectionRuleUUIDs":
                                  [],
                                  "matchedDetectionRules":
                                  [
                                      "Inline Detection Rule #1"
                                  ]
                              },
                              {
                                  "finding": "489-36-8350",
                                  "redactedFinding": "[REDACTED]",
                                  "beforeContext": "d number, ",
                                  "afterContext": " ssn",
                                  "detector":
                                  {
                                      "name": "",
                                      "uuid": "e30d9a87-f6c7-46b9-a8f4-16547901e069"
                                  },
                                  "confidence": "VERY_LIKELY",
                                  "location":
                                  {
                                      "byteRange":
                                      {
                                          "start": 46,
                                          "end": 57
                                      },
                                      "codepointRange":
                                      {
                                          "start": 46,
                                          "end": 57
                                      }
                                  },
                                  "redactedLocation":
                                  {
                                      "byteRange":
                                      {
                                          "start": 46,
                                          "end": 56
                                      },
                                      "codepointRange":
                                      {
                                          "start": 46,
                                          "end": 56
                                      }
                                  },
                                  "matchedDetectionRuleUUIDs":
                                  [],
                                  "matchedDetectionRules":
                                  [
                                      "Inline Detection Rule #1"
                                  ]
                              }
                          ]
                      ],
                      "redactedPayload":
                      [
                          "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn"
                      ]
                  })
    result, redactions = nightfall.scan_text(
        ["4916-6734-7572-5015 is my credit card number, 489-36-8350 ssn"],
        detection_rules=[
            DetectionRule(logical_op=LogicalOp.ANY, detectors=[
                Detector(min_confidence=Confidence.LIKELY,
                         min_num_findings=1,
                         display_name="Credit Card Number",
                         nightfall_detector="CREDIT_CARD_NUMBER",
                         context_rules=[ContextRule(regex=Regex("fake regex", is_case_sensitive=False),
                                                    window_before=10, window_after=10,
                                                    fixed_confidence=Confidence.VERY_UNLIKELY)],
                         exclusion_rules=[ExclusionRule(MatchType.FULL,
                                                        word_list=WordList(["never", "match"],
                                                                           is_case_sensitive=True))],
                         redaction_config=RedactionConfig(remove_finding=False,
                                                          mask_config=MaskConfig(masking_char='👀',
                                                                                num_chars_to_leave_unmasked=3,
                                                                                chars_to_ignore=["-"])),
                         ),
                Detector(min_confidence=Confidence.LIKELY, nightfall_detector="US_SOCIAL_SECURITY_NUMBER")])],
        context_bytes=10,
        default_redaction_config=RedactionConfig(remove_finding=False, substitution_phrase="[REDACTED]")
    )
    assert len(responses.calls) == 1
    assert responses.calls[0].request.headers.get("Authorization") == "Bearer NF-NOT_REAL"
    # The serialized request body must match the wire format exactly.
    assert json.loads(responses.calls[0].request.body) == {
        "payload":
        [
            "4916-6734-7572-5015 is my credit card number, 489-36-8350 ssn"
        ],
        "policy":
        {
            "detectionRules":
            [
                {
                    "detectors":
                    [
                        {
                            "minConfidence": "LIKELY",
                            "minNumFindings": 1,
                            "nightfallDetector": "CREDIT_CARD_NUMBER",
                            "detectorType": "NIGHTFALL_DETECTOR",
                            "displayName": "Credit Card Number",
                            "contextRules":
                            [
                                {
                                    "regex":
                                    {
                                        "pattern": "fake regex",
                                        "isCaseSensitive": False
                                    },
                                    "proximity":
                                    {
                                        "windowBefore": 10,
                                        "windowAfter": 10
                                    },
                                    "confidenceAdjustment":
                                    {
                                        "fixedConfidence": "VERY_UNLIKELY"
                                    }
                                }
                            ],
                            "exclusionRules":
                            [
                                {
                                    "matchType": "FULL",
                                    "wordList":
                                    {
                                        "values":
                                        [
                                            "never",
                                            "match"
                                        ],
                                        "isCaseSensitive": True
                                    },
                                    "exclusionType": "WORD_LIST"
                                }
                            ],
                            "redactionConfig":
                            {
                                "removeFinding": False,
                                "maskConfig":
                                {
                                    "maskingChar": "👀",
                                    "numCharsToLeaveUnmasked": 3,
                                    "maskRightToLeft": False,
                                    "charsToIgnore":
                                    [
                                        "-"
                                    ]
                                }
                            }
                        },
                        {
                            "minConfidence": "LIKELY",
                            "minNumFindings": 1,
                            "nightfallDetector": "US_SOCIAL_SECURITY_NUMBER",
                            "detectorType": "NIGHTFALL_DETECTOR"
                        }
                    ],
                    "logicalOp": "ANY"
                }
            ],
            "contextBytes": 10,
            "defaultRedactionConfig":
            {
                "removeFinding": False,
                "substitutionConfig":
                {
                    "substitutionPhrase": "[REDACTED]"
                }
            }
        }
    }
    assert len(result) == 1
    assert len(result[0]) == 2
    assert result[0][0] == Finding(
        "4916-6734-7572-5015",
        '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀',
        None, " is my cre",
        "Credit Card Number",
        result[0][0].detector_uuid,
        Confidence.VERY_LIKELY,
        Range(0, 19), Range(0, 19), "",
        [], ["Inline Detection Rule #1"])
    assert result[0][1] == Finding(
        "489-36-8350",
        "[REDACTED]",
        "d number, ", " ssn",
        "",
        result[0][1].detector_uuid,
        Confidence.VERY_LIKELY,
        Range(46, 57), Range(46, 57), "",
        [], ["Inline Detection Rule #1"])
    assert len(redactions) == 1
    assert redactions[0] == "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn"
@responses.activate
def test_scan_text_with_policy_uuids():
    """Unit test: scan_text with pre-configured policy UUIDs (no inline rules)
    sends only payload + policyUUIDs and parses the mocked finding, including
    the matched detection-rule UUID."""
    nightfall = Nightfall("NF-NOT_REAL")
    # Mocked API response: a single credit-card finding matched via a
    # server-side detection-rule UUID.
    responses.add(responses.POST, 'https://api.nightfall.ai/v3/scan',
                  json={
                      "findings":
                      [
                          [
                              {
                                  "finding": "4916-6734-7572-5015",
                                  "redactedFinding": "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀",
                                  "afterContext": " is my cre",
                                  "detector":
                                  {
                                      "name": "Credit Card Number",
                                      "uuid": "74c1815e-c0c3-4df5-8b1e-6cf98864a454"
                                  },
                                  "confidence": "VERY_LIKELY",
                                  "location":
                                  {
                                      "byteRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      },
                                      "codepointRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      }
                                  },
                                  "redactedLocation":
                                  {
                                      "byteRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      },
                                      "codepointRange":
                                      {
                                          "start": 0,
                                          "end": 19
                                      }
                                  },
                                  "matchedDetectionRuleUUIDs":
                                  ["0d8efd7b-b87a-478b-984e-9cf5534a46bc"],
                                  "matchedDetectionRules":
                                  []
                              },
                          ]
                      ],
                      "redactedPayload":
                      [
                          "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn"
                      ]
                  })
    result, redactions = nightfall.scan_text(
        ["4916-6734-7572-5015 is my credit card number, 489-36-8350 ssn"],
        policy_uuids=["2388f83f-cd31-4689-971b-4ee94f798281"]
    )
    assert len(responses.calls) == 1
    assert responses.calls[0].request.headers.get("Authorization") == "Bearer NF-NOT_REAL"
    # Only payload and policyUUIDs should be sent when no inline rules are given.
    assert json.loads(responses.calls[0].request.body) == {
        "payload":
        [
            "4916-6734-7572-5015 is my credit card number, 489-36-8350 ssn"
        ],
        "policyUUIDs": ["2388f83f-cd31-4689-971b-4ee94f798281"]
    }
    assert len(result) == 1
    assert len(result[0]) == 1
    assert result[0][0] == Finding(
        "4916-6734-7572-5015",
        '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀',
        None, " is my cre",
        "Credit Card Number",
        result[0][0].detector_uuid,
        Confidence.VERY_LIKELY,
        Range(0, 19), Range(0, 19), "",
        ["0d8efd7b-b87a-478b-984e-9cf5534a46bc"], [])
    assert len(redactions) == 1
    assert redactions[0] == "491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn"
def test_scan_text_no_detection_rules_or_policy_uuids():
    """scan_text must reject a request carrying neither detection rules nor policy UUIDs."""
    client = Nightfall("NF-NOT_REAL")
    with pytest.raises(NightfallUserError):
        client.scan_text(texts=["will", "fail"])
@responses.activate
def test_scan_file(tmpdir):
    """Unit test: the full file-scan flow (init upload, chunked PATCHes,
    finish, scan) against mocked endpoints; verifies each request's auth
    header, body, and upload offset."""
    file = tmpdir.mkdir("test_data").join("file.txt")
    file.write("4916-6734-7572-5015 is my credit card number")
    nightfall = Nightfall("NF-NOT_REAL")
    # chunkSize of 22 splits the 44-byte file into exactly two PATCH uploads.
    responses.add(responses.POST, 'https://api.nightfall.ai/v3/upload', status=200, json={"id": 1, "chunkSize": 22})
    responses.add(responses.PATCH, 'https://api.nightfall.ai/v3/upload/1', status=204)
    responses.add(responses.POST, 'https://api.nightfall.ai/v3/upload/1/finish', status=200)
    responses.add(responses.POST, 'https://api.nightfall.ai/v3/upload/1/scan', status=200,
                  json={"id": 1, "message": "scan_started"})
    id, message = nightfall.scan_file(file, "https://my-website.example/callback", detection_rule_uuids=["a_uuid"],
                                      request_metadata="some test data")
    assert len(responses.calls) == 5
    for call in responses.calls:
        assert call.request.headers.get("Authorization") == "Bearer NF-NOT_REAL"
    assert responses.calls[0].request.body == b'{"fileSizeBytes": 44}'
    assert responses.calls[1].request.body == b"4916-6734-7572-5015 is"
    assert responses.calls[1].request.headers.get("X-UPLOAD-OFFSET") == '0'
    assert responses.calls[2].request.body == b" my credit card number"
    assert responses.calls[2].request.headers.get("X-UPLOAD-OFFSET") == '22'
    assert responses.calls[4].request.body == b'{"policy": {"webhookURL": "https://my-website.example/callback", ' \
                                              b'"detectionRuleUUIDs": ["a_uuid"]}, "requestMetadata": "some test data"}'
    assert id == 1
    assert message == "scan_started"
@responses.activate
def test_file_scan_upload_short(tmpdir):
    """A file smaller than the chunk size is uploaded in a single PATCH."""
    sample = tmpdir.mkdir("test_data").join("file.txt")
    sample.write("4916-6734-7572-5015 is my credit card number")
    responses.add(responses.PATCH, 'https://api.nightfall.ai/v3/upload/1', status=204)
    client = Nightfall("NF-NOT_REAL")
    assert client._file_scan_upload(1, sample, 200)
    assert len(responses.calls) == 1
    upload_request = responses.calls[0].request
    assert upload_request.headers.get("Authorization") == "Bearer NF-NOT_REAL"
    assert upload_request.body == b"4916-6734-7572-5015 is my credit card number"
    assert upload_request.headers.get("X-UPLOAD-OFFSET") == "0"
@responses.activate
def test_file_scan_upload_long(tmpdir):
    """With a 1-byte chunk size, each byte goes out as its own PATCH with an
    increasing X-UPLOAD-OFFSET header."""
    payload = b"4916-6734-7572-5015 is my credit card number"
    sample = tmpdir.mkdir("test_data").join("file.txt")
    sample.write_binary(payload)
    responses.add(responses.PATCH, 'https://api.nightfall.ai/v3/upload/1', status=204)
    client = Nightfall("NF-NOT_REAL")
    assert client._file_scan_upload(1, sample, 1)
    assert len(responses.calls) == len(payload)
    text = payload.decode('utf-8')
    for offset, call in enumerate(responses.calls):
        assert call.request.headers.get("Authorization") == "Bearer NF-NOT_REAL"
        assert call.request.body.decode('utf-8') == text[offset]
        assert call.request.headers.get("X-UPLOAD-OFFSET") == str(offset)
@freeze_time("2021-10-04T17:30:50Z")
def test_validate_webhook(nightfall):
    """A correct HMAC signature with a fresh timestamp validates."""
    nightfall.signing_secret = "super-secret-shhhh"
    request_timestamp = 1633368645
    payload = "hello world foo bar goodnight moon"
    signature = "1bb7619a9504474ffc14086d0423ad15db42606d3ca52afccb4a5b2125d7b703"
    assert nightfall.validate_webhook(signature, request_timestamp, payload)
@freeze_time("2021-10-04T19:30:50Z")
def test_validate_webhook_too_old(nightfall):
    """An otherwise-correct signature is rejected once the timestamp is stale
    (clock frozen two hours after the signed time)."""
    nightfall.signing_secret = "super-secret-shhhh"
    request_timestamp = 1633368645
    payload = "hello world foo bar goodnight moon"
    signature = "1bb7619a9504474ffc14086d0423ad15db42606d3ca52afccb4a5b2125d7b703"
    assert not nightfall.validate_webhook(signature, request_timestamp, payload)
@freeze_time("2021-10-04T17:30:50Z")
def test_validate_webhook_incorrect_sig(nightfall):
    """A fresh timestamp with a wrong signature is rejected."""
    nightfall.signing_secret = "super-secret-shhhh"
    request_timestamp = 1633368645
    payload = "hello world foo bar goodnight moon"
    signature = "not matching"
    assert not nightfall.validate_webhook(signature, request_timestamp, payload)
|
[
"nightfall.api.Nightfall",
"json.loads",
"responses.add",
"nightfall.detection_rules.RedactionConfig",
"nightfall.findings.Range",
"nightfall.detection_rules.Detector",
"nightfall.detection_rules.MaskConfig",
"pytest.raises",
"nightfall.detection_rules.WordList",
"freezegun.freeze_time",
"nightfall.detection_rules.Regex"
] |
[((24054, 24089), 'freezegun.freeze_time', 'freeze_time', (['"""2021-10-04T17:30:50Z"""'], {}), "('2021-10-04T17:30:50Z')\n", (24065, 24089), False, 'from freezegun import freeze_time\n'), ((24405, 24440), 'freezegun.freeze_time', 'freeze_time', (['"""2021-10-04T19:30:50Z"""'], {}), "('2021-10-04T19:30:50Z')\n", (24416, 24440), False, 'from freezegun import freeze_time\n'), ((24768, 24803), 'freezegun.freeze_time', 'freeze_time', (['"""2021-10-04T17:30:50Z"""'], {}), "('2021-10-04T17:30:50Z')\n", (24779, 24803), False, 'from freezegun import freeze_time\n'), ((3852, 3876), 'nightfall.api.Nightfall', 'Nightfall', (['"""NF-NOT_REAL"""'], {}), "('NF-NOT_REAL')\n", (3861, 3876), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((3881, 5158), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.nightfall.ai/v3/scan"""'], {'json': "{'findings': [[{'finding': '4916-6734-7572-5015', 'redactedFinding':\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀', 'afterContext': ' is my cre', 'detector': {\n 'name': 'Credit Card Number', 'uuid':\n '74c1815e-c0c3-4df5-8b1e-6cf98864a454'}, 'confidence': 'VERY_LIKELY',\n 'location': {'byteRange': {'start': 0, 'end': 19}, 'codepointRange': {\n 'start': 0, 'end': 19}}, 'redactedLocation': {'byteRange': {'start': 0,\n 'end': 19}, 'codepointRange': {'start': 0, 'end': 19}},\n 'matchedDetectionRuleUUIDs': [], 'matchedDetectionRules': [\n 'Inline Detection Rule #1']}, {'finding': '489-36-8350',\n 'redactedFinding': '[REDACTED]', 'beforeContext': 'd number, ',\n 'afterContext': ' ssn', 'detector': {'name': '', 'uuid':\n 'e30d9a87-f6c7-46b9-a8f4-16547901e069'}, 'confidence': 'VERY_LIKELY',\n 'location': {'byteRange': {'start': 46, 'end': 57}, 'codepointRange': {\n 'start': 46, 'end': 57}}, 'redactedLocation': {'byteRange': {'start': \n 46, 'end': 56}, 'codepointRange': {'start': 46, 'end': 56}},\n 'matchedDetectionRuleUUIDs': [], 'matchedDetectionRules': [\n 'Inline Detection Rule #1']}]], 'redactedPayload': [\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 
is my credit card number, [REDACTED] ssn']}"}), "(responses.POST, 'https://api.nightfall.ai/v3/scan', json={\n 'findings': [[{'finding': '4916-6734-7572-5015', 'redactedFinding':\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀', 'afterContext': ' is my cre', 'detector': {\n 'name': 'Credit Card Number', 'uuid':\n '74c1815e-c0c3-4df5-8b1e-6cf98864a454'}, 'confidence': 'VERY_LIKELY',\n 'location': {'byteRange': {'start': 0, 'end': 19}, 'codepointRange': {\n 'start': 0, 'end': 19}}, 'redactedLocation': {'byteRange': {'start': 0,\n 'end': 19}, 'codepointRange': {'start': 0, 'end': 19}},\n 'matchedDetectionRuleUUIDs': [], 'matchedDetectionRules': [\n 'Inline Detection Rule #1']}, {'finding': '489-36-8350',\n 'redactedFinding': '[REDACTED]', 'beforeContext': 'd number, ',\n 'afterContext': ' ssn', 'detector': {'name': '', 'uuid':\n 'e30d9a87-f6c7-46b9-a8f4-16547901e069'}, 'confidence': 'VERY_LIKELY',\n 'location': {'byteRange': {'start': 46, 'end': 57}, 'codepointRange': {\n 'start': 46, 'end': 57}}, 'redactedLocation': {'byteRange': {'start': \n 46, 'end': 56}, 'codepointRange': {'start': 46, 'end': 56}},\n 'matchedDetectionRuleUUIDs': [], 'matchedDetectionRules': [\n 'Inline Detection Rule #1']}]], 'redactedPayload': [\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn']})\n", (3894, 5158), False, 'import responses\n'), ((16861, 16885), 'nightfall.api.Nightfall', 'Nightfall', (['"""NF-NOT_REAL"""'], {}), "('NF-NOT_REAL')\n", (16870, 16885), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((16890, 17634), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.nightfall.ai/v3/scan"""'], {'json': "{'findings': [[{'finding': '4916-6734-7572-5015', 'redactedFinding':\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀', 'afterContext': ' is my cre', 'detector': {\n 'name': 'Credit Card Number', 'uuid':\n '74c1815e-c0c3-4df5-8b1e-6cf98864a454'}, 'confidence': 'VERY_LIKELY',\n 'location': {'byteRange': {'start': 0, 'end': 19}, 'codepointRange': {\n 'start': 0, 'end': 19}}, 
'redactedLocation': {'byteRange': {'start': 0,\n 'end': 19}, 'codepointRange': {'start': 0, 'end': 19}},\n 'matchedDetectionRuleUUIDs': ['0d8efd7b-b87a-478b-984e-9cf5534a46bc'],\n 'matchedDetectionRules': []}]], 'redactedPayload': [\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn']}"}), "(responses.POST, 'https://api.nightfall.ai/v3/scan', json={\n 'findings': [[{'finding': '4916-6734-7572-5015', 'redactedFinding':\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀', 'afterContext': ' is my cre', 'detector': {\n 'name': 'Credit Card Number', 'uuid':\n '74c1815e-c0c3-4df5-8b1e-6cf98864a454'}, 'confidence': 'VERY_LIKELY',\n 'location': {'byteRange': {'start': 0, 'end': 19}, 'codepointRange': {\n 'start': 0, 'end': 19}}, 'redactedLocation': {'byteRange': {'start': 0,\n 'end': 19}, 'codepointRange': {'start': 0, 'end': 19}},\n 'matchedDetectionRuleUUIDs': ['0d8efd7b-b87a-478b-984e-9cf5534a46bc'],\n 'matchedDetectionRules': []}]], 'redactedPayload': [\n '491👀-👀👀👀👀-👀👀👀👀-👀👀👀👀 is my credit card number, [REDACTED] ssn']})\n", (16903, 17634), False, 'import responses\n'), ((20895, 20919), 'nightfall.api.Nightfall', 'Nightfall', (['"""NF-NOT_REAL"""'], {}), "('NF-NOT_REAL')\n", (20904, 20919), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((21201, 21225), 'nightfall.api.Nightfall', 'Nightfall', (['"""NF-NOT_REAL"""'], {}), "('NF-NOT_REAL')\n", (21210, 21225), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((21230, 21347), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.nightfall.ai/v3/upload"""'], {'status': '(200)', 'json': "{'id': 1, 'chunkSize': 22}"}), "(responses.POST, 'https://api.nightfall.ai/v3/upload', status=\n 200, json={'id': 1, 'chunkSize': 22})\n", (21243, 21347), False, 'import responses\n'), ((21347, 21433), 'responses.add', 'responses.add', (['responses.PATCH', '"""https://api.nightfall.ai/v3/upload/1"""'], {'status': '(204)'}), "(responses.PATCH, 'https://api.nightfall.ai/v3/upload/1',\n status=204)\n", 
(21360, 21433), False, 'import responses\n'), ((21434, 21526), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.nightfall.ai/v3/upload/1/finish"""'], {'status': '(200)'}), "(responses.POST, 'https://api.nightfall.ai/v3/upload/1/finish',\n status=200)\n", (21447, 21526), False, 'import responses\n'), ((21527, 21660), 'responses.add', 'responses.add', (['responses.POST', '"""https://api.nightfall.ai/v3/upload/1/scan"""'], {'status': '(200)', 'json': "{'id': 1, 'message': 'scan_started'}"}), "(responses.POST, 'https://api.nightfall.ai/v3/upload/1/scan',\n status=200, json={'id': 1, 'message': 'scan_started'})\n", (21540, 21660), False, 'import responses\n'), ((22878, 22902), 'nightfall.api.Nightfall', 'Nightfall', (['"""NF-NOT_REAL"""'], {}), "('NF-NOT_REAL')\n", (22887, 22902), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((22908, 22994), 'responses.add', 'responses.add', (['responses.PATCH', '"""https://api.nightfall.ai/v3/upload/1"""'], {'status': '(204)'}), "(responses.PATCH, 'https://api.nightfall.ai/v3/upload/1',\n status=204)\n", (22921, 22994), False, 'import responses\n'), ((23559, 23645), 'responses.add', 'responses.add', (['responses.PATCH', '"""https://api.nightfall.ai/v3/upload/1"""'], {'status': '(204)'}), "(responses.PATCH, 'https://api.nightfall.ai/v3/upload/1',\n status=204)\n", (23572, 23645), False, 'import responses\n'), ((23659, 23683), 'nightfall.api.Nightfall', 'Nightfall', (['"""NF-NOT_REAL"""'], {}), "('NF-NOT_REAL')\n", (23668, 23683), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((424, 466), 'nightfall.api.Nightfall', 'Nightfall', (["os.environ['NIGHTFALL_API_KEY']"], {}), "(os.environ['NIGHTFALL_API_KEY'])\n", (433, 466), False, 'from nightfall.api import Nightfall, NightfallUserError\n'), ((11194, 11237), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (11204, 11237), False, 'import json\n'), ((20073, 20116), 
'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (20083, 20116), False, 'import json\n'), ((20929, 20962), 'pytest.raises', 'pytest.raises', (['NightfallUserError'], {}), '(NightfallUserError)\n', (20942, 20962), False, 'import pytest\n'), ((2201, 2272), 'nightfall.detection_rules.RedactionConfig', 'RedactionConfig', ([], {'remove_finding': '(False)', 'substitution_phrase': '"""[REDACTED]"""'}), "(remove_finding=False, substitution_phrase='[REDACTED]')\n", (2216, 2272), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((2680, 2692), 'nightfall.findings.Range', 'Range', (['(0)', '(19)'], {}), '(0, 19)\n', (2685, 2692), False, 'from nightfall.findings import Finding, Range\n'), ((2694, 2706), 'nightfall.findings.Range', 'Range', (['(0)', '(19)'], {}), '(0, 19)\n', (2699, 2706), False, 'from nightfall.findings import Finding, Range\n'), ((2953, 2966), 'nightfall.findings.Range', 'Range', (['(46)', '(57)'], {}), '(46, 57)\n', (2958, 2966), False, 'from nightfall.findings import Finding, Range\n'), ((2968, 2981), 'nightfall.findings.Range', 'Range', (['(46)', '(57)'], {}), '(46, 57)\n', (2973, 2981), False, 'from nightfall.findings import Finding, Range\n'), ((10976, 11047), 'nightfall.detection_rules.RedactionConfig', 'RedactionConfig', ([], {'remove_finding': '(False)', 'substitution_phrase': '"""[REDACTED]"""'}), "(remove_finding=False, substitution_phrase='[REDACTED]')\n", (10991, 11047), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((16312, 16324), 'nightfall.findings.Range', 'Range', (['(0)', '(19)'], {}), '(0, 19)\n', (16317, 16324), False, 'from nightfall.findings import Finding, Range\n'), ((16326, 16338), 'nightfall.findings.Range', 
'Range', (['(0)', '(19)'], {}), '(0, 19)\n', (16331, 16338), False, 'from nightfall.findings import Finding, Range\n'), ((16585, 16598), 'nightfall.findings.Range', 'Range', (['(46)', '(57)'], {}), '(46, 57)\n', (16590, 16598), False, 'from nightfall.findings import Finding, Range\n'), ((16600, 16613), 'nightfall.findings.Range', 'Range', (['(46)', '(57)'], {}), '(46, 57)\n', (16605, 16613), False, 'from nightfall.findings import Finding, Range\n'), ((20612, 20624), 'nightfall.findings.Range', 'Range', (['(0)', '(19)'], {}), '(0, 19)\n', (20617, 20624), False, 'from nightfall.findings import Finding, Range\n'), ((20626, 20638), 'nightfall.findings.Range', 'Range', (['(0)', '(19)'], {}), '(0, 19)\n', (20631, 20638), False, 'from nightfall.findings import Finding, Range\n'), ((2047, 2142), 'nightfall.detection_rules.Detector', 'Detector', ([], {'min_confidence': 'Confidence.LIKELY', 'nightfall_detector': '"""US_SOCIAL_SECURITY_NUMBER"""'}), "(min_confidence=Confidence.LIKELY, nightfall_detector=\n 'US_SOCIAL_SECURITY_NUMBER')\n", (2055, 2142), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((3557, 3700), 'nightfall.detection_rules.Detector', 'Detector', ([], {'min_confidence': 'Confidence.LIKELY', 'min_num_findings': '(1)', 'display_name': '"""Credit Card Number"""', 'nightfall_detector': '"""CREDIT_CARD_NUMBER"""'}), "(min_confidence=Confidence.LIKELY, min_num_findings=1, display_name\n ='Credit Card Number', nightfall_detector='CREDIT_CARD_NUMBER')\n", (3565, 3700), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((10822, 10917), 'nightfall.detection_rules.Detector', 'Detector', ([], {'min_confidence': 'Confidence.LIKELY', 'nightfall_detector': '"""US_SOCIAL_SECURITY_NUMBER"""'}), 
"(min_confidence=Confidence.LIKELY, nightfall_detector=\n 'US_SOCIAL_SECURITY_NUMBER')\n", (10830, 10917), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((1740, 1827), 'nightfall.detection_rules.MaskConfig', 'MaskConfig', ([], {'masking_char': '"""👀"""', 'num_chars_to_leave_unmasked': '(3)', 'chars_to_ignore': "['-']"}), "(masking_char='👀', num_chars_to_leave_unmasked=3, chars_to_ignore\n =['-'])\n", (1750, 1827), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((10515, 10602), 'nightfall.detection_rules.MaskConfig', 'MaskConfig', ([], {'masking_char': '"""👀"""', 'num_chars_to_leave_unmasked': '(3)', 'chars_to_ignore': "['-']"}), "(masking_char='👀', num_chars_to_leave_unmasked=3, chars_to_ignore\n =['-'])\n", (10525, 10602), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((1063, 1107), 'nightfall.detection_rules.Regex', 'Regex', (['"""fake regex"""'], {'is_case_sensitive': '(False)'}), "('fake regex', is_case_sensitive=False)\n", (1068, 1107), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((1447, 1499), 'nightfall.detection_rules.WordList', 'WordList', (["['never', 'match']"], {'is_case_sensitive': '(True)'}), "(['never', 'match'], is_case_sensitive=True)\n", (1455, 1499), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((9838, 9882), 'nightfall.detection_rules.Regex', 'Regex', (['"""fake 
regex"""'], {'is_case_sensitive': '(False)'}), "('fake regex', is_case_sensitive=False)\n", (9843, 9882), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n'), ((10222, 10274), 'nightfall.detection_rules.WordList', 'WordList', (["['never', 'match']"], {'is_case_sensitive': '(True)'}), "(['never', 'match'], is_case_sensitive=True)\n", (10230, 10274), False, 'from nightfall.detection_rules import DetectionRule, Detector, LogicalOp, Confidence, ExclusionRule, ContextRule, WordList, MatchType, RedactionConfig, MaskConfig, Regex\n')]
|
import datetime
import os
import wget
from parameters.GraphData import GraphData
import ssl
# Disable SSL certificate verification globally so the OSF download works
# behind broken/self-signed cert chains.
# NOTE(review): this weakens TLS security for EVERY https request made by
# this process -- confirm it is still required.
ssl._create_default_https_context = ssl._create_unverified_context
def get_germany_mobility(graph_en: str):
	"""Download (at most once per day) and parse the German county mobility
	baseline into a :class:`GraphData` object.

	The CSV is fetched from OSF into
	``Germany/mobility_counties_2019_baseline.csv`` and a dated
	``log/<YYYY-MM-DD>.germany`` marker file is written, so the download is
	refreshed at most once per calendar day.

	:param graph_en: graph name passed through to :class:`GraphData`
	:return: GraphData built from ``{day: [geojson-like feature, ...]}``
	"""
	out = 'Germany/mobility_counties_2019_baseline.csv'
	url = 'https://files.de-1.osf.io/v1/resources/n53cz/providers/osfstorage/5f2d5f5c021ce20041f429f4?action=download&direct&version=1'
	marker = 'log/' + datetime.date.today().strftime('%Y-%m-%d') + '.germany'
	if not os.path.exists(out) or not os.path.exists(marker):
		if os.path.exists(out):
			os.remove(out)
		wget.download(url, out)
		# Touch the dated marker file; the original code leaked this handle.
		with open(marker, 'w'):
			pass
	data = {}
	# Context manager so the CSV handle is always closed (was leaked before).
	with open(out) as csv_file:
		csv_file.readline()  # skip the header row
		for line in csv_file.readlines():  # day, from, to, activity_number e.g. 1, 1, 2, 123
			line_data = line[:-1].split(',')
			time = line_data[0]
			if time not in data:
				data[time] = []
			dict_temp = {
				'properties': {},
				'geometry': {
					"type": "Point",
					"coordinates": [],
				}
			}
			dict_temp['properties']['mobility'] = line_data[3]
			dict_temp['geometry']['coordinates'] = [line_data[1], line_data[2]]
			data[time].append(dict_temp)
	return GraphData(data, graph_en=graph_en)
def clear_germany_log(log_dir='log/'):
	"""Delete the dated ``*.germany`` download-marker files.

	The original implementation iterated ``os.walk`` and kept only the file
	list of the LAST directory visited, then removed those names with a
	hard-coded ``log/`` prefix -- wrong whenever subdirectories exist.
	This version only touches regular files directly inside *log_dir*.

	:param log_dir: directory holding the marker files (default ``log/``)
	"""
	# Mirror the old os.walk behaviour: a missing directory is a no-op.
	if not os.path.isdir(log_dir):
		return
	for filename in os.listdir(log_dir):
		path = os.path.join(log_dir, filename)
		if filename.split('.')[-1] == 'germany' and os.path.isfile(path):
			os.remove(path)
# Module side effect: refresh and parse the German mobility dataset.
# NOTE(review): this triggers a network download every time the module is
# imported/run on a new day -- confirm that is intended.
get_germany_mobility("germany")
|
[
"os.remove",
"os.walk",
"os.path.exists",
"datetime.date.today",
"wget.download",
"parameters.GraphData.GraphData"
] |
[((1307, 1341), 'parameters.GraphData.GraphData', 'GraphData', (['data'], {'graph_en': 'graph_en'}), '(data, graph_en=graph_en)\n', (1316, 1341), False, 'from parameters.GraphData import GraphData\n'), ((1400, 1415), 'os.walk', 'os.walk', (['"""log/"""'], {}), "('log/')\n", (1407, 1415), False, 'import os\n'), ((525, 544), 'os.path.exists', 'os.path.exists', (['out'], {}), '(out)\n', (539, 544), False, 'import os\n'), ((581, 604), 'wget.download', 'wget.download', (['url', 'out'], {}), '(url, out)\n', (594, 604), False, 'import wget\n'), ((405, 424), 'os.path.exists', 'os.path.exists', (['out'], {}), '(out)\n', (419, 424), False, 'import os\n'), ((558, 572), 'os.remove', 'os.remove', (['out'], {}), '(out)\n', (567, 572), False, 'import os\n'), ((1529, 1557), 'os.remove', 'os.remove', (["('log/' + filename)"], {}), "('log/' + filename)\n", (1538, 1557), False, 'import os\n'), ((633, 654), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (652, 654), False, 'import datetime\n'), ((456, 477), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (475, 477), False, 'import datetime\n')]
|
import json
from typing import Dict, List
from etk.knowledge_graph.schema import KGSchema
from etk.knowledge_graph.graph import Graph
from etk.knowledge_graph.subject import Subject
from etk.knowledge_graph.node import URI, Literal
from etk.utilities import deprecated
class KnowledgeGraph(Graph):
    """
    This class is a knowledge graph object, provides API for user to construct their kg.
    Add field and value to the kg object, analysis on provenance
    """

    def __init__(self, schema: KGSchema, doc):
        """Bind this graph to its source document and validation schema.

        Args:
            schema: KGSchema declaring the valid fields and their types.
            doc: originating document; its ``doc_id`` becomes the root subject URI.
        """
        super().__init__()
        self.origin_doc = doc
        self.schema = schema
        # Copy the ontology's namespace prefixes into this graph instance.
        self._fork_namespace_manager()

    @deprecated()
    def add_value(self, field_name: str, value: object=None) -> None:
        """
        Add a value to knowledge graph.
        Input can either be a value or a json_path. If the input is json_path, the helper function _add_doc_value is
        called.
        If the input is a value, then it is handled

        Args:
            field_name: str, the field name in the knowledge graph
            value: the value to be added to the knowledge graph
        """
        # Make sure a default namespace exists before minting URIs.
        if not self._ns.store.namespace(''):
            self.bind(None, 'http://isi.edu/default-ns/')
        # Let the schema coerce the raw value into a typed graph node.
        obj = self.schema.field_type(field_name, value)
        if not obj:
            raise Exception() # TODO: replace with a specific Exception
        # Triple rooted at the document id: (doc, field, typed value).
        self.add_triple(URI(self.origin_doc.doc_id), URI(field_name), obj)

    def _find_types(self, triples):
        """
        find type in root level
        :param triples: iterable of (subject, predicate, object) triples
        :return: list of rdf:type objects found at this level
        """
        types = []
        for t in triples:
            s, p, o = t
            if self._is_rdf_type(p):
                # Nested Subject containers are not type names; skip them.
                if isinstance(o, Subject):
                    continue
                types.append(o)
        return types

    def add_subject(self, subjects, context=None):
        """Recursively add a Subject (and any nested Subjects) to the graph.

        Only triples the schema accepts (``is_valid``) are stored; *context*
        tracks already-visited Subjects so cyclic references do not recurse
        forever.
        """
        if not context:
            context = set([])
        s_types = self._find_types(subjects)
        for t in subjects:
            s, p, o = t
            o_types = []
            if isinstance(o, Subject) and o not in context:
                context.add(o)
                # Depth-first: materialize the nested subject before linking.
                self.add_subject(o, context)
                o_types = self._find_types(o)
            if self.schema.is_valid(s_types, p, o_types):
                triple = self._convert_triple_rdflib((s, p, o))
                self._g.add(triple)

    @property
    def value(self) -> Dict:
        """
        Get knowledge graph object as a plain dict:
        {property: [{'key': normalized key, 'value': python value}, ...]}
        """
        g = {}
        for p, o in self._g.predicate_objects():
            # Keep only the local part of the predicate URI as the field name.
            _, property_ = self._ns.split_uri(p)
            if property_ not in g:
                g[property_] = list()
            g[property_].append({
                'key': self.create_key_from_value(o, property_),
                'value': o.toPython()
            })
        return g

    @deprecated()
    def get_values(self, field_name: str) -> List[object]:
        """
        Get a list of all the values of a field.
        """
        result = list()
        p = self.schema.parse_field(field_name)
        for o in self._g.objects(None, p):
            result.append(o.toPython())
        return result

    def create_key_from_value(self, value, field_name: str):
        """Derive a normalized lookup key for *value* under *field_name*.

        URIs are returned unchanged; plain strings and Literals are
        stripped and lower-cased.
        """
        key = self.schema.field_type(field_name, value)
        if isinstance(key, URI):
            return key
        if isinstance(key, str) or isinstance(key, Literal):
            key = str(key).strip().lower()
        return key

    def serialize(self, format='legacy', namespace_manager=None):
        """Serialize the graph; 'legacy' emits the DIG-style JSON dict,
        any other format is delegated to the base Graph implementation."""
        if format == 'legacy':
            # Output DIG format
            return json.dumps(self.value)
        return super().serialize(format, namespace_manager)

    def _fork_namespace_manager(self):
        """Copy every (prefix, namespace) pair from the schema's ontology."""
        for prefix, ns in self.schema.ontology._ns.namespaces():
            self.bind(prefix, ns)

    def add_types(self, type_):
        """Attach one or more rdf:type assertions to the root document subject.

        Accepts a single type or a list of types.
        """
        s = Subject(URI(self.origin_doc.doc_id))
        p = URI('rdf:type')
        if not isinstance(type_, list):
            type_ = [type_]
        for a_type in type_:
            s.add_property(p, URI(a_type))
        self.add_subject(s)

    def validate(self):
        """Delegate validation to the schema; returns (conforms, result_graph)."""
        conforms, result_graph = self.schema.validate(self)
        return conforms, result_graph
|
[
"etk.knowledge_graph.node.URI",
"json.dumps",
"etk.utilities.deprecated"
] |
[((648, 660), 'etk.utilities.deprecated', 'deprecated', ([], {}), '()\n', (658, 660), False, 'from etk.utilities import deprecated\n'), ((2851, 2863), 'etk.utilities.deprecated', 'deprecated', ([], {}), '()\n', (2861, 2863), False, 'from etk.utilities import deprecated\n'), ((3935, 3950), 'etk.knowledge_graph.node.URI', 'URI', (['"""rdf:type"""'], {}), "('rdf:type')\n", (3938, 3950), False, 'from etk.knowledge_graph.node import URI, Literal\n'), ((1402, 1429), 'etk.knowledge_graph.node.URI', 'URI', (['self.origin_doc.doc_id'], {}), '(self.origin_doc.doc_id)\n', (1405, 1429), False, 'from etk.knowledge_graph.node import URI, Literal\n'), ((1431, 1446), 'etk.knowledge_graph.node.URI', 'URI', (['field_name'], {}), '(field_name)\n', (1434, 1446), False, 'from etk.knowledge_graph.node import URI, Literal\n'), ((3619, 3641), 'json.dumps', 'json.dumps', (['self.value'], {}), '(self.value)\n', (3629, 3641), False, 'import json\n'), ((3894, 3921), 'etk.knowledge_graph.node.URI', 'URI', (['self.origin_doc.doc_id'], {}), '(self.origin_doc.doc_id)\n', (3897, 3921), False, 'from etk.knowledge_graph.node import URI, Literal\n'), ((4079, 4090), 'etk.knowledge_graph.node.URI', 'URI', (['a_type'], {}), '(a_type)\n', (4082, 4090), False, 'from etk.knowledge_graph.node import URI, Literal\n')]
|
# -----------------------------------------------------------------------------
# Libraries
# -----------------------------------------------------------------------------
# Core libs
from typing import TYPE_CHECKING
# Third party libs
from django.db import models
# Project libs
# If type checking, __all__
if TYPE_CHECKING:
pass
# -----------------------------------------------------------------------------
# Constants
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Functions
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Classes
# -----------------------------------------------------------------------------
class ClientAddress(models.Model):
    """Postal address attached to a ``users.Client``."""

    client_address_id = models.AutoField(primary_key=True)
    client = models.ForeignKey("users.Client", on_delete=models.CASCADE)
    street = models.CharField(max_length=50)
    num_ext = models.CharField(max_length=50)
    # Interior number is optional (apartments, suites, ...).
    num_int = models.CharField(blank=True, max_length=50)
    neighborhood = models.CharField(max_length=50)
    zip_code = models.CharField(max_length=5)

    class Meta:
        verbose_name = "Client Address"
        verbose_name_plural = "Client Addresses"

    def __str__(self):
        return self.get_full_address()

    def get_full_address(self):
        """Return the comma-separated, human-readable address.

        Components that are NULL or empty strings are skipped.
        """
        address_fields = (
            "street",
            "num_ext",
            "num_int",
            "neighborhood",
            "zip_code",
        )
        # Truthiness covers both None and "" in one check (the previous
        # `!= None` comparison also violated PEP 8 and read each attribute
        # twice via getattr).
        parts = [
            getattr(self, field) for field in address_fields if getattr(self, field)
        ]
        return ", ".join(parts)
|
[
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.AutoField"
] |
[((916, 950), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (932, 950), False, 'from django.db import models\n'), ((964, 1023), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""users.Client"""'], {'on_delete': 'models.CASCADE'}), "('users.Client', on_delete=models.CASCADE)\n", (981, 1023), False, 'from django.db import models\n'), ((1037, 1068), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1053, 1068), False, 'from django.db import models\n'), ((1083, 1114), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1099, 1114), False, 'from django.db import models\n'), ((1129, 1172), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(50)'}), '(blank=True, max_length=50)\n', (1145, 1172), False, 'from django.db import models\n'), ((1192, 1223), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1208, 1223), False, 'from django.db import models\n'), ((1239, 1269), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(5)'}), '(max_length=5)\n', (1255, 1269), False, 'from django.db import models\n')]
|
"""
Collect Host's basic metric
Thanks to Feng_Qi a lot of code in this file was borrow from him.
"""
import psutil
import time
import json
import copy
import logging
from rpc.transfer import send_data_to_transfer
from utils import g
def collect():
	"""Gather one round of basic host metrics and push them to transfer.

	Builds a list of Open-Falcon-style metric dicts (endpoint / metric /
	timestamp / step / value / counterType / tags) covering agent liveness,
	CPU, memory, swap, per-partition disk usage, per-device disk I/O and
	per-NIC network I/O, then sends them via ``send_data_to_transfer``.

	NOTE(review): a single ``data`` dict is reused and shallow-copied for
	every metric, so ``tags``/``counterType`` set in one section carry over
	into the following appends until overwritten -- be careful when
	reordering any of the blocks below.
	"""
	logging.debug('enter basic collect')
	# Reporting step in seconds, stamped on every metric.
	push_interval = 60
	# NIC names may be GBK-encoded bytes (e.g. Chinese Windows).
	zh_decode = "gbk"
	time_now = int(time.time())
	payload = []
	# Template dict; each metric mutates it in place and appends a shallow copy.
	data = {"endpoint": g.HOSTNAME, "metric": "", "timestamp": time_now,
	        "step": push_interval, "value": "", "counterType": "", "tags": ""}
	cpu_status = psutil.cpu_times_percent()
	mem_status = psutil.virtual_memory()
	swap_status = psutil.swap_memory()
	disk_io_status = psutil.disk_io_counters(perdisk=True)
	net_io_status = psutil.net_io_counters(pernic=True)
	# agent alive
	data["metric"] = "agent.alive"
	data["value"] = 1
	data["counterType"] = "GAUGE"
	payload.append(copy.copy(data))
	logging.debug(cpu_status)
	# CPU usage percentages (counterType stays GAUGE for this section).
	data["metric"] = "cpu.user"
	data["value"] = cpu_status.user
	data["counterType"] = "GAUGE"
	payload.append(copy.copy(data))
	data["metric"] = "cpu.system"
	data["value"] = cpu_status.system
	payload.append(copy.copy(data))
	data["metric"] = "cpu.idle"
	data["value"] = cpu_status.idle
	payload.append(copy.copy(data))
	# Memory / swap utilization percentages.
	data["metric"] = "mem.memused.percent"
	data["value"] = mem_status.percent
	payload.append(copy.copy(data))
	data["metric"] = "mem.swapused.percent"
	data["value"] = swap_status.percent
	payload.append(copy.copy(data))
	# Per-partition disk usage; skip cd-rom drives and unmounted entries.
	disk_status = psutil.disk_partitions()
	for disk in disk_status:
		if 'cdrom' in disk.opts or disk.fstype == '':
			continue
		disk_info = psutil.disk_usage(disk.mountpoint)
		# Windows-style drive letter ("C:" -> "C") becomes the tag value.
		data["metric"] = "df.used.percent"
		data["value"] = disk_info.percent
		data["tags"] = "disk=" + disk.device.split(":")[0]
		payload.append(copy.copy(data))
		data["metric"] = "df.byte.total"
		data["value"] = disk_info.total
		payload.append(copy.copy(data))
		data["metric"] = "df.byte.used"
		data["value"] = disk_info.used
		payload.append(copy.copy(data))
		data["metric"] = "df.byte.free"
		data["value"] = disk_info.free
		payload.append(copy.copy(data))
	# Per-device disk I/O; these are monotonically increasing COUNTERs.
	for key in disk_io_status:
		data["metric"] = "disk.io.read_count"
		data["value"] = disk_io_status[key].read_count
		data["tags"] = "device=" + key
		data["counterType"] = "COUNTER"
		payload.append(copy.copy(data))
		data["metric"] = "disk.io.write_count"
		data["value"] = disk_io_status[key].write_count
		payload.append(copy.copy(data))
		data["metric"] = "disk.io.read_bytes"
		data["value"] = disk_io_status[key].read_bytes
		payload.append(copy.copy(data))
		data["metric"] = "disk.io.write_bytes"
		data["value"] = disk_io_status[key].write_bytes
		payload.append(copy.copy(data))
		data["metric"] = "disk.io.read_time"
		data["value"] = disk_io_status[key].read_time
		payload.append(copy.copy(data))
		data["metric"] = "disk.io.write_time"
		data["value"] = disk_io_status[key].write_time
		payload.append(copy.copy(data))
	# Per-NIC network I/O, skipping configured interface-name prefixes.
	for key in net_io_status:
		if is_interface_ignore(key):
			continue
		# NOTE(review): factor 100000 looks off for megabits (1 Mbit = 1e6
		# bits) -- confirm the intended unit before changing it.
		data["metric"] = "net.if.in.mbits"
		data["value"] = net_io_status[key].bytes_recv * 8 / 100000
		# NOTE(review): .decode() implies bytes keys (Python 2 era psutil);
		# recent psutil returns str keys -- confirm on the target runtime.
		data["tags"] = "interface=" + key.decode(zh_decode)
		payload.append(copy.copy(data))
		data["metric"] = "net.if.out.mbits"
		data["value"] = net_io_status[key].bytes_sent * 8 / 100000
		payload.append(copy.copy(data))
		data["metric"] = "net.if.in.packets"
		data["value"] = net_io_status[key].packets_recv
		payload.append(copy.copy(data))
		data["metric"] = "net.if.out.packets"
		data["value"] = net_io_status[key].packets_sent
		payload.append(copy.copy(data))
		data["metric"] = "net.if.in.error"
		data["value"] = net_io_status[key].errin
		payload.append(copy.copy(data))
		data["metric"] = "net.if.out.error"
		data["value"] = net_io_status[key].errout
		payload.append(copy.copy(data))
		data["metric"] = "net.if.in.drop"
		data["value"] = net_io_status[key].dropin
		payload.append(copy.copy(data))
		data["metric"] = "net.if.out.drop"
		data["value"] = net_io_status[key].dropout
		payload.append(copy.copy(data))
	logging.debug(payload)
	# Drop metrics the operator configured to ignore.
	# NOTE(review): under Python 3 `filter` returns a lazy iterator; fine as
	# long as the RPC layer iterates it exactly once -- confirm.
	payload = filter(lambda x: x['metric'] not in g.IGNORE, payload)
	try:
		result = send_data_to_transfer(payload)
	except Exception as e:
		logging.error(e)
	else:
		logging.info(result)
def is_interface_ignore(key):
	"""Tell whether the network interface *key* should be skipped.

	Returns True when any configured prefix occurs in the (gbk-decoded)
	interface name; otherwise falls through and returns None.
	"""
	iface_name = key.decode('gbk')
	if any(prefix in iface_name for prefix in g.COLLECTOR['ifacePrefixIgnore']):
		return True
def basic_collect(period):
	"""Run the basic-metric collection forever, one round per period.

	:params: `period` is the seconds of collecting circle
	"""
	logging.debug('prepare collect basic data')
	while True:
		try:
			collect()
		except Exception as exc:
			# Log and keep looping: one failed round must not kill the agent.
			logging.error(exc, exc_info=True)
		time.sleep(period)
|
[
"psutil.virtual_memory",
"psutil.disk_partitions",
"logging.debug",
"logging.error",
"psutil.swap_memory",
"psutil.net_io_counters",
"psutil.cpu_times_percent",
"copy.copy",
"time.time",
"psutil.disk_usage",
"rpc.transfer.send_data_to_transfer",
"logging.info",
"psutil.disk_io_counters",
"time.sleep"
] |
[((269, 305), 'logging.debug', 'logging.debug', (['"""enter basic collect"""'], {}), "('enter basic collect')\n", (282, 305), False, 'import logging\n'), ((580, 606), 'psutil.cpu_times_percent', 'psutil.cpu_times_percent', ([], {}), '()\n', (604, 606), False, 'import psutil\n'), ((625, 648), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (646, 648), False, 'import psutil\n'), ((668, 688), 'psutil.swap_memory', 'psutil.swap_memory', ([], {}), '()\n', (686, 688), False, 'import psutil\n'), ((711, 748), 'psutil.disk_io_counters', 'psutil.disk_io_counters', ([], {'perdisk': '(True)'}), '(perdisk=True)\n', (734, 748), False, 'import psutil\n'), ((770, 805), 'psutil.net_io_counters', 'psutil.net_io_counters', ([], {'pernic': '(True)'}), '(pernic=True)\n', (792, 805), False, 'import psutil\n'), ((965, 990), 'logging.debug', 'logging.debug', (['cpu_status'], {}), '(cpu_status)\n', (978, 990), False, 'import logging\n'), ((1624, 1648), 'psutil.disk_partitions', 'psutil.disk_partitions', ([], {}), '()\n', (1646, 1648), False, 'import psutil\n'), ((5319, 5362), 'logging.debug', 'logging.debug', (['"""prepare collect basic data"""'], {}), "('prepare collect basic data')\n", (5332, 5362), False, 'import logging\n'), ((375, 386), 'time.time', 'time.time', ([], {}), '()\n', (384, 386), False, 'import time\n'), ((941, 956), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (950, 956), False, 'import copy\n'), ((1116, 1131), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1125, 1131), False, 'import copy\n'), ((1229, 1244), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1238, 1244), False, 'import copy\n'), ((1338, 1353), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1347, 1353), False, 'import copy\n'), ((1461, 1476), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1470, 1476), False, 'import copy\n'), ((1586, 1601), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1595, 1601), False, 'import copy\n'), ((1777, 1811), 
'psutil.disk_usage', 'psutil.disk_usage', (['disk.mountpoint'], {}), '(disk.mountpoint)\n', (1794, 1811), False, 'import psutil\n'), ((4686, 4708), 'logging.debug', 'logging.debug', (['payload'], {}), '(payload)\n', (4699, 4708), False, 'import logging\n'), ((4815, 4845), 'rpc.transfer.send_data_to_transfer', 'send_data_to_transfer', (['payload'], {}), '(payload)\n', (4836, 4845), False, 'from rpc.transfer import send_data_to_transfer\n'), ((4920, 4940), 'logging.info', 'logging.info', (['result'], {}), '(result)\n', (4932, 4940), False, 'import logging\n'), ((5503, 5521), 'time.sleep', 'time.sleep', (['period'], {}), '(period)\n', (5513, 5521), False, 'import time\n'), ((1985, 2000), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (1994, 2000), False, 'import copy\n'), ((2111, 2126), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (2120, 2126), False, 'import copy\n'), ((2235, 2250), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (2244, 2250), False, 'import copy\n'), ((2359, 2374), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (2368, 2374), False, 'import copy\n'), ((2618, 2633), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (2627, 2633), False, 'import copy\n'), ((2766, 2781), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (2775, 2781), False, 'import copy\n'), ((2912, 2927), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (2921, 2927), False, 'import copy\n'), ((3060, 3075), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (3069, 3075), False, 'import copy\n'), ((3204, 3219), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (3213, 3219), False, 'import copy\n'), ((3350, 3365), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (3359, 3365), False, 'import copy\n'), ((3659, 3674), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (3668, 3674), False, 'import copy\n'), ((3815, 3830), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (3824, 3830), False, 'import copy\n'), ((3961, 3976), 
'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (3970, 3976), False, 'import copy\n'), ((4108, 4123), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (4117, 4123), False, 'import copy\n'), ((4245, 4260), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (4254, 4260), False, 'import copy\n'), ((4384, 4399), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (4393, 4399), False, 'import copy\n'), ((4521, 4536), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (4530, 4536), False, 'import copy\n'), ((4660, 4675), 'copy.copy', 'copy.copy', (['data'], {}), '(data)\n', (4669, 4675), False, 'import copy\n'), ((4883, 4899), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (4896, 4899), False, 'import logging\n'), ((5462, 5493), 'logging.error', 'logging.error', (['e'], {'exc_info': '(True)'}), '(e, exc_info=True)\n', (5475, 5493), False, 'import logging\n')]
|
import lafs
import random
import math
# Returns an Identity Matrix of dimensions (n, n_col)
def I(n, n_col=None):
    """Return an identity Matrix of dimensions (n, n_col).

    If *n* is a Matrix the result copies its dimensions; with a single
    integer argument the result is square.
    """
    # isinstance (instead of type() ==) also accepts Matrix subclasses.
    if isinstance(n, lafs.matrix.Matrix):
        n_col = n.dim(1)
        n = n.dim(0)
    elif n_col is None:
        n_col = n
    ret = lafs.matrix.Matrix(n, n_col)
    # Only the main diagonal of the (possibly rectangular) matrix is set.
    for i in range(min(n, n_col)):
        ret[i][i] = 1
    return ret
# Returns a Ones Matrix of dimensions (n, n_col)
# If input is Matrix A, returns a Ones Matrix of same size.
def U(n, n_col=None):
    """Return a ones Matrix of dimensions (n, n_col).

    If *n* is a Matrix the result copies its dimensions; with a single
    integer argument the result is square.
    """
    if isinstance(n, lafs.matrix.Matrix):
        n_col = n.dim(1)
        n = n.dim(0)
    elif n_col is None:
        n_col = n
    ret = lafs.matrix.Matrix(n, n_col)
    for i in range(n):
        for j in range(n_col):
            ret[i][j] = 1
    return ret
# Returns an Zeros Matrix of dimensions (n, n_col)
def Z(n, n_col=None):
    """Return a zeros Matrix of dimensions (n, n_col).

    If *n* is a Matrix the result copies its dimensions; with a single
    integer argument the result is square.
    """
    if isinstance(n, lafs.matrix.Matrix):
        n_col = n.dim(1)
        n = n.dim(0)
    elif n_col is None:
        n_col = n
    ret = lafs.matrix.Matrix(n, n_col)
    # Explicitly zero every entry (Matrix's own initial contents are not
    # assumed here).
    for i in range(n):
        for j in range(n_col):
            ret[i][j] = 0
    return ret
# Returns lower triangular version of input matrix.
def lower(matrix):
    """Return the lower-triangular part of *matrix* (zeros above the diagonal).

    :raises ValueError: if *matrix* is not a Matrix.
    """
    # isinstance (instead of type() !=) also accepts Matrix subclasses.
    if not isinstance(matrix, lafs.matrix.Matrix):
        raise ValueError("Input must be a Matrix")
    ret = lafs.matrix.Matrix(matrix.dim(0), matrix.dim(1))
    for i in range(matrix.dim(0)):
        for j in range(i + 1):
            ret[i][j] = matrix[i][j]
    return ret
# Returns upper triangular version of input matrix.
def upper(matrix):
    """Return the upper-triangular part of *matrix* (zeros below the diagonal).

    :raises ValueError: if *matrix* is not a Matrix.
    """
    # isinstance (instead of type() !=) also accepts Matrix subclasses.
    if not isinstance(matrix, lafs.matrix.Matrix):
        raise ValueError("Input must be a Matrix")
    ret = lafs.matrix.Matrix(matrix.dim(0), matrix.dim(1))
    for i in range(matrix.dim(0)):
        for j in range(i, matrix.dim(1)):
            ret[i][j] = matrix[i][j]
    return ret
# Temporary function for matrix generation.
def randm(n_row, n_col=None):
    """Return an (n_row x n_col) Matrix of random integers in [-10, 10].

    With a single argument the matrix is square.
    """
    if n_col is None:
        n_col = n_row
    rows = [[random.randint(-10, 10) for _ in range(n_col)]
            for _ in range(n_row)]
    return lafs.matrix.Matrix(rows)
# Temporary function for vector generation.
def randv(n):
    """Return a random Vec of length *n* with integer entries in [-10, 10]."""
    return lafs.vector.Vec([random.randint(-10, 10) for _ in range(n)])
# Returns rotation matrix about the third axis of angle t.
def Rz(t):
    """Rotation matrix about the third (z) axis by angle *t*.

    The angle is interpreted in degrees unless ``lafs.unit_angle == 'rad'``.
    """
    if lafs.unit_angle != 'rad':
        t = t * math.pi / 180
    c = math.cos(t)
    s = math.sin(t)
    return lafs.matrix.Matrix([
        [c, -s, 0],
        [s, c, 0],
        [0, 0, 1],
    ])
# Returns rotation matrix about the second axis of angle t.
def Ry(t):
    """Rotation matrix about the second (y) axis by angle *t*.

    The angle is interpreted in degrees unless ``lafs.unit_angle == 'rad'``.
    """
    if lafs.unit_angle != 'rad':
        t = t * math.pi / 180
    c = math.cos(t)
    s = math.sin(t)
    return lafs.matrix.Matrix([
        [c, 0, s],
        [0, 1, 0],
        [-s, 0, c],
    ])
# Returns rotation matrix about the first axis of angle t.
def Rx(t):
    """Rotation matrix about the first (x) axis by angle *t*.

    The angle is interpreted in degrees unless ``lafs.unit_angle == 'rad'``.
    """
    if lafs.unit_angle != 'rad':
        t = t * math.pi / 180
    c = math.cos(t)
    s = math.sin(t)
    return lafs.matrix.Matrix([
        [1, 0, 0],
        [0, c, -s],
        [0, s, c],
    ])
# No script behaviour: this module only provides matrix/vector helpers.
if __name__ == "__main__":
    pass
|
[
"random.randint",
"math.sin",
"lafs.vector.Vec",
"math.cos",
"lafs.matrix.Matrix"
] |
[((253, 281), 'lafs.matrix.Matrix', 'lafs.matrix.Matrix', (['n', 'n_col'], {}), '(n, n_col)\n', (271, 281), False, 'import lafs\n'), ((624, 652), 'lafs.matrix.Matrix', 'lafs.matrix.Matrix', (['n', 'n_col'], {}), '(n, n_col)\n', (642, 652), False, 'import lafs\n'), ((960, 988), 'lafs.matrix.Matrix', 'lafs.matrix.Matrix', (['n', 'n_col'], {}), '(n, n_col)\n', (978, 988), False, 'import lafs\n'), ((2089, 2113), 'lafs.matrix.Matrix', 'lafs.matrix.Matrix', (['rows'], {}), '(rows)\n', (2107, 2113), False, 'import lafs\n'), ((2266, 2287), 'lafs.vector.Vec', 'lafs.vector.Vec', (['rows'], {}), '(rows)\n', (2281, 2287), False, 'import lafs\n'), ((2230, 2253), 'random.randint', 'random.randint', (['(-10)', '(10)'], {}), '(-10, 10)\n', (2244, 2253), False, 'import random\n'), ((2028, 2051), 'random.randint', 'random.randint', (['(-10)', '(10)'], {}), '(-10, 10)\n', (2042, 2051), False, 'import random\n'), ((2517, 2528), 'math.cos', 'math.cos', (['t'], {}), '(t)\n', (2525, 2528), False, 'import math\n'), ((2568, 2579), 'math.sin', 'math.sin', (['t'], {}), '(t)\n', (2576, 2579), False, 'import math\n'), ((2582, 2593), 'math.cos', 'math.cos', (['t'], {}), '(t)\n', (2590, 2593), False, 'import math\n'), ((2901, 2912), 'math.cos', 'math.cos', (['t'], {}), '(t)\n', (2909, 2912), False, 'import math\n'), ((2917, 2928), 'math.sin', 'math.sin', (['t'], {}), '(t)\n', (2925, 2928), False, 'import math\n'), ((3019, 3030), 'math.cos', 'math.cos', (['t'], {}), '(t)\n', (3027, 3030), False, 'import math\n'), ((3336, 3347), 'math.cos', 'math.cos', (['t'], {}), '(t)\n', (3344, 3347), False, 'import math\n'), ((3387, 3398), 'math.sin', 'math.sin', (['t'], {}), '(t)\n', (3395, 3398), False, 'import math\n'), ((3401, 3412), 'math.cos', 'math.cos', (['t'], {}), '(t)\n', (3409, 3412), False, 'import math\n'), ((2531, 2542), 'math.sin', 'math.sin', (['t'], {}), '(t)\n', (2539, 2542), False, 'import math\n'), ((3003, 3014), 'math.sin', 'math.sin', (['t'], {}), '(t)\n', (3011, 3014), False, 'import 
math\n'), ((3350, 3361), 'math.sin', 'math.sin', (['t'], {}), '(t)\n', (3358, 3361), False, 'import math\n')]
|
import h5py
import tools.pymus_utils as pymusutil
import numpy as np
import matplotlib.pyplot as plt
import logging
# Configure root logging at import time; EchoImage.read_file logs through
# logging.debug / logging.error.
logging.basicConfig(level=logging.DEBUG)
class ImageFormatError(Exception):
    # Raised by EchoImage.import_data when the provided data cannot be
    # reshaped onto the scan grid.
    pass
class EchoImage(object):
    """Echogeneicity grayscale image sampled on a scan grid.

    Rows of ``data_array`` follow ``scan.z_axis`` and columns follow
    ``scan.x_axis``.
    """

    def __init__(self, scan):
        """Allocate a zeroed image matching the scan geometry.

        scan -- object exposing numpy arrays ``x_axis`` and ``z_axis``.
        """
        self.scan = scan
        self.data_array = np.zeros((len(scan.z_axis), len(scan.x_axis)))
        self.scan_x_bounds = [self.scan.x_axis.min(), self.scan.x_axis.max()]
        self.scan_z_bounds = [self.scan.z_axis.min(), self.scan.z_axis.max()]
        self.title = ""

    def import_data(self, data):
        """Store ``abs(data)`` reshaped onto the scan grid.

        Raises ImageFormatError when ``data`` cannot be reshaped to the
        grid shape.
        """
        try:
            self.data_array = np.abs(np.reshape(data, self.data_array.shape))
        except (ValueError, TypeError):
            # np.reshape raises ValueError on a size mismatch (TypeError on
            # bad input types).  The previous bare ``except:`` also hid
            # unrelated failures such as KeyboardInterrupt.
            raise ImageFormatError(" format error - cannot reshape %s to %s " % (str(data.shape), str(self.data_array.shape)))
        return

    def set_title(self, title):
        """Set the title drawn above the image."""
        self.title = title

    def show_image(self, dbScale=True, dynamic_range=60, to_file=None):
        """Render the image with matplotlib.

        dbScale       -- when True, display 20*log10 of the data normalised
                         to its maximum (dB relative to peak).
        dynamic_range -- displayed range in dB; the colour scale spans
                         [-dynamic_range, 0].
        to_file       -- optional path; when given the figure is also saved.
        """
        z_m, z_M = self.scan_z_bounds
        x_m, x_M = self.scan_x_bounds
        z_span = z_M - z_m
        x_span = x_M - x_m
        # Aspect ratio used to size the figure so pixels stay square-ish.
        x_ratio = x_span / z_span
        print("X -> %s %s Z -> %s %s / %s %s / %s " % (x_m, x_M, z_m, z_M, z_span, x_span, x_ratio))
        base_sz = 6.
        im_M = self.data_array.max()
        fig, ax = plt.subplots(figsize=(1.0 + x_ratio * base_sz, 0.3 + base_sz))
        xtent = [x_m, x_M, z_m, z_M]
        if dbScale:
            plt_im = 20. * np.log10((1. / im_M) * self.data_array)
        else:
            plt_im = self.data_array
        cax = ax.imshow(plt_im, interpolation='none', vmin=-1. * dynamic_range, vmax=0., extent=xtent, cmap='gray')
        ax.set_xlabel(" x [mm] ")
        ax.set_ylabel(" z [mm] ")
        ax.set_title(self.title)
        # Colorbar ticks every 10 dB from 0 down to -dynamic_range.
        range_ticks = [-1. * k for k in np.arange(int(dynamic_range + 1))[::-10]]
        fig.colorbar(cax, ticks=range_ticks)
        if to_file is not None:
            plt.savefig(to_file)
        plt.show()

    def write_file(self, filename, prefix=None, overwrite=False):
        """Persist title and pixel data to an HDF5 file via pymus utils."""
        data_to_write = {'title': self.title, 'data': self.data_array}
        pymusutil.generic_hdf5_write(filename, prefix, overwrite, data_to_write)

    def read_file(self, filename, prefix):
        """Load title and pixel data back from an HDF5 file.

        Missing entries are logged and leave the current attribute intact.
        """
        data_from_file = {'title': None, 'data': None}
        res = pymusutil.generic_hdf5_read(filename, prefix, data_from_file)
        logging.debug(data_from_file)
        if data_from_file['title'] is None:
            logging.error("title not found in %s:%s " % (filename, prefix))
        else:
            self.title = data_from_file['title'][0]
        if data_from_file['data'] is None:
            logging.error("image data not found in %s:%s " % (filename, prefix))
        else:
            self.data_array = data_from_file['data'][:]
|
[
"logging.error",
"tools.pymus_utils.generic_hdf5_read",
"matplotlib.pyplot.show",
"logging.basicConfig",
"logging.debug",
"numpy.reshape",
"numpy.log10",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.savefig",
"tools.pymus_utils.generic_hdf5_write"
] |
[((117, 157), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (136, 157), False, 'import logging\n'), ((1185, 1247), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(1.0 + x_ratio * base_sz, 0.3 + base_sz)'}), '(figsize=(1.0 + x_ratio * base_sz, 0.3 + base_sz))\n', (1197, 1247), True, 'import matplotlib.pyplot as plt\n'), ((1728, 1738), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1736, 1738), True, 'import matplotlib.pyplot as plt\n'), ((1872, 1944), 'tools.pymus_utils.generic_hdf5_write', 'pymusutil.generic_hdf5_write', (['filename', 'prefix', 'overwrite', 'data_to_write'], {}), '(filename, prefix, overwrite, data_to_write)\n', (1900, 1944), True, 'import tools.pymus_utils as pymusutil\n'), ((2040, 2101), 'tools.pymus_utils.generic_hdf5_read', 'pymusutil.generic_hdf5_read', (['filename', 'prefix', 'data_from_file'], {}), '(filename, prefix, data_from_file)\n', (2067, 2101), True, 'import tools.pymus_utils as pymusutil\n'), ((2102, 2131), 'logging.debug', 'logging.debug', (['data_from_file'], {}), '(data_from_file)\n', (2115, 2131), False, 'import logging\n'), ((1705, 1725), 'matplotlib.pyplot.savefig', 'plt.savefig', (['to_file'], {}), '(to_file)\n', (1716, 1725), True, 'import matplotlib.pyplot as plt\n'), ((2173, 2236), 'logging.error', 'logging.error', (["('title not found in %s:%s ' % (filename, prefix))"], {}), "('title not found in %s:%s ' % (filename, prefix))\n", (2186, 2236), False, 'import logging\n'), ((2327, 2395), 'logging.error', 'logging.error', (["('image data not found in %s:%s ' % (filename, prefix))"], {}), "('image data not found in %s:%s ' % (filename, prefix))\n", (2340, 2395), False, 'import logging\n'), ((608, 647), 'numpy.reshape', 'np.reshape', (['data', 'self.data_array.shape'], {}), '(data, self.data_array.shape)\n', (618, 647), True, 'import numpy as np\n'), ((1303, 1341), 'numpy.log10', 'np.log10', (['(1.0 / im_M * self.data_array)'], {}), '(1.0 / 
im_M * self.data_array)\n', (1311, 1341), True, 'import numpy as np\n')]
|
'''
Created on 11.12.2018
@author: mirandaa
'''
import unittest
import pytest
from mztab_m_swagger_client.api_client import ApiClient
import json
from collections import namedtuple
from pprint import pprint
from mztab_m_io import mztab_parser
from pathlib import Path, PurePath
class MzTabParseTestCase(unittest.TestCase):
    """Round-trip tests for mzTab-M JSON (de)serialisation."""

    def setUp(self):
        # <repo root>/data, resolved relative to this test file.
        repo_root = Path(__file__).parents[1].absolute()
        self.datapath = PurePath(repo_root, Path('data'))

    def testJsonToModelToJson(self):
        """Deserialise the example file, check key fields, serialise back."""
        json_path = PurePath(self.datapath, 'lipidomics-example.mzTab.json')
        with open(json_path, 'r') as handle:
            payload = handle.read().replace('\n', '')

        # ApiClient.deserialize expects an object exposing a ``data`` attribute.
        FakeResponse = namedtuple('Response', 'data')
        client = ApiClient()
        model = client.deserialize(FakeResponse(payload), 'MzTab')

        self.assertEqual("2.0.0-M", model.metadata.mz_tab_version)
        self.assertEqual("ISAS-2018-1234", model.metadata.mz_tab_id)
        self.assertIsNone(model.metadata.title)
        self.assertEqual("Minimal proposed sample file for identification and quantification of lipids", model.metadata.description)

        round_tripped = client.sanitize_for_serialization(model)
        print(round_tripped)
        self.assertNotEqual('', round_tripped)

    # TODO: reenable when TSV parsing works
    # def testMzTabParsing(self, shared_datadir):
    #     filePath = shared_datadir + 'lipidomics-example.mzTab'
    #     with open(filePath,'r') as f:
    #         text = f.read()
    #     res = mztab_parser.parse(text)
    #     pprint(res)
    #     self.assertNotEqual('', res)
|
[
"pathlib.Path",
"mztab_m_swagger_client.api_client.ApiClient",
"pathlib.PurePath",
"collections.namedtuple"
] |
[((490, 546), 'pathlib.PurePath', 'PurePath', (['self.datapath', '"""lipidomics-example.mzTab.json"""'], {}), "(self.datapath, 'lipidomics-example.mzTab.json')\n", (498, 546), False, 'from pathlib import Path, PurePath\n'), ((664, 694), 'collections.namedtuple', 'namedtuple', (['"""Response"""', '"""data"""'], {}), "('Response', 'data')\n", (674, 694), False, 'from collections import namedtuple\n'), ((749, 760), 'mztab_m_swagger_client.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (758, 760), False, 'from mztab_m_swagger_client.api_client import ApiClient\n'), ((419, 431), 'pathlib.Path', 'Path', (['"""data"""'], {}), "('data')\n", (423, 431), False, 'from pathlib import Path, PurePath\n'), ((381, 395), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (385, 395), False, 'from pathlib import Path, PurePath\n')]
|
from django.utils import timezone
from django import forms
from clothing.models import ClothingItem, WornEvent
from users.models import User
class ItemCreationForm(forms.ModelForm):
    # Form for registering a new clothing item.  The owner is supplied by
    # the view and rendered as a hidden input rather than a visible picker.
    name = forms.CharField(label='Item Name')
    owner = forms.ModelChoiceField(widget=forms.HiddenInput(), queryset=User.objects.all())

    class Meta:
        model = ClothingItem
        fields = ('name', 'owner', 'category', 'tag_id','image_url')
class WornEventCreationForm(forms.ModelForm):
    # ModelForm variant for recording a worn event by picking the item
    # directly from all ClothingItem rows.
    # NOTE(review): this class is shadowed by the ``forms.Form`` class of
    # the same name defined immediately below, so this definition is
    # unreachable by name at import time -- confirm before removing.
    item = forms.ModelChoiceField(ClothingItem.objects.all())

    class Meta:
        model = WornEvent
        fields = ('item',)
class WornEventCreationForm(forms.Form):
    # Plain form identifying the worn item by its tag id string; this
    # definition replaces the ModelForm of the same name above.
    tag_id = forms.CharField()
class WeatherRangeForm(forms.Form):
    # Temperature range filter (units presumably Celsius -- confirm with
    # callers).  Only the minimum has a lower bound (-15) and only the
    # maximum has an upper bound (45).
    minimum_temperature = forms.IntegerField(min_value=-15)
    maximum_temperature = forms.IntegerField(max_value=45)
|
[
"django.forms.IntegerField",
"django.forms.HiddenInput",
"django.forms.CharField",
"clothing.models.ClothingItem.objects.all",
"users.models.User.objects.all"
] |
[((194, 228), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Item Name"""'}), "(label='Item Name')\n", (209, 228), False, 'from django import forms\n'), ((671, 688), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (686, 688), False, 'from django import forms\n'), ((753, 786), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'min_value': '(-15)'}), '(min_value=-15)\n', (771, 786), False, 'from django import forms\n'), ((813, 845), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'max_value': '(45)'}), '(max_value=45)\n', (831, 845), False, 'from django import forms\n'), ((518, 544), 'clothing.models.ClothingItem.objects.all', 'ClothingItem.objects.all', ([], {}), '()\n', (542, 544), False, 'from clothing.models import ClothingItem, WornEvent\n'), ((271, 290), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (288, 290), False, 'from django import forms\n'), ((301, 319), 'users.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (317, 319), False, 'from users.models import User\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 24 11:53:24 2021
@author: Ashoo
"""
from password_generator import generate_simple_password
def test_generate_simple_password():
    """The simple generator is expected to yield the fixed string 'abcd'."""
    produced = generate_simple_password()
    assert produced == 'abcd'
|
[
"password_generator.generate_simple_password"
] |
[((190, 216), 'password_generator.generate_simple_password', 'generate_simple_password', ([], {}), '()\n', (214, 216), False, 'from password_generator import generate_simple_password\n')]
|
from collections import MutableMapping
from os.path import expanduser, join
from glob import glob
from drivelink import Link
from drivelink.hash import hash
class _page(dict):
    # One in-memory page of the paged dictionary.  ``currentDepth`` records
    # the depth mask that was in force when the page was last partitioned;
    # Dict._branchpage compares it against the global depth to decide
    # whether the page still needs rebalancing.
    currentDepth = 0
class Dict(Link, MutableMapping):
    """
    A dictionary class that maintains O(1) look up and write while keeping RAM usage O(1) as well.

    This is accomplished through a rudimentary (for now) hashing scheme to page the
    dictionary into parts.

    The object created can be used any way a normal dict would be used, and will
    clean itself up on python closing. This means saving all the remaining pages
    to disk. If the file_basename and file_location was used before, it will load
    the old values back into itself so that the results can be reused.

    There are two ways to initialize this object, as a standard object:

    >>> diskDict = Dict("sampledict")
    >>> for i in range(10):
    ...     diskDict[i] = chr(97+i)
    ...
    >>> diskDict[3]
    'd'
    >>> 5 in diskDict
    True
    >>> del diskDict[5]
    >>> ", ".join(str(x) for x in diskDict.keys())
    '0, 1, 2, 3, 4, 6, 7, 8, 9'
    >>> 5 in diskDict
    False

    Or through context:

    >>> with Dict("testdict") as d:
    ...     for i in range(10):
    ...         d[i] = chr(97+i)
    ...     print(d[3])
    d

    If there is a way to break dict like behavior and you can reproduce it, please
    report it to `the GitHub issues <https://github.com/cdusold/DriveLink/issues/>`_.
    """

    def __init__(self, file_basename, size_limit=1024, max_pages=16, file_location=join(expanduser("~"), ".DriveLink"), compression_ratio=0):
        # ``pages`` maps page number -> in-memory _page; ``_total`` is the
        # set of every page number that exists (in memory or on disk).
        self.pages = _page()
        self._total = set()
        super(Dict, self).__init__(file_basename, size_limit, max_pages, file_location, compression_ratio)

    def copy_from(self, other):
        """Copy every key/value pair from ``other`` into this mapping."""
        for key in other:
            self[key] = other[key]

    def load_index(self):
        """Restore the depth mask and the set of on-disk page numbers."""
        other_values = super(Dict, self).load_index()
        if other_values is None:
            return
        self.pages.currentDepth = other_values[0]
        # Page files are named <base><number>; anything else is skipped.
        for f in glob(self._file_base + '*'):
            try:
                self._total.add(int(f[len(self._file_base):]))
            except ValueError:
                pass

    def store_index(self):
        """Persist the current depth mask alongside the base index."""
        super(Dict, self).store_index(self.pages.currentDepth)

    def open_page(self, k):
        """Ensure page ``k`` is in memory, creating it if it never existed."""
        if k in self._total:
            self._load_page_from_disk(k)
        if k not in self.pages:
            self.pages[k] = _page()
            self._total.add(k)
            self.pages[k].currentDepth = self.pages.currentDepth
            self._queue.append(k)

    def determine_index(self, key):
        """
        Figures out where the key in question should be.

        Most frequently O(1), when a page becomes too large, there's
        a O(ln(n)) search that refactors O(k ln(n)) elements
        once every O(k) insertions. A full refactor usually
        happens in strides, moving a total of O(n) elements
        split up over O(ln(n)) calls. This makes the worst
        time refactor O(n) and usual refactor approximately
        O(n/ ln(n)). Average case lookup O(n/k).
        """
        k = hash(key) & self.pages.currentDepth
        # Shrink the mask until we land on a page that actually exists
        # (page 0 always counts), then rebalance that ancestor page.
        i = 0
        while (k & (self.pages.currentDepth >> i)) not in self._total | set([0]):
            i += 1
        self._branchpage(k & (self.pages.currentDepth >> i))
        return k, key

    def page_indices(self):
        """Yield every known page number (snapshot; safe against mutation)."""
        for k in list(self._total):
            yield k

    def __setitem__(self, key, value):
        '''
        Sets a value that a key maps to.
        '''
        super(Dict, self).__setitem__(key, value)
        i, _ = self.determine_index(key)
        if len(self.pages[i]) > self.size_limit:
            # Page overflowed: deepen the global mask if this page is
            # already at full depth, then split the page.
            if self.pages[i].currentDepth == self.pages.currentDepth:
                self.pages.currentDepth <<= 1
                self.pages.currentDepth |= 1
            self._branchpage(i)

    def __contains__(self, item):
        try:
            i, k = self._finditem(item)
        except Exception:
            # Any lookup failure means the key is absent.  Narrowed from a
            # bare ``except:`` which also swallowed SystemExit and
            # KeyboardInterrupt.
            return False
        return k in self.pages[i]

    def page_removed(self, number):
        """Forget a page that the paging layer has discarded."""
        self._total.remove(number)

    def __str__(self):
        return "Dictionary with values stored to " + self._file_base

    def _branchpage(self, pagenumber):
        """Re-home every key on ``pagenumber`` that the current depth mask
        now assigns to a different page."""
        self._guarantee_page(pagenumber)
        if self.pages[pagenumber].currentDepth == self.pages.currentDepth:
            return
        self.pages[pagenumber].currentDepth = self.pages.currentDepth
        for key in set(self.pages[pagenumber].keys()):
            k = hash(key) & self.pages.currentDepth
            if k != pagenumber:
                # Re-guarantee the source page: moving keys may have
                # evicted it from memory in the meantime.
                self._guarantee_page(pagenumber)
                v = self.pages[pagenumber][key]
                del self.pages[pagenumber][key]
                self._guarantee_page(k)
                self.pages[k][key] = v
|
[
"drivelink.hash.hash",
"os.path.expanduser",
"glob.glob"
] |
[((2161, 2188), 'glob.glob', 'glob', (["(self._file_base + '*')"], {}), "(self._file_base + '*')\n", (2165, 2188), False, 'from glob import glob\n'), ((1649, 1664), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (1659, 1664), False, 'from os.path import expanduser, join\n'), ((3259, 3268), 'drivelink.hash.hash', 'hash', (['key'], {}), '(key)\n', (3263, 3268), False, 'from drivelink.hash import hash\n'), ((4661, 4670), 'drivelink.hash.hash', 'hash', (['key'], {}), '(key)\n', (4665, 4670), False, 'from drivelink.hash import hash\n')]
|
import os
from flask_script import Manager
from blog import app
from blog.database import session, Entry
# Flask-Script entry point; commands below are registered on this manager.
manager = Manager(app)
@manager.command
def run():
    """Serve the blog on 0.0.0.0; the port comes from $PORT (default 8080)."""
    app.run(host='0.0.0.0', port=int(os.environ.get('PORT', 8080)))
@manager.command
def seed():
    """Populate the database with 25 placeholder blog entries."""
    content = """TEST TEST TEST, these entries are all just tests. Mic check one, two
, one, two, testing, testing."""
    for number in range(1, 26):
        placeholder = Entry(
            title="Test Entry #{}".format(number),
            content=content
        )
        session.add(placeholder)
    session.commit()
from getpass import getpass
from werkzeug.security import generate_password_hash
from blog.database import User
@manager.command
def adduser():
    """Interactively create a blog user.

    Prompts for name, e-mail and a password (minimum 8 characters,
    entered twice); refuses to create a duplicate e-mail address.
    """
    name = input("Name: ")
    email = input("Email: ")
    if session.query(User).filter_by(email=email).first():
        print("User with that email address already exists")
        return

    password = ""
    # Loop until the password is long enough AND both entries match.
    # The ``or`` short-circuits on the first pass (password is empty), so
    # password_2 is only read after it has been assigned.
    while len(password) < 8 or password != password_2:
        password = getpass("Password: ")
        # Restored a meaningful confirmation prompt; the previous literal
        # was a corrupted "<PASSWORD>: " placeholder.
        password_2 = getpass("Password again: ")
    user = User(name=name, email=email,
                password=generate_password_hash(password))
    session.add(user)
    session.commit()
from flask_migrate import Migrate, MigrateCommand
from blog.database import Base
class DB(object):
    # Minimal adapter exposing SQLAlchemy metadata under the ``.metadata``
    # attribute, so it can stand in for a Flask-SQLAlchemy ``db`` object
    # when handed to Flask-Migrate below.
    def __init__(self, metadata):
        self.metadata = metadata
# Wire up Flask-Migrate with the declarative Base's metadata and expose the
# migration commands under ``python manage.py db ...``.
migrate = Migrate(app, DB(Base.metadata))
manager.add_command('db', MigrateCommand)

if __name__ == "__main__":
    manager.run()
|
[
"flask_script.Manager",
"blog.app.run",
"getpass.getpass",
"blog.database.session.query",
"os.environ.get",
"blog.database.session.add",
"blog.database.session.commit",
"werkzeug.security.generate_password_hash"
] |
[((117, 129), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (124, 129), False, 'from flask_script import Manager\n'), ((208, 242), 'blog.app.run', 'app.run', ([], {'host': '"""0.0.0.0"""', 'port': 'port'}), "(host='0.0.0.0', port=port)\n", (215, 242), False, 'from blog import app\n'), ((561, 577), 'blog.database.session.commit', 'session.commit', ([], {}), '()\n', (575, 577), False, 'from blog.database import session, Entry\n'), ((1183, 1200), 'blog.database.session.add', 'session.add', (['user'], {}), '(user)\n', (1194, 1200), False, 'from blog.database import session, Entry\n'), ((1205, 1221), 'blog.database.session.commit', 'session.commit', ([], {}), '()\n', (1219, 1221), False, 'from blog.database import session, Entry\n'), ((174, 202), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(8080)'], {}), "('PORT', 8080)\n", (188, 202), False, 'import os\n'), ((538, 556), 'blog.database.session.add', 'session.add', (['entry'], {}), '(entry)\n', (549, 556), False, 'from blog.database import session, Entry\n'), ((1012, 1033), 'getpass.getpass', 'getpass', (['"""Password: """'], {}), "('Password: ')\n", (1019, 1033), False, 'from getpass import getpass\n'), ((1055, 1078), 'getpass.getpass', 'getpass', (['"""<PASSWORD>: """'], {}), "('<PASSWORD>: ')\n", (1062, 1078), False, 'from getpass import getpass\n'), ((1145, 1177), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (1167, 1177), False, 'from werkzeug.security import generate_password_hash\n'), ((787, 806), 'blog.database.session.query', 'session.query', (['User'], {}), '(User)\n', (800, 806), False, 'from blog.database import session, Entry\n')]
|
#!/usr/bin/python3
# Copyright 2019 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
This class mostly exists because almost every script needs to do a get_distinct_zones
Having it centralized, means that the included and excluded status' can be managed in one place.
"""
import logging
from datetime import datetime
from pymongo import MongoClient
from tld import get_fld
class ZoneManager(object):
    """Central access point for the zone collection in MongoDB.

    Keeping all zone queries here means the set of included/excluded
    status values is managed in one place.  Zones carry one of the four
    status constants below; read helpers exclude FALSE_POSITIVE and
    EXPIRED zones unless ``includeAll`` is True.
    """

    # A status of confirmed typically means it was entered by a human
    CONFIRMED = "confirmed"

    # A status of unconfirmed means that it was added via automation
    # and has not been reviewed by a human
    UNCONFIRMED = "unconfirmed"

    # A status of false positive means that a human identified that automation made a mistake
    FALSE_POSITIVE = "false_positive"

    # A status of expired means that the automation believes that the domain is no longer registered
    EXPIRED = "expired"

    # The MongoConnector
    mongo_connector = None

    # The zone collection
    zone_collection = None

    # The logger
    _logger = None

    def _log(self):
        """Return the module logger."""
        return logging.getLogger(__name__)

    def __init__(self, mongo_connector):
        """Initialize with a MongoConnector and cache the zone collection."""
        self._logger = self._log()
        self.mongo_connector = mongo_connector
        self.zone_collection = mongo_connector.get_zone_connection()

    def _check_valid_status(self, status):
        """Return True if ``status`` is a recognised value; log and return False otherwise."""
        if status not in (
            ZoneManager.EXPIRED,
            ZoneManager.FALSE_POSITIVE,
            ZoneManager.CONFIRMED,
            ZoneManager.UNCONFIRMED,
        ):
            self._logger.error("ERROR: Bad status value")
            return False
        return True

    @staticmethod
    def get_distinct_zones(mongo_connector, includeAll=False):
        """
        Return the list of zone names as strings (not full documents).

        By default false-positive and expired zones are excluded; pass
        includeAll=True to return every zone regardless of status.
        """
        zones_collection = mongo_connector.get_zone_connection()

        if includeAll:
            zone_results = mongo_connector.perform_distinct(zones_collection, "zone")
        else:
            zone_results = mongo_connector.perform_distinct(
                zones_collection,
                "zone",
                {"status": {"$nin": [ZoneManager.FALSE_POSITIVE, ZoneManager.EXPIRED]}},
            )

        # Keep only names that contain a dot, i.e. look like real domains.
        zones = []
        for zone in zone_results:
            if zone.find(".") >= 0:
                zones.append(zone)

        return zones

    @staticmethod
    def get_reversed_zones(mongo_connector):
        """
        Return active zone names reversed label-by-label to match the
        Common Crawl vertices format (e.g. "example.org" -> "org.example").
        """
        zones_collection = mongo_connector.get_zone_connection()
        zone_results = mongo_connector.perform_distinct(
            zones_collection,
            "zone",
            {"status": {"$nin": [ZoneManager.FALSE_POSITIVE, ZoneManager.EXPIRED]}},
        )

        zones = []
        for zone in zone_results:
            if zone.find("."):
                zone_parts = zone.split(".")

                # The vertices.txt entries from common_crawl are in reverse order (e.g. org.example.www).
                # Storing zones pre-reversed lets the matcher avoid reversing
                # every entry of the much larger crawl file.
                rev_zone = ""
                for part in zone_parts:
                    rev_zone = part + "." + rev_zone

                # Drop the trailing dot left by the loop above.
                rev_zone = rev_zone[:-1]
                zones.append(rev_zone)

        return zones

    @staticmethod
    def get_zones_by_source(mongo_connector, source, includeAll=False):
        """
        Return the list of zone names reported by the given source.
        """
        zone_collection = mongo_connector.get_zone_connection()

        if includeAll:
            zones = mongo_connector.perform_distinct(
                zone_collection, "zone", {"reporting_sources.source": source}
            )
        else:
            zones = mongo_connector.perform_distinct(
                zone_collection,
                "zone",
                {
                    "reporting_sources.source": source,
                    "status": {
                        "$nin": [ZoneManager.FALSE_POSITIVE, ZoneManager.EXPIRED]
                    },
                },
            )

        return zones

    @staticmethod
    def get_zones(mongo_connector, includeAll=False):
        """
        Return the complete JSON documents for all matching zones.
        """
        zones_collection = mongo_connector.get_zone_connection()

        if includeAll:
            zone_results = mongo_connector.perform_find(zones_collection, {})
        else:
            zone_results = mongo_connector.perform_find(
                zones_collection,
                {"status": {"$nin": [ZoneManager.FALSE_POSITIVE, ZoneManager.EXPIRED]}},
            )

        zones = []
        for zone in zone_results:
            if zone["zone"].find(".") >= 0:
                zones.append(zone)

        return zones

    @staticmethod
    def get_root_domain(value, zone=None):
        """
        Return the first-level (registrable) domain for ``value``,
        falling back to ``zone`` when it cannot be determined.
        """
        res = get_fld(value, fix_protocol=True, fail_silently=True)
        if res is None:
            return zone
        return res

    def get_zone(self, zone):
        """
        Fetch the full individual zone record.
        This is not a static method since it would probably be called repeatedly.
        """
        return self.mongo_connector.perform_find_one(
            self.zone_collection, {"zone": zone}
        )

    def get_zones_by_status(self, status):
        """
        Return the list of zone names carrying the given status.
        """
        if not self._check_valid_status(status):
            return

        zone_results = self.mongo_connector.perform_distinct(
            self.zone_collection, "zone", {"status": status}
        )

        zones = []
        for zone in zone_results:
            if zone.find(".") >= 0:
                zones.append(zone)

        return zones

    def set_status(self, zone, status, caller):
        """
        Set the status of the given zone, recording who changed it and when.
        """
        count = self.mongo_connector.perform_count(self.zone_collection, {"zone": zone})
        if count == 0:
            self._logger.error("ERROR: Invalid zone!")
            return

        # Reuse the shared validator instead of duplicating the status list
        # (the previous inline copy could drift out of sync).
        if not self._check_valid_status(status):
            return

        if caller is None or caller == "":
            self._logger.error("ERROR: Please provide a caller value!")
            return

        now = datetime.now()
        note = caller + " set to " + status + " on " + str(now)
        self.zone_collection.update_one(
            {"zone": zone},
            {"$set": {"status": status, "updated": now}, "$addToSet": {"notes": note}},
        )

    def add_note(self, zone, note):
        """
        In the future, there should probably be restrictions on note length.
        For now, it is not set until more information on usage is available.
        """
        self.zone_collection.update_one({"zone": zone}, {"$addToSet": {"notes": note}})
|
[
"tld.get_fld",
"datetime.datetime.now",
"logging.getLogger"
] |
[((1674, 1701), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1691, 1701), False, 'import logging\n'), ((6059, 6112), 'tld.get_fld', 'get_fld', (['value'], {'fix_protocol': '(True)', 'fail_silently': '(True)'}), '(value, fix_protocol=True, fail_silently=True)\n', (6066, 6112), False, 'from tld import get_fld\n'), ((7810, 7824), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7822, 7824), False, 'from datetime import datetime\n')]
|
'''
Function:
身份证信息查询小工具
Author:
Charles
微信公众号:
Charles的皮卡丘
'''
import sys
from PyQt5 import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5 import QtWidgets, QtGui
from id_validator import validator
'''身份证信息查询小工具'''
class IDCardQuery(QWidget):
    """Main window: generate fake Chinese ID numbers and validate/parse
    real ones via the ``id_validator`` package."""

    def __init__(self, parent=None, **kwargs):
        super(IDCardQuery, self).__init__(parent)
        self.setWindowTitle('身份证信息查询小工具 - 微信公众号: Charles的皮卡丘')
        self.setWindowIcon(QIcon('resources/icon.jpg'))
        self.setFixedSize(600, 400)
        # Widgets
        self.birthday_label = QLabel('出生日期: ')
        self.birthday_line_edit = QLineEdit('2000-01-01')
        self.address_label = QLabel('出生地区: ')
        self.address_line_edit = QLineEdit('上海市')
        self.sex_label = QLabel('性别: ')
        self.sex_combobox = QComboBox()
        self.sex_combobox.addItem('男')
        self.sex_combobox.addItem('女')
        self.generate_button = QPushButton('随机生成')
        self.idcard_label = QLabel('身份证号: ')
        self.idcard_line_edit = QLineEdit()
        self.query_button = QPushButton('验证查询')
        self.result_label = QLabel('查询结果: ')
        self.result_text_edit = QTextEdit()
        # Layout
        self.grid = QGridLayout()
        self.grid.addWidget(self.birthday_label, 0, 0, 1, 1)
        self.grid.addWidget(self.birthday_line_edit, 0, 1, 1, 3)
        self.grid.addWidget(self.address_label, 0, 4, 1, 1)
        self.grid.addWidget(self.address_line_edit, 0, 5, 1, 3)
        self.grid.addWidget(self.sex_label, 0, 8, 1, 1)
        self.grid.addWidget(self.sex_combobox, 0, 9, 1, 2)
        self.grid.addWidget(self.generate_button, 0, 11, 1, 1)
        self.grid.addWidget(self.idcard_label, 1, 0, 1, 1)
        self.grid.addWidget(self.idcard_line_edit, 1, 1, 1, 10)
        self.grid.addWidget(self.query_button, 1, 11, 1, 1)
        self.grid.addWidget(self.result_label, 2, 0, 1, 1)
        self.grid.addWidget(self.result_text_edit, 3, 0, 1, 12)
        self.setLayout(self.grid)
        # Signal bindings
        self.generate_button.clicked.connect(self.generateID)
        self.query_button.clicked.connect(self.CheckAndParseID)

    def CheckAndParseID(self):
        """Validate the entered ID number and display its parsed fields."""
        id_ = self.idcard_line_edit.text()
        is_valid = validator.is_valid(id_)
        if not is_valid:
            self.result_text_edit.setText('身份证号是否合法: 否\n身份证号信息: 无')
            return
        showinfo = '身份证号是否合法: 是\n'
        idinfos = validator.get_info(id_)
        key_to_showtext = {
            'address_code': '地址码',
            'abandoned': '地址码是否废弃(1是废弃, 0是仍在使用)',
            'address': '地址',
            'birthday_code': '出生日期',
            'constellation': '星座',
            'chinese_zodiac': '生肖',
            'sex': '性别',
        }
        # Only display fields we have a label for.
        for key, value in idinfos.items():
            if key not in key_to_showtext: continue
            showinfo += f'{key_to_showtext[key]}: {value}\n'
        self.result_text_edit.setText(showinfo)

    def generateID(self):
        """Generate a fake ID number from the entered birthday/region/sex."""
        birthday = self.birthday_line_edit.text().replace('-', '')
        birthday = birthday if birthday else None
        address = self.address_line_edit.text()
        address = address if address else None
        sex = self.sex_combobox.currentText()
        sex = 1 if sex == '男' else 0
        try:
            id_ = validator.fake_id(True, address, birthday, sex)
        except Exception:
            # fake_id raises on an invalid region/birthday combination;
            # fall back to a fully random ID.  Narrowed from a bare
            # ``except:`` which also swallowed KeyboardInterrupt/SystemExit.
            id_ = validator.fake_id()
        self.idcard_line_edit.setText(id_)
'''run'''
if __name__ == '__main__':
    # Start the Qt event loop with a single IDCardQuery window.
    app = QApplication(sys.argv)
    client = IDCardQuery()
    client.show()
    sys.exit(app.exec_())
|
[
"id_validator.validator.is_valid",
"id_validator.validator.fake_id",
"id_validator.validator.get_info"
] |
[((2281, 2304), 'id_validator.validator.is_valid', 'validator.is_valid', (['id_'], {}), '(id_)\n', (2299, 2304), False, 'from id_validator import validator\n'), ((2470, 2493), 'id_validator.validator.get_info', 'validator.get_info', (['id_'], {}), '(id_)\n', (2488, 2493), False, 'from id_validator import validator\n'), ((3354, 3401), 'id_validator.validator.fake_id', 'validator.fake_id', (['(True)', 'address', 'birthday', 'sex'], {}), '(True, address, birthday, sex)\n', (3371, 3401), False, 'from id_validator import validator\n'), ((3436, 3455), 'id_validator.validator.fake_id', 'validator.fake_id', ([], {}), '()\n', (3453, 3455), False, 'from id_validator import validator\n')]
|
#!/usr/bin/env python3
# perfectgift: a tornado webapp for creating wish lists between friends
# Copyright (C) 2014, NCSS14 Group 4
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# 1. The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import epyc
import sqlite3
from tornado.ncss import Server
from tornado.log import app_log
from db.api import User, Product, UserNotFound
from login import logged_in, get_current_user
@logged_in
def friends_list(response):
    """Render the friends page for the currently signed-in user."""
    # NOTE(review): the route '/friends/([a-zA-Z0-9_]+)' captures a group,
    # but this handler only accepts ``response`` -- confirm how tornado.ncss
    # passes capture groups.
    username = get_current_user(response)
    user = User.find(username)
    friends = user.find_friends()
    context = {'friends': friends, 'logged_in': username}
    response.write(epyc.render("templates/friends.html", context))
def search(response):
    """Render search results for people or items.

    Query-string fields:
        q -- the search term; an empty/missing term renders an empty page.
        t -- result type, "people" (default) or "items".

    The previous version assigned ``get_current_user(response)`` to a local
    named ``logged_in`` (shadowing the imported decorator, and unused) and
    duplicated the scope/render code in both branches.
    """
    search = response.get_field("q")
    types = {
        "people": 0,
        "items": 1
    }
    tp = types.get(response.get_field("t"), 0)

    items = []
    if search:
        if tp == types['people']:
            items = User.search(search)
        else:
            items = Product.search(search)
        app_log.info("[%s found for '%s'] %s" % (response.get_field('t'), search, items))

    # Single scope construction for both the hit and no-query cases.
    scope = {
        "query": search if search else "",
        "results": items,
        "tp": tp,
        "types": types,
        "logged_in": get_current_user(response)
    }
    response.write(epyc.render("templates/search.html", scope))
def hello(response, match):
    # Smoke-test handler rendering a static greeting page.
    # ``match`` receives the URL regex capture and is unused here.
    response.write(epyc._render('''
	<html>
	<header>:)</header>
	<body>
	<h1>Hellos peoples of the internets</h1>
	</body>
	</html>
'''))
if __name__ == '__main__':
    # Start the tornado server with the search and friends-list routes.
    server=Server()
    server.register('/search',search)
    server.register('/friends/([a-zA-Z0-9_]+)', friends_list)
    server.run()
|
[
"tornado.ncss.Server",
"epyc._render",
"db.api.Product.search",
"db.api.User.search",
"login.get_current_user",
"db.api.User.find",
"epyc.render"
] |
[((1439, 1465), 'login.get_current_user', 'get_current_user', (['response'], {}), '(response)\n', (1455, 1465), False, 'from login import logged_in, get_current_user\n'), ((1482, 1509), 'db.api.User.find', 'User.find', (['current_username'], {}), '(current_username)\n', (1491, 1509), False, 'from db.api import User, Product, UserNotFound\n'), ((1754, 1780), 'login.get_current_user', 'get_current_user', (['response'], {}), '(response)\n', (1770, 1780), False, 'from login import logged_in, get_current_user\n'), ((2660, 2668), 'tornado.ncss.Server', 'Server', ([], {}), '()\n', (2666, 2668), False, 'from tornado.ncss import Server\n'), ((1638, 1682), 'epyc.render', 'epyc.render', (['"""templates/friends.html"""', 'scope'], {}), "('templates/friends.html', scope)\n", (1649, 1682), False, 'import epyc\n'), ((2504, 2632), 'epyc._render', 'epyc._render', (['"""\n\t<html>\n\t<header>:)</header>\n\t<body>\n\t<h1>Hellos peoples of the internets</h1>\n\t</body>\n\t</html>\n"""'], {}), '(\n """\n\t<html>\n\t<header>:)</header>\n\t<body>\n\t<h1>Hellos peoples of the internets</h1>\n\t</body>\n\t</html>\n"""\n )\n', (2516, 2632), False, 'import epyc\n'), ((1921, 1940), 'db.api.User.search', 'User.search', (['search'], {}), '(search)\n', (1932, 1940), False, 'from db.api import User, Product, UserNotFound\n'), ((1960, 1982), 'db.api.Product.search', 'Product.search', (['search'], {}), '(search)\n', (1974, 1982), False, 'from db.api import User, Product, UserNotFound\n'), ((2085, 2111), 'login.get_current_user', 'get_current_user', (['response'], {}), '(response)\n', (2101, 2111), False, 'from login import logged_in, get_current_user\n'), ((2218, 2261), 'epyc.render', 'epyc.render', (['"""templates/search.html"""', 'scope'], {}), "('templates/search.html', scope)\n", (2229, 2261), False, 'import epyc\n'), ((2365, 2391), 'login.get_current_user', 'get_current_user', (['response'], {}), '(response)\n', (2381, 2391), False, 'from login import logged_in, get_current_user\n'), ((2414, 
2457), 'epyc.render', 'epyc.render', (['"""templates/search.html"""', 'scope'], {}), "('templates/search.html', scope)\n", (2425, 2457), False, 'import epyc\n')]
|
# -*- coding:utf-8 -*-
# pylint: disable=C0103, C0111, W0621
"""Implementation of MGCN model"""
import torch
import torch.nn as nn
from .layers import AtomEmbedding, RBFLayer, EdgeEmbedding, \
MultiLevelInteraction
from ...nn.pytorch import SumPooling
class MGCNModel(nn.Module):
"""
`Molecular Property Prediction: A Multilevel
Quantum Interactions Modeling Perspective <https://arxiv.org/abs/1906.11081>`__
Parameters
----------
dim : int
Size for embeddings, default to be 128.
width : int
Width in the RBF layer, default to be 1.
cutoff : float
The maximum distance between nodes, default to be 5.0.
edge_dim : int
Size for edge embedding, default to be 128.
out_put_dim: int
Number of target properties to predict, default to be 1.
n_conv : int
Number of convolutional layers, default to be 3.
norm : bool
Whether to perform normalization, default to be False.
atom_ref : Atom embeddings or None
If None, random representation initialization will be used. Otherwise,
they will be used to initialize atom representations. Default to be None.
pre_train : Atom embeddings or None
If None, random representation initialization will be used. Otherwise,
they will be used to initialize atom representations. Default to be None.
"""
def __init__(self,
dim=128,
width=1,
cutoff=5.0,
edge_dim=128,
output_dim=1,
n_conv=3,
norm=False,
atom_ref=None,
pre_train=None):
super(MGCNModel, self).__init__()
self._dim = dim
self.output_dim = output_dim
self.edge_dim = edge_dim
self.cutoff = cutoff
self.width = width
self.n_conv = n_conv
self.atom_ref = atom_ref
self.norm = norm
if pre_train is None:
self.embedding_layer = AtomEmbedding(dim)
else:
self.embedding_layer = AtomEmbedding(pre_train=pre_train)
self.rbf_layer = RBFLayer(0, cutoff, width)
self.edge_embedding_layer = EdgeEmbedding(dim=edge_dim)
if atom_ref is not None:
self.e0 = AtomEmbedding(1, pre_train=atom_ref)
self.conv_layers = nn.ModuleList([
MultiLevelInteraction(self.rbf_layer._fan_out, dim)
for i in range(n_conv)
])
self.out_project = nn.Sequential(
nn.Linear(dim * (self.n_conv + 1), 64),
nn.Softplus(beta=1, threshold=20),
nn.Linear(64, output_dim)
)
self.readout = SumPooling()
def set_mean_std(self, mean, std, device="cpu"):
"""Set the mean and std of atom representations for normalization.
Parameters
----------
mean : list or numpy array
The mean of labels
std : list or numpy array
The std of labels
device : str or torch.device
Device for storing the mean and std
"""
self.mean_per_node = torch.tensor(mean, device=device)
self.std_per_node = torch.tensor(std, device=device)
def forward(self, g, atom_types, edge_distances):
"""Predict molecule labels
Parameters
----------
g : DGLGraph
Input DGLGraph for molecule(s)
atom_types : int64 tensor of shape (B1)
Types for atoms in the graph(s), B1 for the number of atoms.
edge_distances : float32 tensor of shape (B2, 1)
Edge distances, B2 for the number of edges.
Returns
-------
prediction : float32 tensor of shape (B, output_dim)
Model prediction for the batch of graphs, B for the number
of graphs, output_dim for the prediction size.
"""
h = self.embedding_layer(atom_types)
e = self.edge_embedding_layer(g, atom_types)
rbf_out = self.rbf_layer(edge_distances)
all_layer_h = [h]
for idx in range(self.n_conv):
h, e = self.conv_layers[idx](g, h, e, rbf_out)
all_layer_h.append(h)
# concat multilevel representations
h = torch.cat(all_layer_h, dim=1)
h = self.out_project(h)
if self.atom_ref is not None:
h_ref = self.e0(atom_types)
h = h + h_ref
if self.norm:
h = h * self.std_per_node + self.mean_per_node
return self.readout(g, h)
|
[
"torch.nn.Softplus",
"torch.cat",
"torch.tensor",
"torch.nn.Linear"
] |
[((3138, 3171), 'torch.tensor', 'torch.tensor', (['mean'], {'device': 'device'}), '(mean, device=device)\n', (3150, 3171), False, 'import torch\n'), ((3200, 3232), 'torch.tensor', 'torch.tensor', (['std'], {'device': 'device'}), '(std, device=device)\n', (3212, 3232), False, 'import torch\n'), ((4259, 4288), 'torch.cat', 'torch.cat', (['all_layer_h'], {'dim': '(1)'}), '(all_layer_h, dim=1)\n', (4268, 4288), False, 'import torch\n'), ((2543, 2581), 'torch.nn.Linear', 'nn.Linear', (['(dim * (self.n_conv + 1))', '(64)'], {}), '(dim * (self.n_conv + 1), 64)\n', (2552, 2581), True, 'import torch.nn as nn\n'), ((2595, 2628), 'torch.nn.Softplus', 'nn.Softplus', ([], {'beta': '(1)', 'threshold': '(20)'}), '(beta=1, threshold=20)\n', (2606, 2628), True, 'import torch.nn as nn\n'), ((2642, 2667), 'torch.nn.Linear', 'nn.Linear', (['(64)', 'output_dim'], {}), '(64, output_dim)\n', (2651, 2667), True, 'import torch.nn as nn\n')]
|
# Generated by Django 3.2.6 on 2021-08-25 17:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course', '0004_coursesmodel_thumbnail'),
]
operations = [
migrations.AddField(
model_name='coursesmodel',
name='slug',
field=models.SlugField(default=''),
),
]
|
[
"django.db.models.SlugField"
] |
[((341, 369), 'django.db.models.SlugField', 'models.SlugField', ([], {'default': '""""""'}), "(default='')\n", (357, 369), False, 'from django.db import migrations, models\n')]
|
from functools import partial
import numpy as np
import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping, TensorBoard
from tensorflow.keras.optimizers import Adam
from nets.facenet import facenet
from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss
from utils.callbacks import (ExponentDecayScheduler, LFW_callback, LossHistory,
ModelCheckpoint)
from utils.utils_fit import fit_one_epoch
#------------------------------------------------#
# 计算一共有多少个人,用于利用交叉熵辅助收敛
#------------------------------------------------#
def get_num_classes(annotation_path):
with open(annotation_path) as f:
dataset_path = f.readlines()
labels = []
for path in dataset_path:
path_split = path.split(";")
labels.append(int(path_split[0]))
num_classes = np.max(labels) + 1
return num_classes
gpus = tf.config.experimental.list_physical_devices(device_type='GPU')
for gpu in gpus:
tf.config.experimental.set_memory_growth(gpu, True)
if __name__ == "__main__":
#----------------------------------------------------#
# 是否使用eager模式训练
#----------------------------------------------------#
eager = False
#--------------------------------------------------------#
# 指向根目录下的cls_train.txt,读取人脸路径与标签
#--------------------------------------------------------#
annotation_path = "cls_train.txt"
#--------------------------------------------------------#
# 输入图像大小,常用设置如[112, 112, 3]
#--------------------------------------------------------#
input_shape = [160, 160, 3]
#--------------------------------------------------------#
# 主干特征提取网络的选择
# mobilenet;inception_resnetv1
#--------------------------------------------------------#
backbone = "mobilenet"
#----------------------------------------------------------------------------------------------------------------------------#
# 权值文件的下载请看README,可以通过网盘下载。
# 模型的 预训练权重 比较重要的部分是 主干特征提取网络的权值部分,用于进行特征提取。
#
# 如果训练过程中存在中断训练的操作,可以将model_path设置成logs文件夹下的权值文件,将已经训练了一部分的权值再次载入。
# 同时修改下方的训练参数,来保证模型epoch的连续性。
#
# 当model_path = ''的时候不加载整个模型的权值。
#
# 如果想要让模型从主干的预训练权值开始训练,则设置model_path为主干网络的权值,此时仅加载主干。
# 如果想要让模型从0开始训练,则设置model_path = '',Freeze_Train = False,此时从0开始训练,且没有冻结主干的过程。
# 一般来讲,从0开始训练效果会很差,因为权值太过随机,特征提取效果不明显。
#
# 网络一般不从0开始训练,至少会使用主干部分的权值,有些论文提到可以不用预训练,主要原因是他们 数据集较大 且 调参能力优秀。
# 如果一定要训练网络的主干部分,可以了解imagenet数据集,首先训练分类模型,分类模型的 主干部分 和该模型通用,基于此进行训练。
#----------------------------------------------------------------------------------------------------------------------------#
model_path = "model_data/facenet_mobilenet.h5"
#-------------------------------------------------------------------#
# 是否进行冻结训练,默认先冻结主干训练后解冻训练。
#-------------------------------------------------------------------#
Freeze_Train = True
#-------------------------------------------------------------------#
# 用于设置是否使用多线程读取数据,1代表关闭多线程
# 开启后会加快数据读取速度,但是会占用更多内存
# 在IO为瓶颈的时候再开启多线程,即GPU运算速度远大于读取图片的速度。
#-------------------------------------------------------------------#
num_workers = 1
#-------------------------------------------------------------------#
# 是否开启LFW评估
#-------------------------------------------------------------------#
lfw_eval_flag = True
#-------------------------------------------------------------------#
# LFW评估数据集的文件路径和对应的txt文件
#-------------------------------------------------------------------#
lfw_dir_path = "lfw"
lfw_pairs_path = "model_data/lfw_pair.txt"
num_classes = get_num_classes(annotation_path)
model = facenet(input_shape, num_classes, backbone=backbone, mode="train")
model.load_weights(model_path, by_name=True, skip_mismatch=True)
#-------------------------------------------------------------------------------#
# 训练参数的设置
# logging表示tensorboard的保存地址
# checkpoint用于设置权值保存的细节,period用于修改多少epoch保存一次
# reduce_lr用于设置学习率下降的方式
# early_stopping用于设定早停,val_loss多次不下降自动结束训练,表示模型基本收敛
#-------------------------------------------------------------------------------#
checkpoint_period = ModelCheckpoint('logs/ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',
monitor='val_loss', save_weights_only=True, save_best_only=False, period=1)
reduce_lr = ExponentDecayScheduler(decay_rate = 0.94, verbose = 1)
early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=1)
tensorboard = TensorBoard(log_dir='logs/')
loss_history = LossHistory('logs/')
#----------------------#
# LFW估计
#----------------------#
test_loader = LFWDataset(dir=lfw_dir_path, pairs_path=lfw_pairs_path, batch_size=32, input_shape=input_shape) if lfw_eval_flag else None
lfw_callback = LFW_callback(test_loader)
#-------------------------------------------------------#
# 0.05用于验证,0.95用于训练
#-------------------------------------------------------#
val_split = 0.05
with open(annotation_path) as f:
lines = f.readlines()
np.random.seed(10101)
np.random.shuffle(lines)
np.random.seed(None)
num_val = int(len(lines)*val_split)
num_train = len(lines) - num_val
if backbone=="mobilenet":
freeze_layer = 81
elif backbone=="inception_resnetv1":
freeze_layer = 440
else:
raise ValueError('Unsupported backbone - `{}`, Use mobilenet, inception_resnetv1.'.format(backbone))
if Freeze_Train:
for i in range(freeze_layer):
model.layers[i].trainable = False
#---------------------------------------------------------#
# 训练分为两个阶段,分别是冻结阶段和解冻阶段。
# 显存不足与数据集大小无关,提示显存不足请调小batch_size。
# 受到BatchNorm层影响,batch_size最小为2,不能为1。
#---------------------------------------------------------#
#---------------------------------------------------------#
# Init_Epoch为起始世代
# Freeze_Epoch为冻结训练的世代
# Epoch总训练世代
# 提示OOM或者显存不足请调小Batch_size
#---------------------------------------------------------#
if True:
#----------------------------------------------------#
# 冻结阶段训练参数
# 此时模型的主干被冻结了,特征提取网络不发生改变
# 占用的显存较小,仅对网络进行微调
#----------------------------------------------------#
Batch_size = 32
Lr = 1e-3
Init_epoch = 0
Freeze_epoch = 50
epoch_step = num_train // Batch_size
epoch_step_val = num_val // Batch_size
if epoch_step == 0 or epoch_step_val == 0:
raise ValueError('数据集过小,无法进行训练,请扩充数据集。')
train_dataset = FacenetDataset(input_shape, lines[:num_train], num_train, num_classes, Batch_size)
val_dataset = FacenetDataset(input_shape, lines[num_train:], num_val, num_classes, Batch_size)
print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, Batch_size))
if eager:
gen = tf.data.Dataset.from_generator(partial(train_dataset.generate), (tf.float32, tf.float32))
gen_val = tf.data.Dataset.from_generator(partial(val_dataset.generate), (tf.float32, tf.float32))
gen = gen.shuffle(buffer_size = Batch_size).prefetch(buffer_size = Batch_size)
gen_val = gen_val.shuffle(buffer_size = Batch_size).prefetch(buffer_size = Batch_size)
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate = Lr, decay_steps = epoch_step, decay_rate=0.94, staircase=True)
optimizer = tf.keras.optimizers.Adam(learning_rate = lr_schedule)
for epoch in range(Init_epoch, Freeze_epoch):
fit_one_epoch(model, loss_history, optimizer, epoch, epoch_step, epoch_step_val, gen, gen_val,
Freeze_epoch, triplet_loss(batch_size=Batch_size), test_loader, lfw_eval_flag)
else:
model.compile(
loss={'Embedding' : triplet_loss(batch_size=Batch_size), 'Softmax' : 'categorical_crossentropy'},
optimizer = Adam(lr=Lr), metrics = {'Softmax' : 'categorical_accuracy'}
)
model.fit_generator(
generator = train_dataset,
steps_per_epoch = epoch_step,
validation_data = val_dataset,
validation_steps = epoch_step_val,
epochs = Freeze_epoch,
initial_epoch = Init_epoch,
use_multiprocessing = True if num_workers > 1 else False,
workers = num_workers,
callbacks = [checkpoint_period, reduce_lr, early_stopping, tensorboard, loss_history, lfw_callback] if lfw_eval_flag else [checkpoint_period, reduce_lr, early_stopping, tensorboard, loss_history]
)
if Freeze_Train:
for i in range(freeze_layer):
model.layers[i].trainable = True
if True:
#----------------------------------------------------#
# 解冻阶段训练参数
# 此时模型的主干不被冻结了,特征提取网络会发生改变
# 占用的显存较大,网络所有的参数都会发生改变
#----------------------------------------------------#
Batch_size = 32
Lr = 1e-4
Freeze_epoch = 50
Epoch = 100
epoch_step = num_train // Batch_size
epoch_step_val = num_val // Batch_size
if epoch_step == 0 or epoch_step_val == 0:
raise ValueError('数据集过小,无法进行训练,请扩充数据集。')
train_dataset = FacenetDataset(input_shape, lines[:num_train], num_train, num_classes, Batch_size)
val_dataset = FacenetDataset(input_shape, lines[num_train:], num_val, num_classes, Batch_size)
print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, Batch_size))
if eager:
gen = tf.data.Dataset.from_generator(partial(train_dataset.generate), (tf.float32, tf.float32))
gen_val = tf.data.Dataset.from_generator(partial(val_dataset.generate), (tf.float32, tf.float32))
gen = gen.shuffle(buffer_size = Batch_size).prefetch(buffer_size = Batch_size)
gen_val = gen_val.shuffle(buffer_size = Batch_size).prefetch(buffer_size = Batch_size)
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate = Lr, decay_steps = epoch_step, decay_rate=0.94, staircase=True)
optimizer = tf.keras.optimizers.Adam(learning_rate = lr_schedule)
for epoch in range(Freeze_epoch, Epoch):
fit_one_epoch(model, loss_history, optimizer, epoch, epoch_step, epoch_step_val, gen, gen_val,
Freeze_epoch, triplet_loss(batch_size=Batch_size), test_loader, lfw_eval_flag)
else:
model.compile(
loss={'Embedding' : triplet_loss(batch_size=Batch_size), 'Softmax' : 'categorical_crossentropy'},
optimizer = Adam(lr=Lr), metrics = {'Softmax' : 'categorical_accuracy'}
)
model.fit_generator(
generator = train_dataset,
steps_per_epoch = epoch_step,
validation_data = val_dataset,
validation_steps = epoch_step_val,
epochs = Epoch,
initial_epoch = Freeze_epoch,
use_multiprocessing = True if num_workers > 1 else False,
workers = num_workers,
callbacks = [checkpoint_period, reduce_lr, early_stopping, tensorboard, loss_history, lfw_callback] if lfw_eval_flag else [checkpoint_period, reduce_lr, early_stopping, tensorboard, loss_history]
)
|
[
"utils.callbacks.LFW_callback",
"functools.partial",
"numpy.random.seed",
"numpy.random.shuffle",
"nets.facenet_training.triplet_loss",
"tensorflow.config.experimental.set_memory_growth",
"tensorflow.keras.optimizers.schedules.ExponentialDecay",
"nets.facenet_training.LFWDataset",
"nets.facenet.facenet",
"numpy.max",
"nets.facenet_training.FacenetDataset",
"tensorflow.keras.optimizers.Adam",
"utils.callbacks.LossHistory",
"utils.callbacks.ExponentDecayScheduler",
"tensorflow.keras.callbacks.TensorBoard",
"tensorflow.config.experimental.list_physical_devices",
"tensorflow.keras.callbacks.EarlyStopping",
"utils.callbacks.ModelCheckpoint"
] |
[((897, 960), 'tensorflow.config.experimental.list_physical_devices', 'tf.config.experimental.list_physical_devices', ([], {'device_type': '"""GPU"""'}), "(device_type='GPU')\n", (941, 960), True, 'import tensorflow as tf\n'), ((982, 1033), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['gpu', '(True)'], {}), '(gpu, True)\n', (1022, 1033), True, 'import tensorflow as tf\n'), ((3776, 3842), 'nets.facenet.facenet', 'facenet', (['input_shape', 'num_classes'], {'backbone': 'backbone', 'mode': '"""train"""'}), "(input_shape, num_classes, backbone=backbone, mode='train')\n", (3783, 3842), False, 'from nets.facenet import facenet\n'), ((4301, 4459), 'utils.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['"""logs/ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5"""'], {'monitor': '"""val_loss"""', 'save_weights_only': '(True)', 'save_best_only': '(False)', 'period': '(1)'}), "('logs/ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',\n monitor='val_loss', save_weights_only=True, save_best_only=False, period=1)\n", (4316, 4459), False, 'from utils.callbacks import ExponentDecayScheduler, LFW_callback, LossHistory, ModelCheckpoint\n'), ((4510, 4560), 'utils.callbacks.ExponentDecayScheduler', 'ExponentDecayScheduler', ([], {'decay_rate': '(0.94)', 'verbose': '(1)'}), '(decay_rate=0.94, verbose=1)\n', (4532, 4560), False, 'from utils.callbacks import ExponentDecayScheduler, LFW_callback, LossHistory, ModelCheckpoint\n'), ((4591, 4661), 'tensorflow.keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_loss"""', 'min_delta': '(0)', 'patience': '(10)', 'verbose': '(1)'}), "(monitor='val_loss', min_delta=0, patience=10, verbose=1)\n", (4604, 4661), False, 'from tensorflow.keras.callbacks import EarlyStopping, TensorBoard\n'), ((4688, 4716), 'tensorflow.keras.callbacks.TensorBoard', 'TensorBoard', ([], {'log_dir': '"""logs/"""'}), "(log_dir='logs/')\n", (4699, 4716), False, 'from 
tensorflow.keras.callbacks import EarlyStopping, TensorBoard\n'), ((4743, 4763), 'utils.callbacks.LossHistory', 'LossHistory', (['"""logs/"""'], {}), "('logs/')\n", (4754, 4763), False, 'from utils.callbacks import ExponentDecayScheduler, LFW_callback, LossHistory, ModelCheckpoint\n'), ((5011, 5036), 'utils.callbacks.LFW_callback', 'LFW_callback', (['test_loader'], {}), '(test_loader)\n', (5023, 5036), False, 'from utils.callbacks import ExponentDecayScheduler, LFW_callback, LossHistory, ModelCheckpoint\n'), ((5284, 5305), 'numpy.random.seed', 'np.random.seed', (['(10101)'], {}), '(10101)\n', (5298, 5305), True, 'import numpy as np\n'), ((5310, 5334), 'numpy.random.shuffle', 'np.random.shuffle', (['lines'], {}), '(lines)\n', (5327, 5334), True, 'import numpy as np\n'), ((5339, 5359), 'numpy.random.seed', 'np.random.seed', (['None'], {}), '(None)\n', (5353, 5359), True, 'import numpy as np\n'), ((847, 861), 'numpy.max', 'np.max', (['labels'], {}), '(labels)\n', (853, 861), True, 'import numpy as np\n'), ((4862, 4961), 'nets.facenet_training.LFWDataset', 'LFWDataset', ([], {'dir': 'lfw_dir_path', 'pairs_path': 'lfw_pairs_path', 'batch_size': '(32)', 'input_shape': 'input_shape'}), '(dir=lfw_dir_path, pairs_path=lfw_pairs_path, batch_size=32,\n input_shape=input_shape)\n', (4872, 4961), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((6853, 6939), 'nets.facenet_training.FacenetDataset', 'FacenetDataset', (['input_shape', 'lines[:num_train]', 'num_train', 'num_classes', 'Batch_size'], {}), '(input_shape, lines[:num_train], num_train, num_classes,\n Batch_size)\n', (6867, 6939), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((6960, 7045), 'nets.facenet_training.FacenetDataset', 'FacenetDataset', (['input_shape', 'lines[num_train:]', 'num_val', 'num_classes', 'Batch_size'], {}), '(input_shape, lines[num_train:], num_val, num_classes, Batch_size\n )\n', (6974, 7045), False, 'from 
nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((9821, 9907), 'nets.facenet_training.FacenetDataset', 'FacenetDataset', (['input_shape', 'lines[:num_train]', 'num_train', 'num_classes', 'Batch_size'], {}), '(input_shape, lines[:num_train], num_train, num_classes,\n Batch_size)\n', (9835, 9907), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((9928, 10013), 'nets.facenet_training.FacenetDataset', 'FacenetDataset', (['input_shape', 'lines[num_train:]', 'num_val', 'num_classes', 'Batch_size'], {}), '(input_shape, lines[num_train:], num_val, num_classes, Batch_size\n )\n', (9942, 10013), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((7636, 7769), 'tensorflow.keras.optimizers.schedules.ExponentialDecay', 'tf.keras.optimizers.schedules.ExponentialDecay', ([], {'initial_learning_rate': 'Lr', 'decay_steps': 'epoch_step', 'decay_rate': '(0.94)', 'staircase': '(True)'}), '(initial_learning_rate=Lr,\n decay_steps=epoch_step, decay_rate=0.94, staircase=True)\n', (7682, 7769), True, 'import tensorflow as tf\n'), ((7824, 7875), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'learning_rate': 'lr_schedule'}), '(learning_rate=lr_schedule)\n', (7848, 7875), True, 'import tensorflow as tf\n'), ((10604, 10737), 'tensorflow.keras.optimizers.schedules.ExponentialDecay', 'tf.keras.optimizers.schedules.ExponentialDecay', ([], {'initial_learning_rate': 'Lr', 'decay_steps': 'epoch_step', 'decay_rate': '(0.94)', 'staircase': '(True)'}), '(initial_learning_rate=Lr,\n decay_steps=epoch_step, decay_rate=0.94, staircase=True)\n', (10650, 10737), True, 'import tensorflow as tf\n'), ((10792, 10843), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'learning_rate': 'lr_schedule'}), '(learning_rate=lr_schedule)\n', (10816, 10843), True, 'import tensorflow as tf\n'), ((7245, 7276), 'functools.partial', 'partial', (['train_dataset.generate'], {}), 
'(train_dataset.generate)\n', (7252, 7276), False, 'from functools import partial\n'), ((7357, 7386), 'functools.partial', 'partial', (['val_dataset.generate'], {}), '(val_dataset.generate)\n', (7364, 7386), False, 'from functools import partial\n'), ((10213, 10244), 'functools.partial', 'partial', (['train_dataset.generate'], {}), '(train_dataset.generate)\n', (10220, 10244), False, 'from functools import partial\n'), ((10325, 10354), 'functools.partial', 'partial', (['val_dataset.generate'], {}), '(val_dataset.generate)\n', (10332, 10354), False, 'from functools import partial\n'), ((8091, 8126), 'nets.facenet_training.triplet_loss', 'triplet_loss', ([], {'batch_size': 'Batch_size'}), '(batch_size=Batch_size)\n', (8103, 8126), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((8340, 8351), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': 'Lr'}), '(lr=Lr)\n', (8344, 8351), False, 'from tensorflow.keras.optimizers import Adam\n'), ((11054, 11089), 'nets.facenet_training.triplet_loss', 'triplet_loss', ([], {'batch_size': 'Batch_size'}), '(batch_size=Batch_size)\n', (11066, 11089), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((11303, 11314), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': 'Lr'}), '(lr=Lr)\n', (11307, 11314), False, 'from tensorflow.keras.optimizers import Adam\n'), ((8233, 8268), 'nets.facenet_training.triplet_loss', 'triplet_loss', ([], {'batch_size': 'Batch_size'}), '(batch_size=Batch_size)\n', (8245, 8268), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n'), ((11196, 11231), 'nets.facenet_training.triplet_loss', 'triplet_loss', ([], {'batch_size': 'Batch_size'}), '(batch_size=Batch_size)\n', (11208, 11231), False, 'from nets.facenet_training import FacenetDataset, LFWDataset, triplet_loss\n')]
|
import urllib.request as req
import pyodbc
import requests
from bs4 import BeautifulSoup as bs
import time
import random
import json
def sqlquote( value ):
"""Naive SQL quoting
All values except NULL are returned as SQL strings in single quotes,
with any embedded quotes doubled.
"""
if value is None or value=="":
return 'NULL'
return "'{}'".format(str(value).replace( "'", "''" ))
# for page in range(1900000001,9999999999):
for page in range(2000700783, 2000728783):
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
url = 'secretUrl'.format(page)
html = requests.get(url, headers = headers).text
time.sleep(random.uniform(1,4))
json_data = json.loads( html )
print( 'articleNo : ' + str(page) )
if 'error' not in json_data:
print( 'table : ' + str(len(json_data)) )
print()
for table in json_data:
if table == "landPrice":
print( '응 landPrice는 있어' )
for column in json_data[table]:
if column == 'landPriceTax':
print("와우우우우웅웅우우우우우웅우!!!!!!")
print('=========================')
# print( table + " : " + str( len(json_data[table]) ) )
# for column in table:
# print( "column : " + column )
# print( type( column ) )
# if len(column) > 0:
# print( type(column[0]) )
print()
print('==========================================')
print()
else:
print()
print( '단지정보가 없습니다.' )
print( '=========================================' )
|
[
"json.loads",
"requests.get",
"random.uniform"
] |
[((795, 811), 'json.loads', 'json.loads', (['html'], {}), '(html)\n', (805, 811), False, 'import json\n'), ((699, 733), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (711, 733), False, 'import requests\n'), ((756, 776), 'random.uniform', 'random.uniform', (['(1)', '(4)'], {}), '(1, 4)\n', (770, 776), False, 'import random\n')]
|
# -*- coding: UTF-8 -*-
# Copyright 2017-2020 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""General demo data for Lino Avanti.
- Course providers and courses
"""
# from django.conf import settings
# from lino.utils import mti
from lino.utils import Cycler # join_words
from lino.utils.mldbc import babel_named as named
from lino.api import rt, dd, _
from lino.modlib.users.choicelists import UserTypes
from lino_xl.lib.cal.choicelists import Recurrencies
from lino_xl.lib.courses.choicelists import EnrolmentStates
course_stages = [
_("Dispens"),
_("Eingeschrieben"),
_("Abgeschlossen"),
_("Abgebrochen"),
_("Ausgeschlossen")]
trends_config = []
trends_config.append((
_("Info Integration"),
[ "!Erstgespräch",
"Sprachtest",
"Einschreibung in Sprachkurs",
"Einschreibung in Integrationskurs",
"!Bilanzgespräch"]))
trends_config.append((_("Alphabetisation"), course_stages))
trends_config.append((_("A1"), course_stages))
trends_config.append((_("A2"), course_stages))
trends_config.append((_("Citizen course"), course_stages))
trends_config.append((_("Professional integration"), [
"Begleitet vom DSBE",
"Begleitet vom ADG",
"Erwerbstätigkeit",
]))
def objects():
Line = rt.models.courses.Line
Teacher = dd.plugins.courses.teacher_model
Course = rt.models.courses.Course
Topic = rt.models.courses.Topic
Enrolment = rt.models.courses.Enrolment
CourseStates = rt.models.courses.CourseStates
User = rt.models.users.User
EventType = rt.models.cal.EventType
Guest = rt.models.cal.Guest
GuestRole = rt.models.cal.GuestRole
GuestStates = rt.models.cal.GuestStates
EntryStates = rt.models.cal.EntryStates
Event = rt.models.cal.Event
Person = rt.models.contacts.Person
CommentType = rt.models.comments.CommentType
TrendStage = rt.models.trends.TrendStage
TrendArea = rt.models.trends.TrendArea
for area, stages in trends_config:
ta = named(TrendArea, area)
yield ta
for stage in stages:
kw = dict(trend_area=ta)
if stage[0] == "!":
stage = stage[1:]
kw.update(subject_column=True)
yield named(TrendStage, stage, **kw)
yield EventType(**dd.str2kw('name', _("First contact")))
kw = dd.str2kw('name', _("Lesson"))
kw.update(dd.str2kw('event_label', _("Lesson")))
event_type = EventType(**kw)
yield event_type
pupil = named(GuestRole, _("Pupil"))
yield pupil
yield named(GuestRole, _("Assistant"))
topic_citizen = named(Topic, _("Citizen course"))
yield topic_citizen
topic_lang = named(Topic, _("Language courses"))
yield topic_lang
kw.update(topic=topic_citizen)
kw = dict(event_type=event_type, guest_role=pupil)
yield named(Line, _("Citizen course"), **kw)
kw.update(topic=topic_lang)
alpha = named(Line, _("Alphabetisation"), **kw)
yield alpha
yield named(Line, _("German for beginners"), **kw)
yield named(Line, _("German A1+"), **kw)
yield named(Line, _("German A2"), **kw)
yield named(Line, _("German A2 (women)"), **kw)
yield named(CommentType, _("Phone call"))
yield named(CommentType, _("Visit"))
yield named(CommentType, _("Individual consultation"))
yield named(CommentType, _("Internal meeting"))
yield named(CommentType, _("Meeting with partners"))
laura = Teacher(first_name="Laura", last_name="Lieblig")
yield laura
yield User(username="laura", user_type=UserTypes.teacher,
partner=laura)
yield User(username="nathalie", user_type=UserTypes.user)
yield User(username="nelly", user_type=UserTypes.user)
yield User(username="audrey", user_type=UserTypes.auditor)
yield User(username="martina", user_type=UserTypes.coordinator)
yield User(username="sandra", user_type=UserTypes.secretary)
USERS = Cycler(User.objects.exclude(
user_type__in=(UserTypes.auditor, UserTypes.admin)))
kw = dict(monday=True, tuesday=True, thursday=True, friday=True)
kw.update(
line=alpha,
start_date=dd.demo_date(-30),
start_time="9:00", end_time="12:00",
max_date=dd.demo_date(10),
state=CourseStates.active,
every_unit=Recurrencies.daily,
user=USERS.pop(),
teacher=laura,
max_places=5)
yield Course(**kw)
kw.update(start_time="14:00", end_time="17:00", user=USERS.pop(),
max_places=15)
yield Course(**kw)
kw.update(start_time="18:00", end_time="20:00", user=USERS.pop(),
max_places=15)
yield Course(**kw)
PUPILS = Cycler(dd.plugins.courses.pupil_model.objects.all())
# print(20170302, dd.plugins.courses.pupil_model.objects.all())
COURSES = Cycler(Course.objects.all())
STATES = Cycler(EnrolmentStates.objects())
def fits(course, pupil):
if course.max_places and course.get_free_places() == 0:
return False
if Enrolment.objects.filter(course=course, pupil=pupil).count():
return False
return True
def enrol(pupil):
course = COURSES.pop()
if fits(course, pupil):
kw = dict(user=USERS.pop(), course=course, pupil=pupil)
kw.update(request_date=dd.demo_date(-i))
kw.update(state=STATES.pop())
return Enrolment(**kw)
for i, p in enumerate(
dd.plugins.courses.pupil_model.objects.order_by('id')):
yield enrol(p)
if i % 2 == 0:
yield enrol(p)
if i % 3 == 0:
yield enrol(p)
ar = rt.login('robin')
for obj in Course.objects.all():
obj.update_auto_events(ar)
# Suggested calendar entries older than 7 days should be marked as
# either took_place or cancelled.
qs = Event.objects.filter(
start_date__lte=dd.demo_date(-7),
state=EntryStates.suggested)
for i, obj in enumerate(qs):
if i % 9:
obj.state = EntryStates.took_place
else:
obj.state = EntryStates.cancelled
obj.full_clean()
obj.save()
# participants of events which took place should be marked as
# either absent or present or excused:
qs = Guest.objects.filter(
event__start_date__lte=dd.demo_date(-7),
event__state=EntryStates.took_place).order_by('id')
STATES = Cycler(GuestStates.get_list_items())
for i, obj in enumerate(qs):
obj.state = STATES.pop()
# if i % 8:
# obj.state = GuestStates.present
# elif i % 3:
# obj.state = GuestStates.missing
# else:
# obj.state = GuestStates.excused
obj.full_clean()
obj.save()
|
[
"lino.api.dd.plugins.courses.pupil_model.objects.all",
"lino_xl.lib.courses.choicelists.EnrolmentStates.objects",
"lino.api.rt.login",
"lino.api.dd.demo_date",
"lino.api._",
"lino.utils.mldbc.babel_named",
"lino.api.dd.plugins.courses.pupil_model.objects.order_by"
] |
[((561, 573), 'lino.api._', '_', (['"""Dispens"""'], {}), "('Dispens')\n", (562, 573), False, 'from lino.api import rt, dd, _\n'), ((579, 598), 'lino.api._', '_', (['"""Eingeschrieben"""'], {}), "('Eingeschrieben')\n", (580, 598), False, 'from lino.api import rt, dd, _\n'), ((604, 622), 'lino.api._', '_', (['"""Abgeschlossen"""'], {}), "('Abgeschlossen')\n", (605, 622), False, 'from lino.api import rt, dd, _\n'), ((628, 644), 'lino.api._', '_', (['"""Abgebrochen"""'], {}), "('Abgebrochen')\n", (629, 644), False, 'from lino.api import rt, dd, _\n'), ((650, 669), 'lino.api._', '_', (['"""Ausgeschlossen"""'], {}), "('Ausgeschlossen')\n", (651, 669), False, 'from lino.api import rt, dd, _\n'), ((5630, 5647), 'lino.api.rt.login', 'rt.login', (['"""robin"""'], {}), "('robin')\n", (5638, 5647), False, 'from lino.api import rt, dd, _\n'), ((718, 739), 'lino.api._', '_', (['"""Info Integration"""'], {}), "('Info Integration')\n", (719, 739), False, 'from lino.api import rt, dd, _\n'), ((913, 933), 'lino.api._', '_', (['"""Alphabetisation"""'], {}), "('Alphabetisation')\n", (914, 933), False, 'from lino.api import rt, dd, _\n'), ((973, 980), 'lino.api._', '_', (['"""A1"""'], {}), "('A1')\n", (974, 980), False, 'from lino.api import rt, dd, _\n'), ((1020, 1027), 'lino.api._', '_', (['"""A2"""'], {}), "('A2')\n", (1021, 1027), False, 'from lino.api import rt, dd, _\n'), ((1067, 1086), 'lino.api._', '_', (['"""Citizen course"""'], {}), "('Citizen course')\n", (1068, 1086), False, 'from lino.api import rt, dd, _\n'), ((1126, 1155), 'lino.api._', '_', (['"""Professional integration"""'], {}), "('Professional integration')\n", (1127, 1155), False, 'from lino.api import rt, dd, _\n'), ((1998, 2020), 'lino.utils.mldbc.babel_named', 'named', (['TrendArea', 'area'], {}), '(TrendArea, area)\n', (2003, 2020), True, 'from lino.utils.mldbc import babel_named as named\n'), ((2356, 2367), 'lino.api._', '_', (['"""Lesson"""'], {}), "('Lesson')\n", (2357, 2367), False, 'from lino.api import 
rt, dd, _\n'), ((2506, 2516), 'lino.api._', '_', (['"""Pupil"""'], {}), "('Pupil')\n", (2507, 2516), False, 'from lino.api import rt, dd, _\n'), ((2611, 2630), 'lino.api._', '_', (['"""Citizen course"""'], {}), "('Citizen course')\n", (2612, 2630), False, 'from lino.api import rt, dd, _\n'), ((2687, 2708), 'lino.api._', '_', (['"""Language courses"""'], {}), "('Language courses')\n", (2688, 2708), False, 'from lino.api import rt, dd, _\n'), ((2928, 2948), 'lino.api._', '_', (['"""Alphabetisation"""'], {}), "('Alphabetisation')\n", (2929, 2948), False, 'from lino.api import rt, dd, _\n'), ((4675, 4719), 'lino.api.dd.plugins.courses.pupil_model.objects.all', 'dd.plugins.courses.pupil_model.objects.all', ([], {}), '()\n', (4717, 4719), False, 'from lino.api import rt, dd, _\n'), ((4852, 4877), 'lino_xl.lib.courses.choicelists.EnrolmentStates.objects', 'EnrolmentStates.objects', ([], {}), '()\n', (4875, 4877), False, 'from lino_xl.lib.courses.choicelists import EnrolmentStates\n'), ((5440, 5493), 'lino.api.dd.plugins.courses.pupil_model.objects.order_by', 'dd.plugins.courses.pupil_model.objects.order_by', (['"""id"""'], {}), "('id')\n", (5487, 5493), False, 'from lino.api import rt, dd, _\n'), ((2408, 2419), 'lino.api._', '_', (['"""Lesson"""'], {}), "('Lesson')\n", (2409, 2419), False, 'from lino.api import rt, dd, _\n'), ((2561, 2575), 'lino.api._', '_', (['"""Assistant"""'], {}), "('Assistant')\n", (2562, 2575), False, 'from lino.api import rt, dd, _\n'), ((2844, 2863), 'lino.api._', '_', (['"""Citizen course"""'], {}), "('Citizen course')\n", (2845, 2863), False, 'from lino.api import rt, dd, _\n'), ((2994, 3019), 'lino.api._', '_', (['"""German for beginners"""'], {}), "('German for beginners')\n", (2995, 3019), False, 'from lino.api import rt, dd, _\n'), ((3049, 3064), 'lino.api._', '_', (['"""German A1+"""'], {}), "('German A1+')\n", (3050, 3064), False, 'from lino.api import rt, dd, _\n'), ((3094, 3108), 'lino.api._', '_', (['"""German A2"""'], {}), "('German 
A2')\n", (3095, 3108), False, 'from lino.api import rt, dd, _\n'), ((3138, 3160), 'lino.api._', '_', (['"""German A2 (women)"""'], {}), "('German A2 (women)')\n", (3139, 3160), False, 'from lino.api import rt, dd, _\n'), ((3198, 3213), 'lino.api._', '_', (['"""Phone call"""'], {}), "('Phone call')\n", (3199, 3213), False, 'from lino.api import rt, dd, _\n'), ((3244, 3254), 'lino.api._', '_', (['"""Visit"""'], {}), "('Visit')\n", (3245, 3254), False, 'from lino.api import rt, dd, _\n'), ((3285, 3313), 'lino.api._', '_', (['"""Individual consultation"""'], {}), "('Individual consultation')\n", (3286, 3313), False, 'from lino.api import rt, dd, _\n'), ((3344, 3365), 'lino.api._', '_', (['"""Internal meeting"""'], {}), "('Internal meeting')\n", (3345, 3365), False, 'from lino.api import rt, dd, _\n'), ((3396, 3422), 'lino.api._', '_', (['"""Meeting with partners"""'], {}), "('Meeting with partners')\n", (3397, 3422), False, 'from lino.api import rt, dd, _\n'), ((4139, 4156), 'lino.api.dd.demo_date', 'dd.demo_date', (['(-30)'], {}), '(-30)\n', (4151, 4156), False, 'from lino.api import rt, dd, _\n'), ((4220, 4236), 'lino.api.dd.demo_date', 'dd.demo_date', (['(10)'], {}), '(10)\n', (4232, 4236), False, 'from lino.api import rt, dd, _\n'), ((5885, 5901), 'lino.api.dd.demo_date', 'dd.demo_date', (['(-7)'], {}), '(-7)\n', (5897, 5901), False, 'from lino.api import rt, dd, _\n'), ((2235, 2265), 'lino.utils.mldbc.babel_named', 'named', (['TrendStage', 'stage'], {}), '(TrendStage, stage, **kw)\n', (2240, 2265), True, 'from lino.utils.mldbc import babel_named as named\n'), ((2307, 2325), 'lino.api._', '_', (['"""First contact"""'], {}), "('First contact')\n", (2308, 2325), False, 'from lino.api import rt, dd, _\n'), ((5305, 5321), 'lino.api.dd.demo_date', 'dd.demo_date', (['(-i)'], {}), '(-i)\n', (5317, 5321), False, 'from lino.api import rt, dd, _\n'), ((6314, 6330), 'lino.api.dd.demo_date', 'dd.demo_date', (['(-7)'], {}), '(-7)\n', (6326, 6330), False, 'from lino.api import 
rt, dd, _\n')]
|
import argparse
import json
from tqdm import tqdm
def align_ws(old_token, new_token):
    """Return *new_token* with its trailing whitespace aligned to *old_token*.

    If *old_token* ends in a space, the result ends in a space; otherwise any
    single trailing space on *new_token* is stripped. The token text itself is
    never changed.

    Parameters:
        old_token: token being replaced (its trailing-space convention wins).
        new_token: replacement token to align.

    Returns:
        *new_token*, possibly with one trailing space added or removed.
    """
    if not new_token:
        # Nothing to align on an empty replacement token.
        return new_token
    # An empty old_token carries no trailing space. (The previous indexing
    # with old_token[-1] raised IndexError on empty strings.)
    old_ends_space = bool(old_token) and old_token[-1] == " "
    new_ends_space = new_token[-1] == " "
    if old_ends_space == new_ends_space:
        return new_token
    if old_ends_space:
        return new_token + " "
    return new_token[:-1]
def process_one(i, json_dict, generated_texts):
    """Collect usable replacement summaries for one source record.

    For each entity, and for each head/child of each entity relation in
    *json_dict*, pick at most one generated summary that differs from the
    record's original summary, and pair it with an index tuple describing
    which slot it belongs to.

    Parameters:
        i: zero-based index of this record in the source jsonl file.
        json_dict: parsed jsonl record carrying 'summary', 'entitys' and
            'entity_relations'.
        generated_texts: parsed generation record for the same index,
            carrying 'new_ent_summary' (list of beam lists, one per entity)
            and 'new_rel_summary' (flat list of beam lists whose layout
            mirrors the relation structure -- see note below).

    Returns:
        List of (summaries, (i, 0, tag, len(summaries))) tuples where
        summaries holds zero or one replacement texts and tag names the
        slot, e.g. 'entity_0', 'relation_1_head0', 'relation_1_child2'.
    """
    ori_summ = json_dict['summary']
    text_outs = []
    for ent_i, (ori_entity, new_entity_summaries) in enumerate(zip(json_dict['entitys'], generated_texts['new_ent_summary'])):
        valid_entity_summaries = []
        # Scan (at most) the top-3 beams and keep the first one that is not a
        # verbatim copy of the original summary, then stop -- the result list
        # holds zero or one items.  NOTE(review): iterating a set() makes
        # "first" nondeterministic across runs; confirm this is acceptable.
        for new_entity_summary in set(new_entity_summaries[:3]):
            if new_entity_summary == ori_summ:
                continue
            valid_entity_summaries.append(new_entity_summary)
            break
        text_outs.append((valid_entity_summaries, (i, 0, f'entity_{ent_i}', len(valid_entity_summaries))))
    if json_dict['entity_relations']:
        # j indexes generated_texts['new_rel_summary'].  The increments below
        # assume the generator emitted, per relation: one chunk-only slot,
        # then (head-only, chunk+head) pairs for every head, then
        # (child-only, chunk+child) pairs for every child; only the combined
        # chunk+head / chunk+child slots are read.  Presumably this matches
        # the generation script's layout -- verify there before changing the
        # j += 1 bookkeeping.
        j = 0
        for rel_i, x in enumerate(json_dict['entity_relations']):
            num_heads = len(x['heads'])
            num_children = len(x['children'])
            j += 1 # entity chunk
            for head_i in range(num_heads):
                j += 1 # only head
                valid_summaries = []
                # Same pick-first-different-beam rule as for entities above.
                for new_rel_summary in set(generated_texts['new_rel_summary'][j][:3]):
                    if new_rel_summary == ori_summ:
                        continue
                    valid_summaries.append(new_rel_summary)
                    break
                j += 1 # both chunk and head
                text_outs.append((valid_summaries, (i, 0, f'relation_{rel_i}_head{head_i}', len(valid_summaries))))
            for head_i in range(num_children):
                j += 1 # only child
                valid_summaries = []
                for new_rel_summary in set(generated_texts['new_rel_summary'][j][:3]):
                    if new_rel_summary == ori_summ:
                        continue
                    valid_summaries.append(new_rel_summary)
                    break
                j += 1 # both chunk and child
                text_outs.append((valid_summaries, (i, 0, f'relation_{rel_i}_child{head_i}', len(valid_summaries))))
    return text_outs
def main():
    """CLI entry point: pair generated summaries with their jsonl source
    records and write the surviving beams plus their metadata tuples."""
    arg_parser = argparse.ArgumentParser()
    for arg_name in ('jsonl_file', 'generated_file', 'out_file', 'new_other_file'):
        arg_parser.add_argument(arg_name)
    args = arg_parser.parse_args()

    # One parsed record per line of the generation dump.
    with open(args.generated_file) as gen_f:
        generated_texts = [json.loads(raw) for raw in gen_f]

    with open(args.jsonl_file) as src_f:
        source_lines = src_f.readlines()

    results = []
    for idx, raw_line in tqdm(enumerate(source_lines)):
        record = json.loads(raw_line)
        # Only records with at least one entity or entity relation produce output.
        if len(record['entitys']) + len(record['entity_relations']) > 0:
            results.append(process_one(idx, record, generated_texts[idx]))

    with open(args.out_file, 'w') as text_f, open(args.new_other_file, 'w') as meta_f:
        for record_out in results:
            for beams, meta in record_out:
                for beam in beams:
                    text_f.write(beam + '\n')
                meta_f.write(' '.join(str(field) for field in meta) + '\n')


if __name__ == '__main__':
    main()
|
[
"argparse.ArgumentParser",
"json.loads"
] |
[((2331, 2356), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2354, 2356), False, 'import argparse\n'), ((2865, 2881), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2875, 2881), False, 'import json\n'), ((2671, 2687), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2681, 2687), False, 'import json\n')]
|