content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
from pathlib import Path
from random import randint
import pygame as pg
from core.tools import Point
| [
6738,
3108,
8019,
1330,
10644,
198,
6738,
4738,
1330,
43720,
600,
198,
198,
11748,
12972,
6057,
355,
23241,
198,
198,
6738,
4755,
13,
31391,
1330,
6252,
628
] | 3.851852 | 27 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'vertdef.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
5178,
7822,
7560,
422,
3555,
334,
72,
2393,
705,
1851,
4299,
13,
9019,
6,
198,
2,
198,
2,
15622,
416,
25,
9485,
48,
83,
20,
12454,
2438,
17301,
642,
13,
21... | 2.855422 | 83 |
import sys
if __name__ == '__main__':
file_name = sys.argv[1]
with open(file_name) as f:
raw_lst = f.read()
lst = raw_lst.splitlines()
print(day06(lst))
print(day06_prime(lst)) | [
11748,
25064,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
2393,
62,
3672,
796,
25064,
13,
853,
85,
58,
16,
60,
628,
220,
220,
220,
351,
1280,
7,
7753,
62,
3672,
8,
355,
277,
25,
1... | 1.954955 | 111 |
from train import Trainer
from option import args
if __name__ == '__main__':
main() | [
6738,
4512,
1330,
31924,
198,
6738,
3038,
1330,
26498,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
1388,
3419
] | 3.214286 | 28 |
import httplib2
import oauth2client
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
class Calendar:
"""Stores data to make Google Calendar API happy"""
secret_path = ""
application_name = ""
credentials = ""
http = ""
service = ""
def __init__(self, secret_path, application_name):
"""Instantiate Calendar, keep track of secret path"""
self.SCOPES = 'https://www.googleapis.com/auth/calendar'
self.CREDENTIALS_DIR = "credentials/"
self.secret_path = secret_path
self.application_name = application_name
def get_credentials(self):
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
if not os.path.exists(self.CREDENTIALS_DIR):
os.makedirs(self.CREDENTIALS_DIR)
credential_path = os.path.join(self.CREDENTIALS_DIR,'calendar.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(self.secret_path+'/client_secret.json', self.SCOPES)
flow.user_agent = self.application_name
credentials = tools.run_flow(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
| [
11748,
1841,
489,
571,
17,
198,
11748,
267,
18439,
17,
16366,
198,
11748,
28686,
198,
6738,
2471,
291,
75,
1153,
1330,
9412,
198,
6738,
267,
18439,
17,
16366,
1330,
5456,
198,
6738,
267,
18439,
17,
16366,
1330,
4899,
198,
198,
4871,
2... | 2.590381 | 603 |
import os
import os.path
import seissolxdmf
import shutil
import recreateXdmf
import numpy as np
import argparse
parser = argparse.ArgumentParser(description="resample output file and write as binary files")
parser.add_argument("xdmfFilename", help="xdmf output file")
parser.add_argument("--add2prefix", help="string to append to prefix for new file", type=str, default="_resampled")
parser.add_argument("--Data", nargs="+", metavar=("variable"), default=(""), help="Data to resample (example SRs)")
parser.add_argument("--downsample", help="write one out of n output", type=int)
parser.add_argument("--precision", type=str, choices=["float", "double"], default="float", help="precision of output file")
parser.add_argument("--backend", type=str, choices=["hdf5", "raw"], default="hdf5", help="backend used: raw (.bin file), hdf5 (.h5)")
parser.add_argument("--last", dest="last", default=False, action="store_true", help="output last time step")
parser.add_argument("--idt", nargs="+", help="list of time step to write (ex $(seq 7 3 28))", type=int)
parser.add_argument(
"--xfilter",
nargs=2,
metavar=("xmin", "xmax"),
help="output only cells with x center coordinate in range xmin xmax",
type=float,
)
parser.add_argument(
"--yfilter",
nargs=2,
metavar=("ymin", "ymax"),
help="output only cells with y center coordinate in range ymin ymax",
type=float,
)
parser.add_argument(
"--zfilter",
nargs=2,
metavar=("zmin", "zmax"),
help="output only cells with z center coordinate in range zmin zmax",
type=float,
)
args = parser.parse_args()
sx = seissolxdmfExtended(args.xdmfFilename)
xyz = sx.ReadGeometry()
connect = sx.ReadConnect()
spatial_filtering = (args.xfilter or args.yfilter) or args.zfilter
nElements = connect.shape[0]
if spatial_filtering:
print("Warning: spatial filtering significantly slows down this script")
ids = range(0, sx.nElements)
xyzc = (xyz[connect[:, 0], :] + xyz[connect[:, 1], :] + xyz[connect[:, 2], :]) / 3.0
if args.xfilter:
id0 = filter_cells(xyzc[:, 0], args.xfilter)
ids = np.intersect1d(ids, id0) if len(ids) else id0
if args.yfilter:
id0 = filter_cells(xyzc[:, 1], args.yfilter)
ids = np.intersect1d(ids, id0) if len(ids) else id0
if args.zfilter:
id0 = filter_cells(xyzc[:, 2], args.zfilter)
ids = np.intersect1d(ids, id0) if len(ids) else id0
if len(ids):
connect = connect[ids, :]
nElements = connect.shape[0]
if nElements != sx.nElements:
print(f"extracting {nElements} cells out of {sx.nElements}")
else:
spatial_filtering = False
else:
raise ValueError("all elements are outside filter range")
ndt = sx.ndt
if args.last:
indices = [ndt - 1]
if args.idt or args.downsample:
print("last option cannot be used together with idt and downsample options")
exit()
else:
if args.idt and args.downsample:
print("idt and downsample options cannot be used together")
exit()
elif not args.downsample:
indices = args.idt
else:
indices = range(0, ndt, args.downsample)
# Check if input is in hdf5 format or not
dataLocation, data_prec, MemDimension = sx.GetDataLocationPrecisionMemDimension("partition")
splitArgs = dataLocation.split(":")
if len(splitArgs) == 2:
isHdf5 = True
else:
isHdf5 = False
if args.precision == "double":
myDtype = "float64"
myprec = 8
else:
myDtype = "float32"
myprec = 4
if args.backend == "raw":
write2Binary = True
else:
write2Binary = False
prefix = os.path.splitext(args.xdmfFilename)[0]
prefix_new = recreateXdmf.generate_new_prefix(prefix, args.add2prefix)
# Create folders if necessary
if write2Binary:
if not os.path.exists(prefix_new + "_cell"):
os.mkdir(prefix_new + "_cell")
if not os.path.exists(prefix_new + "_cell/mesh0/"):
os.mkdir(prefix_new + "_cell/mesh0/")
if not os.path.exists(prefix_new + "_vertex"):
os.mkdir(prefix_new + "_vertex")
if not os.path.exists(prefix_new + "_vertex/mesh0/"):
os.mkdir(prefix_new + "_vertex/mesh0/")
# Write geometry and connect
if write2Binary:
fn2 = prefix_new + "_cell/mesh0/connect.bin"
if isHdf5:
# write Connect
output_file = open(fn2, "wb")
connect.tofile(output_file)
output_file.close()
else:
shutil.copy2(os.path.splitext(args.xdmfFilename)[0] + "_cell/mesh0/connect.bin", fn2)
print("done writing " + fn2)
# write geometry
fn3 = prefix_new + "_vertex/mesh0/geometry.bin"
output_file = open(fn3, "wb")
xyz.tofile(output_file)
output_file.close()
print("done writing " + fn3)
else:
import h5py
# write geometry to hdf5 format
h5fv = h5py.File(prefix_new + "_vertex.h5", "w")
h5fv.create_dataset("/mesh0/geometry", data=xyz)
h5fv.close()
print("done writing " + prefix_new + "_vertex.h5")
# write connect to hdf5 format
h5fc = h5py.File(prefix_new + "_cell.h5", "w")
h5fc.create_dataset("/mesh0/connect", data=connect)
# Write data items
for ida, sdata in enumerate(args.Data):
if write2Binary:
fname2 = prefix_new + "_cell/mesh0/" + args.Data[ida] + ".bin"
output_file = open(fname2, "wb")
else:
dset = h5fc.create_dataset("/mesh0/" + args.Data[ida], (len(indices), nElements), dtype=myDtype)
# read only one row
print(sdata, end=" ", flush=True)
for kk, i in enumerate(indices):
if (kk % 10 == 0) and kk > 0:
print(kk)
else:
print(kk, end=" ", flush=True)
if i >= ndt:
print("ignoring index %d>=ndt=%d" % (i, ndt))
continue
if spatial_filtering:
myData = sx.ReadData(args.Data[ida], idt=i)[ids]
else:
myData = sx.ReadData(args.Data[ida], idt=i)
if write2Binary:
myData.astype(myDtype).tofile(output_file)
else:
dset[kk, :] = myData[:]
if write2Binary:
output_file.close()
print("done writing " + fname2)
if not write2Binary:
h5fc.close()
print("done writing " + prefix_new + "_cell.h5")
# Now recreate the Xdmf
prefix = os.path.splitext(args.xdmfFilename)[0]
# Read all parameters from the xdmf file of SeisSol (if any)
dt = recreateXdmf.ReadDtFromXdmf(args.xdmfFilename)
nvertex = recreateXdmf.ReadNvertexFromXdmf(args.xdmfFilename)
ndt, nmem = recreateXdmf.ReadNdtNmemFromXdmf(args.xdmfFilename)
recreateXdmf.recreateXdmf(prefix, prefix_new, nvertex, nElements, nElements, dt, indices, args.Data, not write2Binary, myprec, args.add2prefix)
| [
11748,
28686,
198,
11748,
28686,
13,
6978,
198,
11748,
384,
747,
349,
24954,
76,
69,
198,
11748,
4423,
346,
198,
11748,
32049,
55,
36020,
69,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
1822,
29572,
198,
198,
48610,
796,
1822,
29572... | 2.338591 | 2,853 |
from apps.base.mappers import (PagingRequestSchema, PagingResponseSchema)
from apps.story.mappers import (StorySchema, GetStoryRequestSchema, PostStoryRequestSchema,
PutStoryRequestSchema, DeleteStoryRequestSchema)
from contracts.base import (LIMIT_NAME, OFFSET_NAME, SORT_NAME, COUNT_NAME, CURRENT_NAME, PREV_NAME, NEXT_NAME)
from contracts.story import (STORY_ID, NAME, SLUG, DESCRIPTION, CONTENT, CATEGORY_ID, GetStoryRequest, PostStoryRequest,
PutStoryRequest,
DeleteStoryRequest)
TEST_COUNT = 10
TEST_CURRENT = "http://sample"
TEST_PREV = "http://sample/prev"
TEST_NEXT = "http://sample/next"
TEST_LIMIT = 10
TEST_OFFSET = 10
TEST_SORT = "sort"
TEST_PATH = "http://sample/path"
PAGING_REQUEST = {
LIMIT_NAME: TEST_LIMIT,
OFFSET_NAME: TEST_OFFSET,
SORT_NAME: TEST_SORT
}
PAGING_RESPONSE = {
COUNT_NAME: TEST_COUNT,
CURRENT_NAME: TEST_CURRENT,
PREV_NAME: TEST_PREV,
NEXT_NAME: TEST_NEXT
}
TEST_STORY_ID = 1
TEST_STORY_NAME = 'Story name 1'
TEST_STORY_SLUG = 'story-name-1'
TEST_STORY_DESCRIPTION = 'story description'
TEST_STORY_CONTENT = 'story content'
TEST_STORY_CATEGORY_ID = 1
TEST_STORY = {
STORY_ID: TEST_STORY_ID,
NAME: TEST_STORY_NAME,
SLUG: TEST_STORY_SLUG,
DESCRIPTION: TEST_STORY_DESCRIPTION,
CONTENT: TEST_STORY_CONTENT,
CATEGORY_ID: TEST_STORY_CATEGORY_ID
}
TEST_CATEGORY_POST = {
NAME: TEST_STORY_NAME,
SLUG: TEST_STORY_SLUG,
DESCRIPTION: TEST_STORY_DESCRIPTION,
CONTENT: TEST_STORY_CONTENT,
CATEGORY_ID: TEST_STORY_CATEGORY_ID
}
TEST_CATEGORY_PUT = {
NAME: TEST_STORY_NAME,
SLUG: TEST_STORY_SLUG,
DESCRIPTION: TEST_STORY_DESCRIPTION,
CONTENT: TEST_STORY_CONTENT,
CATEGORY_ID: TEST_STORY_CATEGORY_ID
}
| [
6738,
6725,
13,
8692,
13,
76,
46629,
1330,
357,
47,
3039,
18453,
27054,
2611,
11,
350,
3039,
31077,
27054,
2611,
8,
198,
6738,
6725,
13,
13571,
13,
76,
46629,
1330,
357,
11605,
27054,
2611,
11,
3497,
11605,
18453,
27054,
2611,
11,
294... | 2.176259 | 834 |
import json
from rec_process import rec_for_you_process
| [
11748,
33918,
198,
6738,
664,
62,
14681,
1330,
664,
62,
1640,
62,
5832,
62,
14681,
628
] | 3.5625 | 16 |
# Generated by Django 2.2.6 on 2020-03-11 13:09
from django.db import migrations
import enumfields.fields
import leasing.enums
| [
2,
2980,
515,
416,
37770,
362,
13,
17,
13,
21,
319,
12131,
12,
3070,
12,
1157,
1511,
25,
2931,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
198,
11748,
33829,
25747,
13,
25747,
198,
11748,
42150,
13,
268,
5700,
628
] | 3.071429 | 42 |
from collections import defaultdict
import json
########################### | [
6738,
17268,
1330,
4277,
11600,
198,
11748,
33918,
628,
197,
14468,
7804,
21017
] | 5.923077 | 13 |
import logging
from abc import ABC
from django.conf import settings
from django.db.models.base import Model
from json2model.services import data_type_transform
from json2model.services.dynamic_model import dynamic_model_utils as dm_utils
from json2model.services.dynamic_model.i_json_iterator import IJsonIterator
logger = logging.getLogger(__name__)
RELATE_TO_USER = getattr(settings, 'RELATE_TO_USER', False)
| [
11748,
18931,
198,
6738,
450,
66,
1330,
9738,
198,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
198,
6738,
42625,
14208,
13,
9945,
13,
27530,
13,
8692,
1330,
9104,
198,
198,
6738,
33918,
17,
19849,
13,
30416,
1330,
1366,
62,
4906,
... | 3.224806 | 129 |
#!/usr/bin/env python
import os
import sys
import subprocess
from androguard.core.bytecodes.apk import APK as APK
if (len(sys.argv) < 5):
print("main.py <apk_dir> <result dir> <device_serial_number>"
"<device_codename>")
exit(0)
apk_dir = os.path.abspath(sys.argv[1])
res_dir = os.path.abspath(sys.argv[2])
dev = sys.argv[3]
code = sys.argv[4]
if not (os.path.isdir(apk_dir)):
print("{0} does not exist".format(apk_dir))
exit(0)
if not (os.path.isdir(res_dir)):
os.mkdir(res_dir)
done_before = list()
log_path = os.path.join(res_dir, "analysed.txt")
if os.path.isfile(log_path):
log = open(log_path, "r")
for line in log.read().splitlines():
done_before.append(os.path.basename(line))
log.close()
log = open(log_path, "a")
f_output = open(os.path.join(res_dir, "rawoutput.txt"), "a")
print "Ready to process {0}".format(apk_dir)
cur_dir = os.curdir
# os.chdir(os.path.join(os.path.dirname(__file__),
# "BranchExplorer"))
for root, dlist, flist in os.walk(apk_dir):
for f in flist:
if (f in done_before):
print "{0} has already been analysed. Skipped".format(f)
continue
apk = os.path.join(root, f)
try:
a = APK(apk)
except:
continue
print "Sample: {}".format(os.path.basename(apk))
output = os.path.join(res_dir, "{0}.res".format(apk.split("/")[-1]))
# cmd = ["sh", "/Users/rado/blare/twrpbackup/flash.sh", dev, code]
# subprocess.call(cmd)
cmd = ["python3",
"-m",
"branchexp.main",
# "--device",
# dev,
# "--device-code",
# code,
# "--run-type",
# "grodd",
# "--max-runs",
# "2",
"--output-dir",
output,
apk
]
p = subprocess.Popen(cmd, stdout=f_output, stderr=f_output)
p.wait()
cmd = ["adb", "-s", dev, "shell", "pm", "uninstall", a.get_package()]
p = subprocess.Popen(cmd, stdout=f_output, stderr=f_output)
p.wait()
log.write("{0}\n".format(apk))
log.flush()
os.chdir(cur_dir)
log.close()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
28686,
198,
11748,
25064,
198,
11748,
850,
14681,
198,
6738,
290,
3828,
84,
446,
13,
7295,
13,
26327,
40148,
13,
499,
74,
1330,
3486,
42,
355,
3486,
42,
198,
198,
361,
35... | 1.877907 | 1,204 |
from a10sdk.common.A10BaseClass import A10BaseClass
class Stats(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param icmp_freed: {"optional": true, "size": "8", "type": "number", "oid": "3", "format": "counter"}
:param tcp_freed: {"optional": true, "size": "8", "type": "number", "oid": "10", "format": "counter"}
:param udp: {"optional": true, "size": "8", "type": "number", "oid": "5", "format": "counter"}
:param icmp_total: {"optional": true, "size": "8", "type": "number", "oid": "4", "format": "counter"}
:param users: {"optional": true, "size": "8", "type": "number", "oid": "1", "format": "counter"}
:param tcp: {"optional": true, "size": "8", "type": "number", "oid": "9", "format": "counter"}
:param tcp_rsvd: {"optional": true, "size": "8", "type": "number", "oid": "12", "format": "counter"}
:param tcp_total: {"optional": true, "size": "8", "type": "number", "oid": "11", "format": "counter"}
:param udp_rsvd: {"optional": true, "size": "8", "type": "number", "oid": "8", "format": "counter"}
:param icmp: {"optional": true, "size": "8", "type": "number", "oid": "2", "format": "counter"}
:param udp_freed: {"optional": true, "size": "8", "type": "number", "oid": "6", "format": "counter"}
:param udp_total: {"optional": true, "size": "8", "type": "number", "oid": "7", "format": "counter"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
class Pool(A10BaseClass):
"""Class Description::
Statistics for the object pool.
Class pool supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param pool_name: {"description": "Specify pool name or pool group", "format": "string-rlx", "minLength": 1, "oid": "1001", "optional": false, "maxLength": 63, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/nat/pool/{pool_name}/stats`.
"""
| [
6738,
257,
940,
21282,
74,
13,
11321,
13,
32,
940,
14881,
9487,
1330,
317,
940,
14881,
9487,
628,
198,
4871,
20595,
7,
32,
940,
14881,
9487,
2599,
198,
220,
220,
220,
220,
198,
220,
220,
220,
37227,
1212,
1398,
857,
407,
1104,
8740,... | 2.754057 | 801 |
from imdb import IMDb
instance = IMDb()
| [
6738,
545,
9945,
1330,
8959,
43832,
198,
198,
39098,
796,
8959,
43832,
3419,
628,
628,
628,
628,
628
] | 2.777778 | 18 |
# -*- coding:utf-8 -*-
# 4293
"""
299
200
333
340
399
443
531
504
461
536
493
562
503
423
516
316
554
500
543
491
537
509
459
467
551
489
519
614
576
510
498
542
"""
"""
425.3813
339.3123
425.4084
426.7195
346.5619
342.1731
346.4786
389.6992
427.9460
426.4859
393.6239
428.2821
329.8853
367.9668
372.7045
376.2823
365.4773
426.2885
421.0901
424.6047
379.8000
423.1902
370.4271
373.4348
398.8455
390.0929
421.6984
420.9566
420.1010
317.4631
426.0412
426.9891
426.5954
389.0763
425.9510
427.4420
425.9278
427.8777
427.8219
425.4135
426.6216
426.7444
425.1452
425.9407
395.6746
426.2799
424.7812
424.7365
423.4941
424.7979
423.1833
424.7138
424.1059
422.8674
423.0326
422.8661
422.3526
422.3204
421.3811
382.9742
"""
normalization('data/1')
| [
2,
532,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
628,
198,
2,
604,
31675,
628,
198,
37811,
198,
22579,
198,
2167,
198,
20370,
198,
23601,
198,
28771,
198,
34938,
198,
20,
3132,
198,
33580,
198,
40652,
198,
44468,
198,
43134,
1... | 1.847134 | 471 |
import collections
from sim_objs import *
from mapper import *
# ############################################ Scher ########################################### #
| [
11748,
17268,
198,
198,
6738,
985,
62,
672,
8457,
1330,
1635,
198,
6738,
285,
11463,
1330,
1635,
198,
198,
2,
1303,
29113,
7804,
21017,
220,
47956,
220,
1303,
29113,
7804,
2235,
1303,
198
] | 5.030303 | 33 |
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from bootcamp.confcreator.forms import ConfCreatorForm
from bootcamp.confcreator.models import ConfCreator
from django.contrib.auth.decorators import login_required
from bootcamp.inventories.models import Inventory
from bootcamp.utils.loadconfig import get_vars
import json,requests
from play_util.AnsiblePlaybook import AnsiblePlaybook
import os.path
from bootcamp.settings import MEDIA_ROOT
from shutil import copy
@login_required
@login_required
@login_required()
@login_required()
# @login_required
# def createconfinstance(request):
# if request.method == 'POST':
# # id=3
# id = request.POST.get('confid')
# variableinjson = str(get_variables(id).variable)
# variables = json.loads(variableinjson)
# form = ConfForm(request.POST or None, variables=variables)
# if form.is_valid():
# output={}
# for (key, value) in form.cleaned_data.iteritems():
# output[key]=str(value)
#
# del output['name']
# confinstance = ConfTemplateInstance()
# confinstance.create_user = request.user
# confinstance.name = str(get_variables(id).name)
# confinstance.varvalues = json.dumps(output)
# confinstance.conftemplate = get_object_or_404(ConfTemplate, pk=id)
# confinstance.save()
# conftemp = get_object_or_404(ConfTemplate, pk=confinstance.conftemplate.id)
# srcfilename = str(conftemp.filename()) #str(conftemp.jinjatemplate)
# destfilename = "conffile"+str(confinstance.id)+".txt"
# confinstance.confoutfilename = destfilename
# confinstance.save()
# create_playbook(output,'/var/ansible/templateGen/'+srcfilename,'/etc/templateout/'+destfilename)
#
# filegenstatus = 'false'
# if os.path.isfile('/etc/templateout/'+destfilename):
# filegenstatus = 'true'
#
# playbookName = 'EMC-Edge-RTR-Active-template.yml'
# inventory = 'dev'
# playbookinst=AnsiblePlaybook(playbookName,inventory,'/etc/ansibout/output.out')
# Output=playbookinst.runPlaybook()
# fileRead = open('/etc/ansibout/output.out')
# Output = fileRead.read()
# Output=Output.replace("[0;32m","")
# Output=Output.replace("[0;31m","")
# Output=Output.replace("[0m"," ")
# Output=Output.replace("\x1b"," ")
#
# # variable=form.cleaned_data.get('custom_0')
# return render(request, "confgenerator/configurations.html", {'temporary': Output, 'confid':confinstance.id, 'filegenstatus' :filegenstatus})
# # return redirect("create_user_success")
# else:
# # id=3
# id = request.GET.get('confid')
# variableinjson = str(get_variables(id).variable)
# variables = json.loads(variableinjson)
# form = ConfForm(variables=variables)
#
# return render(request, "confgenerator/form.html", {'form': form, 'conf': id})
#
#
# def create_playbook(variables,src,dest):
# filepath = '/var/ansible' + '/templateGen/'
# target = open(filepath+'EMC-Edge-RTR-Active-template.yml', 'w')
# target.write('---')
# target.write("\n")
# target.write("- hosts: template")
# target.write("\n")
# target.write(" connection: local")
# target.write("\n")
# target.write(" vars:")
# target.write("\n")
# for key,value in variables.iteritems():
# target.write(" "+key+": "+value)
# target.write("\n")
# target.write(" tasks:")
# target.write("\n")
# target.write(" - name: GENERATE THE OUTPUT FILE")
# target.write("\n")
# target.write(" template: src="+src+" dest="+dest)
# @login_required
# def tag(request, tag_name):
# tags = Tag.objects.filter(tag=tag_name)
# articles = []
# for tag in tags:
# if tag.article.status == Article.PUBLISHED:
# articles.append(tag.article)
# return _articles(request, articles)
#
#
# @login_required
# def write(request):
# if request.method == 'POST':
# form = ArticleForm(request.POST)
# if form.is_valid():
# article = Article()
# article.create_user = request.user
# article.title = form.cleaned_data.get('title')
# article.content = form.cleaned_data.get('content')
# status = form.cleaned_data.get('status')
# if status in [Article.PUBLISHED, Article.DRAFT]:
# article.status = form.cleaned_data.get('status')
# article.save()
# tags = form.cleaned_data.get('tags')
# article.create_tags(tags)
# return redirect('/articles/')
# else:
# form = ArticleForm()
# return render(request, 'articles/write.html', {'form': form})
#
#
# @login_required
# def drafts(request):
# drafts = Article.objects.filter(create_user=request.user,
# status=Article.DRAFT)
# return render(request, 'articles/drafts.html', {'drafts': drafts})
#
#
# @login_required
# def edit(request, id):
# tags = ''
# if id:
# article = get_object_or_404(Article, pk=id)
# for tag in article.get_tags():
# tags = u'{0} {1}'.format(tags, tag.tag)
# tags = tags.strip()
# else:
# article = Article(create_user=request.user)
#
# if article.create_user.id != request.user.id:
# return redirect('home')
#
# if request.POST:
# form = ArticleForm(request.POST, instance=article)
# if form.is_valid():
# form.save()
# return redirect('/articles/')
# else:
# form = ArticleForm(instance=article, initial={'tags': tags})
# return render(request, 'articles/edit.html', {'form': form})
#
#
# @login_required
# @ajax_required
# def preview(request):
# try:
# if request.method == 'POST':
# content = request.POST.get('content')
# html = 'Nothing to display :('
# if len(content.strip()) > 0:
# html = markdown.markdown(content, safe_mode='escape')
# return HttpResponse(html)
# else:
# return HttpResponseBadRequest()
#
# except Exception, e:
# return HttpResponseBadRequest()
#
#
# @login_required
# @ajax_required
# def comment(request):
# try:
# if request.method == 'POST':
# article_id = request.POST.get('article')
# article = Article.objects.get(pk=article_id)
# comment = request.POST.get('comment')
# comment = comment.strip()
# if len(comment) > 0:
# article_comment = ArticleComment(user=request.user,
# article=article,
# comment=comment)
# article_comment.save()
# html = u''
# for comment in article.get_comments():
# html = u'{0}{1}'.format(html, render_to_string('articles/partial_article_comment.html',
# {'comment': comment}))
#
# return HttpResponse(html)
#
# else:
# return HttpResponseBadRequest()
#
# except Exception, e:
# return HttpResponseBadRequest()
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
11,
18941,
11,
651,
62,
15252,
62,
273,
62,
26429,
198,
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
31077,
22069,
18453,
11,
367,
29281,
31077,
198,
6738,
42625,
14208,
13,
7295,
13,... | 2.161731 | 3,512 |
import scipy.optimize
import numpy as np
#analytic soln
G=1
m=1
M=1
mu=1
t=1
eps=1
turn=np.zeros(1) | [
11748,
629,
541,
88,
13,
40085,
1096,
198,
11748,
299,
32152,
355,
45941,
198,
2,
38200,
13370,
1540,
77,
198,
38,
28,
16,
198,
76,
28,
16,
198,
44,
28,
16,
198,
30300,
28,
16,
198,
83,
28,
16,
198,
25386,
28,
16,
198,
15344,
... | 1.867925 | 53 |
########################################
# MIT License
#
# Copyright (c) 2020 Miguel Ramos Pernas
########################################
'''
Definition of the interface functions and classes with :mod:`iminuit`.
'''
from ..base import data_types
from ..base import parameters
from . import core
import contextlib
import functools
import iminuit
__all__ = ['MinuitMinimizer']
# Definition of the errors. This is given from the nature of the FCNs. If this is
# changed the output of the FCNs must change accordingly. A value of 1 means
# that the output of the FCNs is a chi-square-like function.
ERRORDEF = 1.
MINUIT = 'minuit'
def use_const_cache(method):
'''
Use the constant cache of the evaluator when calling the method.
'''
@functools.wraps(method)
return wrapper
def registry_to_minuit_input(registry):
'''
Transform a registry of parameters into a dictionary to be parsed by Minuit.
:param registry: registry of parameters.
:type registry: Registry(Parameter)
:returns: Minuit configuration dictionary.
:rtype: dict
'''
values = {v.name: v.value for v in registry}
# 0 for Minuit, None for MinKit
errors = {f'error_{v.name}': v.error for v in registry if v.error is not None}
limits = {f'limit_{v.name}': v.bounds for v in registry}
const = {f'fix_{v.name}': v.constant for v in registry}
return dict(errordef=ERRORDEF, **values, **errors, **limits, **const)
| [
29113,
7804,
198,
2,
17168,
13789,
198,
2,
198,
2,
15069,
357,
66,
8,
12131,
29825,
36692,
350,
1142,
292,
198,
29113,
7804,
198,
7061,
6,
198,
36621,
286,
262,
7071,
5499,
290,
6097,
351,
1058,
4666,
25,
63,
320,
259,
5013,
44646,
... | 3.145336 | 461 |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.15.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x00\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1c\x1f\x24\
\xc6\x09\x17\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\x63\x60\x40\
\x05\xff\xcf\xc3\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\xc5\x70\x0e\
\xa3\x21\x9c\xc3\x68\x88\x61\x1a\x0a\x00\x00\x6d\x84\x09\x75\x37\
\x9e\xd9\x23\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x30\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x0a\x00\x00\x00\x07\x08\x06\x00\x00\x00\x31\xac\xdc\x63\
\x00\x00\x04\xb0\x69\x54\x58\x74\x58\x4d\x4c\x3a\x63\x6f\x6d\x2e\
\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\x00\x00\x00\x3c\x3f\
\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\x69\x6e\x3d\x22\xef\
\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\x30\x4d\x70\x43\x65\
\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\x7a\x6b\x63\x39\x64\
\x22\x3f\x3e\x0a\x3c\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x20\x78\
\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\x62\x65\x3a\x6e\x73\
\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\x6d\x70\x74\x6b\x3d\
\x22\x58\x4d\x50\x20\x43\x6f\x72\x65\x20\x35\x2e\x35\x2e\x30\x22\
\x3e\x0a\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\x78\x6d\x6c\x6e\
\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\
\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\
\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\
\x73\x23\x22\x3e\x0a\x20\x20\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\
\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\
\x74\x3d\x22\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x65\
\x78\x69\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\
\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x65\x78\x69\x66\x2f\x31\x2e\
\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x74\x69\
\x66\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x74\x69\x66\x66\x2f\x31\x2e\x30\
\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x70\x68\x6f\x74\
\x6f\x73\x68\x6f\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\
\x61\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x0a\x20\x20\x20\x20\x78\
\x6d\x6c\x6e\x73\x3a\x73\x74\x45\x76\x74\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\
\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\
\x73\x6f\x75\x72\x63\x65\x45\x76\x65\x6e\x74\x23\x22\x0a\x20\x20\
\x20\x65\x78\x69\x66\x3a\x50\x69\x78\x65\x6c\x58\x44\x69\x6d\x65\
\x6e\x73\x69\x6f\x6e\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x65\x78\
\x69\x66\x3a\x50\x69\x78\x65\x6c\x59\x44\x69\x6d\x65\x6e\x73\x69\
\x6f\x6e\x3d\x22\x37\x22\x0a\x20\x20\x20\x65\x78\x69\x66\x3a\x43\
\x6f\x6c\x6f\x72\x53\x70\x61\x63\x65\x3d\x22\x31\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x49\x6d\x61\x67\x65\x57\x69\x64\x74\x68\
\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x49\x6d\
\x61\x67\x65\x4c\x65\x6e\x67\x74\x68\x3d\x22\x37\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\
\x55\x6e\x69\x74\x3d\x22\x32\x22\x0a\x20\x20\x20\x74\x69\x66\x66\
\x3a\x58\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\
\x2e\x30\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x59\x52\x65\x73\
\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\x2e\x30\x22\x0a\x20\
\x20\x20\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x3a\x43\x6f\x6c\x6f\
\x72\x4d\x6f\x64\x65\x3d\x22\x33\x22\x0a\x20\x20\x20\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3a\x49\x43\x43\x50\x72\x6f\x66\x69\x6c\
\x65\x3d\x22\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\x39\x36\x36\
\x2d\x32\x2e\x31\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x6f\x64\
\x69\x66\x79\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x33\x33\x3a\x31\x34\x2b\x30\x32\x3a\
\x30\x30\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x65\x74\x61\x64\
\x61\x74\x61\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x33\x33\x3a\x31\x34\x2b\x30\x32\x3a\
\x30\x30\x22\x3e\x0a\x20\x20\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x48\
\x69\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\
\x3a\x53\x65\x71\x3e\x0a\x20\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\
\x6c\x69\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\x3a\x61\
\x63\x74\x69\x6f\x6e\x3d\x22\x70\x72\x6f\x64\x75\x63\x65\x64\x22\
\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\x3a\x73\x6f\x66\
\x74\x77\x61\x72\x65\x41\x67\x65\x6e\x74\x3d\x22\x41\x66\x66\x69\
\x6e\x69\x74\x79\x20\x44\x65\x73\x69\x67\x6e\x65\x72\x20\x31\x2e\
\x39\x2e\x32\x22\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\
\x3a\x77\x68\x65\x6e\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\x2d\x33\
\x31\x54\x31\x32\x3a\x33\x33\x3a\x31\x34\x2b\x30\x32\x3a\x30\x30\
\x22\x2f\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x53\x65\
\x71\x3e\x0a\x20\x20\x20\x3c\x2f\x78\x6d\x70\x4d\x4d\x3a\x48\x69\
\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x3c\x2f\x72\x64\x66\x3a\x44\
\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x0a\x20\x3c\x2f\x72\
\x64\x66\x3a\x52\x44\x46\x3e\x0a\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x3e\x0a\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\
\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x48\x8b\x5b\x5e\x00\x00\x01\x83\
\x69\x43\x43\x50\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\x39\x36\
\x36\x2d\x32\x2e\x31\x00\x00\x28\x91\x75\x91\xcf\x2b\x44\x51\x14\
\xc7\x3f\x66\x68\xfc\x18\x8d\x62\x61\x31\x65\x12\x16\x42\x83\x12\
\x1b\x8b\x99\x18\x0a\x8b\x99\x51\x7e\x6d\x66\x9e\x79\x33\x6a\xde\
\x78\xbd\x37\xd2\x64\xab\x6c\xa7\x28\xb1\xf1\x6b\xc1\x5f\xc0\x56\
\x59\x2b\x45\xa4\x64\xa7\xac\x89\x0d\x7a\xce\x9b\x51\x23\x99\x73\
\x3b\xf7\x7c\xee\xf7\xde\x73\xba\xf7\x5c\x70\x44\xd3\x8a\x66\x56\
\xfa\x41\xcb\x64\x8d\x70\x28\xe0\x9b\x99\x9d\xf3\xb9\x9e\xa8\xa2\
\x85\x1a\x3a\xf1\xc6\x14\x53\x9f\x8c\x8c\x46\x29\x6b\xef\xb7\x54\
\xd8\xf1\xba\xdb\xae\x55\xfe\xdc\xbf\x56\xb7\x98\x30\x15\xa8\xa8\
\x16\x1e\x56\x74\x23\x2b\x3c\x26\x3c\xb1\x9a\xd5\x6d\xde\x12\x6e\
\x52\x52\xb1\x45\xe1\x13\xe1\x2e\x43\x2e\x28\x7c\x63\xeb\xf1\x22\
\x3f\xdb\x9c\x2c\xf2\xa7\xcd\x46\x34\x1c\x04\x47\x83\xb0\x2f\xf9\
\x8b\xe3\xbf\x58\x49\x19\x9a\xb0\xbc\x9c\x36\x2d\xbd\xa2\xfc\xdc\
\xc7\x7e\x89\x3b\x91\x99\x8e\x48\x6c\x15\xf7\x62\x12\x26\x44\x00\
\x1f\xe3\x8c\x10\x64\x80\x5e\x86\x64\x1e\xa0\x9b\x3e\x7a\x64\x45\
\x99\x7c\x7f\x21\x7f\x8a\x65\xc9\x55\x64\xd6\xc9\x61\xb0\x44\x92\
\x14\x59\xba\x44\x5d\x91\xea\x09\x89\xaa\xe8\x09\x19\x69\x72\x76\
\xff\xff\xf6\xd5\x54\xfb\xfb\x8a\xd5\xdd\x01\xa8\x7a\xb4\xac\xd7\
\x76\x70\x6d\xc2\x57\xde\xb2\x3e\x0e\x2c\xeb\xeb\x10\x9c\x0f\x70\
\x9e\x29\xe5\x2f\xef\xc3\xe0\x9b\xe8\xf9\x92\xd6\xb6\x07\x9e\x75\
\x38\xbd\x28\x69\xf1\x6d\x38\xdb\x80\xe6\x7b\x3d\x66\xc4\x0a\x92\
\x53\xdc\xa1\xaa\xf0\x72\x0c\xf5\xb3\xd0\x78\x05\xb5\xf3\xc5\x9e\
\xfd\xec\x73\x74\x07\xd1\x35\xf9\xaa\x4b\xd8\xd9\x85\x0e\x39\xef\
\x59\xf8\x06\x8e\xfd\x67\xf8\xfd\x8a\x18\x97\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x97\x49\x44\x41\x54\x18\x95\x6d\xcf\xb1\x6a\x02\x41\
\x14\x85\xe1\x6f\xb7\xb6\xd0\x27\x48\x3d\x56\x69\x03\xb1\xb4\x48\
\x3b\x6c\xa5\xf1\x39\xf6\x59\x02\x56\x42\xba\x61\x0a\x0b\x3b\x1b\
\x1b\x6b\x41\x18\x02\x29\x6d\xe3\xbe\x82\xcd\x06\x16\xd9\xdb\xdd\
\x9f\xff\x5c\xee\xa9\x62\x2a\x13\x4c\x73\x13\x6e\x46\x26\xa6\xf2\
\x82\xae\x46\x8b\xdf\x98\xca\xfb\x88\xb4\xc0\x0f\xda\x1a\x5b\x74\
\xd8\xc7\x54\xc2\x40\x9a\x63\x8f\x3f\x7c\x55\x3d\x7c\xc5\x09\x77\
\xbc\xa1\xc2\x19\x33\x2c\x72\x13\x2e\xd5\xe0\xc2\x12\x07\x5c\x51\
\x23\xe0\x23\x37\xe1\xa8\x4f\x0e\x7f\xda\x60\xd7\xaf\x9f\xb9\x09\
\xdf\x63\x05\xff\xe5\x75\x4c\x65\xf5\xcc\x1f\x0d\x33\x2c\x83\xb6\
\x06\x44\x83\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\x9c\x53\x34\xfc\x5d\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x0b\x02\x04\x6d\
\x98\x1b\x69\x00\x00\x00\x29\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\xff\xcf\xa3\x08\x18\x32\x32\x30\x20\x0b\x32\x1a\
\x32\x30\x30\x42\x98\x10\x41\x46\x43\x14\x13\x50\xb5\xa3\x01\x00\
\xd6\x10\x07\xd2\x2f\x48\xdf\x4a\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x00\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1f\x0d\xfc\
\x52\x2b\x9c\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\x63\x60\x40\
\x05\x73\x3e\xc0\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\xc5\x70\x4e\
\x8a\x00\x9c\x93\x22\x80\x61\x1a\x0a\x00\x00\x29\x95\x08\xaf\x88\
\xac\xba\x34\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\xad\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x07\x00\x00\x00\x0a\x08\x06\x00\x00\x00\x78\xcc\x44\x0d\
\x00\x00\x05\x52\x69\x54\x58\x74\x58\x4d\x4c\x3a\x63\x6f\x6d\x2e\
\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\x00\x00\x00\x3c\x3f\
\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\x69\x6e\x3d\x22\xef\
\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\x30\x4d\x70\x43\x65\
\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\x7a\x6b\x63\x39\x64\
\x22\x3f\x3e\x0a\x3c\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x20\x78\
\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\x62\x65\x3a\x6e\x73\
\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\x6d\x70\x74\x6b\x3d\
\x22\x58\x4d\x50\x20\x43\x6f\x72\x65\x20\x35\x2e\x35\x2e\x30\x22\
\x3e\x0a\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\x78\x6d\x6c\x6e\
\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\
\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\
\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\
\x73\x23\x22\x3e\x0a\x20\x20\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\
\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\
\x74\x3d\x22\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x65\
\x78\x69\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\
\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x65\x78\x69\x66\x2f\x31\x2e\
\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x74\x69\
\x66\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x74\x69\x66\x66\x2f\x31\x2e\x30\
\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x70\x68\x6f\x74\
\x6f\x73\x68\x6f\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\
\x61\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x0a\x20\x20\x20\x20\x78\
\x6d\x6c\x6e\x73\x3a\x73\x74\x45\x76\x74\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\
\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\
\x73\x6f\x75\x72\x63\x65\x45\x76\x65\x6e\x74\x23\x22\x0a\x20\x20\
\x20\x65\x78\x69\x66\x3a\x50\x69\x78\x65\x6c\x58\x44\x69\x6d\x65\
\x6e\x73\x69\x6f\x6e\x3d\x22\x37\x22\x0a\x20\x20\x20\x65\x78\x69\
\x66\x3a\x50\x69\x78\x65\x6c\x59\x44\x69\x6d\x65\x6e\x73\x69\x6f\
\x6e\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x65\x78\x69\x66\x3a\x43\
\x6f\x6c\x6f\x72\x53\x70\x61\x63\x65\x3d\x22\x31\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x49\x6d\x61\x67\x65\x57\x69\x64\x74\x68\
\x3d\x22\x37\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x49\x6d\x61\
\x67\x65\x4c\x65\x6e\x67\x74\x68\x3d\x22\x31\x30\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\
\x55\x6e\x69\x74\x3d\x22\x32\x22\x0a\x20\x20\x20\x74\x69\x66\x66\
\x3a\x58\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\
\x2e\x30\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x59\x52\x65\x73\
\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\x2e\x30\x22\x0a\x20\
\x20\x20\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x3a\x43\x6f\x6c\x6f\
\x72\x4d\x6f\x64\x65\x3d\x22\x33\x22\x0a\x20\x20\x20\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3a\x49\x43\x43\x50\x72\x6f\x66\x69\x6c\
\x65\x3d\x22\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\x39\x36\x36\
\x2d\x32\x2e\x31\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x6f\x64\
\x69\x66\x79\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x34\x33\x3a\x33\x35\x2b\x30\x32\x3a\
\x30\x30\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x65\x74\x61\x64\
\x61\x74\x61\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x34\x33\x3a\x33\x35\x2b\x30\x32\x3a\
\x30\x30\x22\x3e\x0a\x20\x20\x20\x3c\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x41\x6c\x74\x3e\
\x0a\x20\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x6c\x69\x20\x78\x6d\
\x6c\x3a\x6c\x61\x6e\x67\x3d\x22\x78\x2d\x64\x65\x66\x61\x75\x6c\
\x74\x22\x3e\x62\x72\x61\x6e\x63\x68\x5f\x63\x6c\x6f\x73\x65\x3c\
\x2f\x72\x64\x66\x3a\x6c\x69\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\
\x64\x66\x3a\x41\x6c\x74\x3e\x0a\x20\x20\x20\x3c\x2f\x64\x63\x3a\
\x74\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\x3c\x78\x6d\x70\x4d\x4d\
\x3a\x48\x69\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x20\x20\x3c\x72\
\x64\x66\x3a\x53\x65\x71\x3e\x0a\x20\x20\x20\x20\x20\x3c\x72\x64\
\x66\x3a\x6c\x69\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\
\x3a\x61\x63\x74\x69\x6f\x6e\x3d\x22\x70\x72\x6f\x64\x75\x63\x65\
\x64\x22\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\x3a\x73\
\x6f\x66\x74\x77\x61\x72\x65\x41\x67\x65\x6e\x74\x3d\x22\x41\x66\
\x66\x69\x6e\x69\x74\x79\x20\x44\x65\x73\x69\x67\x6e\x65\x72\x20\
\x31\x2e\x39\x2e\x32\x22\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\
\x76\x74\x3a\x77\x68\x65\x6e\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x34\x33\x3a\x33\x35\x2b\x30\x32\x3a\
\x30\x30\x22\x2f\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\
\x53\x65\x71\x3e\x0a\x20\x20\x20\x3c\x2f\x78\x6d\x70\x4d\x4d\x3a\
\x48\x69\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x3c\x2f\x72\x64\x66\
\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x0a\x20\x3c\
\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x3c\x2f\x78\x3a\x78\x6d\
\x70\x6d\x65\x74\x61\x3e\x0a\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\
\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x24\xe1\x35\x97\x00\x00\
\x01\x83\x69\x43\x43\x50\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\
\x39\x36\x36\x2d\x32\x2e\x31\x00\x00\x28\x91\x75\x91\xcf\x2b\x44\
\x51\x14\xc7\x3f\x66\x68\xfc\x18\x8d\x62\x61\x31\x65\x12\x16\x42\
\x83\x12\x1b\x8b\x99\x18\x0a\x8b\x99\x51\x7e\x6d\x66\x9e\x79\x33\
\x6a\xde\x78\xbd\x37\xd2\x64\xab\x6c\xa7\x28\xb1\xf1\x6b\xc1\x5f\
\xc0\x56\x59\x2b\x45\xa4\x64\xa7\xac\x89\x0d\x7a\xce\x9b\x51\x23\
\x99\x73\x3b\xf7\x7c\xee\xf7\xde\x73\xba\xf7\x5c\x70\x44\xd3\x8a\
\x66\x56\xfa\x41\xcb\x64\x8d\x70\x28\xe0\x9b\x99\x9d\xf3\xb9\x9e\
\xa8\xa2\x85\x1a\x3a\xf1\xc6\x14\x53\x9f\x8c\x8c\x46\x29\x6b\xef\
\xb7\x54\xd8\xf1\xba\xdb\xae\x55\xfe\xdc\xbf\x56\xb7\x98\x30\x15\
\xa8\xa8\x16\x1e\x56\x74\x23\x2b\x3c\x26\x3c\xb1\x9a\xd5\x6d\xde\
\x12\x6e\x52\x52\xb1\x45\xe1\x13\xe1\x2e\x43\x2e\x28\x7c\x63\xeb\
\xf1\x22\x3f\xdb\x9c\x2c\xf2\xa7\xcd\x46\x34\x1c\x04\x47\x83\xb0\
\x2f\xf9\x8b\xe3\xbf\x58\x49\x19\x9a\xb0\xbc\x9c\x36\x2d\xbd\xa2\
\xfc\xdc\xc7\x7e\x89\x3b\x91\x99\x8e\x48\x6c\x15\xf7\x62\x12\x26\
\x44\x00\x1f\xe3\x8c\x10\x64\x80\x5e\x86\x64\x1e\xa0\x9b\x3e\x7a\
\x64\x45\x99\x7c\x7f\x21\x7f\x8a\x65\xc9\x55\x64\xd6\xc9\x61\xb0\
\x44\x92\x14\x59\xba\x44\x5d\x91\xea\x09\x89\xaa\xe8\x09\x19\x69\
\x72\x76\xff\xff\xf6\xd5\x54\xfb\xfb\x8a\xd5\xdd\x01\xa8\x7a\xb4\
\xac\xd7\x76\x70\x6d\xc2\x57\xde\xb2\x3e\x0e\x2c\xeb\xeb\x10\x9c\
\x0f\x70\x9e\x29\xe5\x2f\xef\xc3\xe0\x9b\xe8\xf9\x92\xd6\xb6\x07\
\x9e\x75\x38\xbd\x28\x69\xf1\x6d\x38\xdb\x80\xe6\x7b\x3d\x66\xc4\
\x0a\x92\x53\xdc\xa1\xaa\xf0\x72\x0c\xf5\xb3\xd0\x78\x05\xb5\xf3\
\xc5\x9e\xfd\xec\x73\x74\x07\xd1\x35\xf9\xaa\x4b\xd8\xd9\x85\x0e\
\x39\xef\x59\xf8\x06\x8e\xfd\x67\xf8\xfd\x8a\x18\x97\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\
\x9c\x18\x00\x00\x00\x72\x49\x44\x41\x54\x18\x95\x6d\xcf\x31\x0a\
\xc2\x50\x14\x44\xd1\xe8\x02\xb4\x57\x08\xd6\x49\x61\x99\x4a\x43\
\x74\x15\x82\xab\x49\x36\x28\xee\x40\x04\xdb\xa8\x95\x58\x78\x2c\
\xf2\x09\xe1\xf3\x07\xa6\x9a\xfb\xe0\xbe\x0c\x1b\xb4\x58\x64\x71\
\x70\x30\xe4\x82\x55\x0a\x38\xe3\x8b\x1b\x8a\x14\x70\xc4\x1b\x3d\
\x76\x29\x60\x8b\x07\x3e\xa8\xe6\xd1\xfe\x0b\x9d\x85\x8e\x57\x0d\
\x5e\x78\xa2\x9e\x0e\xa7\x20\x74\x47\x39\x1d\xf6\xe1\x95\x2b\xd6\
\xb1\x44\x8e\x0e\xcb\x58\xf0\x0f\x52\x8a\x79\x18\xdc\xe2\x02\x70\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1c\x1f\x24\
\xc6\x09\x17\x00\x00\x00\x24\x49\x44\x41\x54\x08\xd7\x63\x60\x40\
\x05\xff\xcf\xc3\x58\x4c\xc8\x5c\x26\x64\x59\x26\x64\xc5\x70\x0e\
\xa3\x21\x9c\xc3\x68\x88\x61\x1a\x0a\x00\x00\x6d\x84\x09\x75\x37\
\x9e\xd9\x23\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\x9f\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x14\x1f\xf9\
\x23\xd9\x0b\x00\x00\x00\x23\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x0d\xe6\x7c\x80\xb1\x18\x91\x05\x52\x04\xe0\x42\x08\x15\x29\x02\
\x0c\x0c\x8c\xc8\x02\x08\x95\x68\x00\x00\xac\xac\x07\x90\x4e\x65\
\x34\xac\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\x9e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x0f\xfd\
\x8f\xf8\x2e\x00\x00\x00\x22\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x0d\xfe\x9f\x87\xb1\x18\x91\x05\x18\x0d\xe1\x42\x48\x2a\x0c\x19\
\x18\x18\x91\x05\x10\x2a\xd1\x00\x00\xca\xb5\x07\xd2\x76\xbb\xb2\
\xc5\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\x9c\x53\x34\xfc\x5d\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x0b\x02\x04\x6d\
\x98\x1b\x69\x00\x00\x00\x29\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\xff\xcf\xa3\x08\x18\x32\x32\x30\x20\x0b\x32\x1a\
\x32\x30\x30\x42\x98\x10\x41\x46\x43\x14\x13\x50\xb5\xa3\x01\x00\
\xd6\x10\x07\xd2\x2f\x48\xdf\x4a\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x3b\xdc\
\x3b\x0c\x9b\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\x73\x3e\x20\x0b\xa4\x08\x30\x32\x30\x20\x0b\xa6\
\x08\x30\x30\x30\x42\x98\x10\xc1\x14\x01\x14\x13\x50\xb5\xa3\x01\
\x00\xc6\xb9\x07\x90\x5d\x66\x1f\x83\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x07\xdd\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x07\x00\x00\x00\x0a\x08\x06\x00\x00\x00\x78\xcc\x44\x0d\
\x00\x00\x05\x52\x69\x54\x58\x74\x58\x4d\x4c\x3a\x63\x6f\x6d\x2e\
\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\x00\x00\x00\x3c\x3f\
\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\x69\x6e\x3d\x22\xef\
\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\x30\x4d\x70\x43\x65\
\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\x7a\x6b\x63\x39\x64\
\x22\x3f\x3e\x0a\x3c\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x20\x78\
\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\x62\x65\x3a\x6e\x73\
\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\x6d\x70\x74\x6b\x3d\
\x22\x58\x4d\x50\x20\x43\x6f\x72\x65\x20\x35\x2e\x35\x2e\x30\x22\
\x3e\x0a\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\x78\x6d\x6c\x6e\
\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\
\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\
\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\
\x73\x23\x22\x3e\x0a\x20\x20\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\
\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\
\x74\x3d\x22\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x65\
\x78\x69\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\
\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x65\x78\x69\x66\x2f\x31\x2e\
\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x74\x69\
\x66\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x74\x69\x66\x66\x2f\x31\x2e\x30\
\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x70\x68\x6f\x74\
\x6f\x73\x68\x6f\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\
\x61\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x0a\x20\x20\x20\x20\x78\
\x6d\x6c\x6e\x73\x3a\x73\x74\x45\x76\x74\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\
\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\
\x73\x6f\x75\x72\x63\x65\x45\x76\x65\x6e\x74\x23\x22\x0a\x20\x20\
\x20\x65\x78\x69\x66\x3a\x50\x69\x78\x65\x6c\x58\x44\x69\x6d\x65\
\x6e\x73\x69\x6f\x6e\x3d\x22\x37\x22\x0a\x20\x20\x20\x65\x78\x69\
\x66\x3a\x50\x69\x78\x65\x6c\x59\x44\x69\x6d\x65\x6e\x73\x69\x6f\
\x6e\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x65\x78\x69\x66\x3a\x43\
\x6f\x6c\x6f\x72\x53\x70\x61\x63\x65\x3d\x22\x31\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x49\x6d\x61\x67\x65\x57\x69\x64\x74\x68\
\x3d\x22\x37\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x49\x6d\x61\
\x67\x65\x4c\x65\x6e\x67\x74\x68\x3d\x22\x31\x30\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\
\x55\x6e\x69\x74\x3d\x22\x32\x22\x0a\x20\x20\x20\x74\x69\x66\x66\
\x3a\x58\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\
\x2e\x30\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x59\x52\x65\x73\
\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\x2e\x30\x22\x0a\x20\
\x20\x20\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x3a\x43\x6f\x6c\x6f\
\x72\x4d\x6f\x64\x65\x3d\x22\x33\x22\x0a\x20\x20\x20\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3a\x49\x43\x43\x50\x72\x6f\x66\x69\x6c\
\x65\x3d\x22\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\x39\x36\x36\
\x2d\x32\x2e\x31\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x6f\x64\
\x69\x66\x79\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x34\x33\x3a\x30\x39\x2b\x30\x32\x3a\
\x30\x30\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x65\x74\x61\x64\
\x61\x74\x61\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x34\x33\x3a\x30\x39\x2b\x30\x32\x3a\
\x30\x30\x22\x3e\x0a\x20\x20\x20\x3c\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x41\x6c\x74\x3e\
\x0a\x20\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x6c\x69\x20\x78\x6d\
\x6c\x3a\x6c\x61\x6e\x67\x3d\x22\x78\x2d\x64\x65\x66\x61\x75\x6c\
\x74\x22\x3e\x62\x72\x61\x6e\x63\x68\x5f\x63\x6c\x6f\x73\x65\x3c\
\x2f\x72\x64\x66\x3a\x6c\x69\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\
\x64\x66\x3a\x41\x6c\x74\x3e\x0a\x20\x20\x20\x3c\x2f\x64\x63\x3a\
\x74\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\x3c\x78\x6d\x70\x4d\x4d\
\x3a\x48\x69\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x20\x20\x3c\x72\
\x64\x66\x3a\x53\x65\x71\x3e\x0a\x20\x20\x20\x20\x20\x3c\x72\x64\
\x66\x3a\x6c\x69\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\
\x3a\x61\x63\x74\x69\x6f\x6e\x3d\x22\x70\x72\x6f\x64\x75\x63\x65\
\x64\x22\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\x3a\x73\
\x6f\x66\x74\x77\x61\x72\x65\x41\x67\x65\x6e\x74\x3d\x22\x41\x66\
\x66\x69\x6e\x69\x74\x79\x20\x44\x65\x73\x69\x67\x6e\x65\x72\x20\
\x31\x2e\x39\x2e\x32\x22\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\
\x76\x74\x3a\x77\x68\x65\x6e\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x34\x33\x3a\x30\x39\x2b\x30\x32\x3a\
\x30\x30\x22\x2f\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\
\x53\x65\x71\x3e\x0a\x20\x20\x20\x3c\x2f\x78\x6d\x70\x4d\x4d\x3a\
\x48\x69\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x3c\x2f\x72\x64\x66\
\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x0a\x20\x3c\
\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x3c\x2f\x78\x3a\x78\x6d\
\x70\x6d\x65\x74\x61\x3e\x0a\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\
\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x58\xad\xf2\x80\x00\x00\
\x01\x83\x69\x43\x43\x50\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\
\x39\x36\x36\x2d\x32\x2e\x31\x00\x00\x28\x91\x75\x91\xcf\x2b\x44\
\x51\x14\xc7\x3f\x66\x68\xfc\x18\x8d\x62\x61\x31\x65\x12\x16\x42\
\x83\x12\x1b\x8b\x99\x18\x0a\x8b\x99\x51\x7e\x6d\x66\x9e\x79\x33\
\x6a\xde\x78\xbd\x37\xd2\x64\xab\x6c\xa7\x28\xb1\xf1\x6b\xc1\x5f\
\xc0\x56\x59\x2b\x45\xa4\x64\xa7\xac\x89\x0d\x7a\xce\x9b\x51\x23\
\x99\x73\x3b\xf7\x7c\xee\xf7\xde\x73\xba\xf7\x5c\x70\x44\xd3\x8a\
\x66\x56\xfa\x41\xcb\x64\x8d\x70\x28\xe0\x9b\x99\x9d\xf3\xb9\x9e\
\xa8\xa2\x85\x1a\x3a\xf1\xc6\x14\x53\x9f\x8c\x8c\x46\x29\x6b\xef\
\xb7\x54\xd8\xf1\xba\xdb\xae\x55\xfe\xdc\xbf\x56\xb7\x98\x30\x15\
\xa8\xa8\x16\x1e\x56\x74\x23\x2b\x3c\x26\x3c\xb1\x9a\xd5\x6d\xde\
\x12\x6e\x52\x52\xb1\x45\xe1\x13\xe1\x2e\x43\x2e\x28\x7c\x63\xeb\
\xf1\x22\x3f\xdb\x9c\x2c\xf2\xa7\xcd\x46\x34\x1c\x04\x47\x83\xb0\
\x2f\xf9\x8b\xe3\xbf\x58\x49\x19\x9a\xb0\xbc\x9c\x36\x2d\xbd\xa2\
\xfc\xdc\xc7\x7e\x89\x3b\x91\x99\x8e\x48\x6c\x15\xf7\x62\x12\x26\
\x44\x00\x1f\xe3\x8c\x10\x64\x80\x5e\x86\x64\x1e\xa0\x9b\x3e\x7a\
\x64\x45\x99\x7c\x7f\x21\x7f\x8a\x65\xc9\x55\x64\xd6\xc9\x61\xb0\
\x44\x92\x14\x59\xba\x44\x5d\x91\xea\x09\x89\xaa\xe8\x09\x19\x69\
\x72\x76\xff\xff\xf6\xd5\x54\xfb\xfb\x8a\xd5\xdd\x01\xa8\x7a\xb4\
\xac\xd7\x76\x70\x6d\xc2\x57\xde\xb2\x3e\x0e\x2c\xeb\xeb\x10\x9c\
\x0f\x70\x9e\x29\xe5\x2f\xef\xc3\xe0\x9b\xe8\xf9\x92\xd6\xb6\x07\
\x9e\x75\x38\xbd\x28\x69\xf1\x6d\x38\xdb\x80\xe6\x7b\x3d\x66\xc4\
\x0a\x92\x53\xdc\xa1\xaa\xf0\x72\x0c\xf5\xb3\xd0\x78\x05\xb5\xf3\
\xc5\x9e\xfd\xec\x73\x74\x07\xd1\x35\xf9\xaa\x4b\xd8\xd9\x85\x0e\
\x39\xef\x59\xf8\x06\x8e\xfd\x67\xf8\xfd\x8a\x18\x97\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\
\x9c\x18\x00\x00\x00\xa2\x49\x44\x41\x54\x18\x95\x55\xcf\xb1\x4a\
\xc3\x31\x00\xc4\xe1\x2f\xff\xb9\x93\xa3\x93\xb8\xa5\x8b\x0f\x20\
\x55\x44\x10\x5c\x3a\x84\x2c\x1d\x5c\x7c\x0f\xb7\x8e\x3e\x4a\x88\
\xa3\xb8\x08\x6d\x05\xbb\x77\xc8\xea\xe2\x0b\x74\x6f\xe9\xd2\x42\
\x7a\x70\x70\xf0\xe3\x0e\x2e\xa4\xd2\xae\xf0\x8a\xf7\x9a\xe3\x56\
\xa7\x01\xd7\x78\xc3\x32\x95\x76\x79\x06\x6b\x8e\xdf\x78\xc1\x18\
\xbf\xa9\xb4\xf1\x09\x86\x53\x48\xa5\x3d\xe2\x03\x3b\x4c\x6b\x8e\
\xab\xd0\xcf\xa4\xd2\x6e\xf0\x89\x0b\xdc\x0f\xce\xb5\x3f\x3a\x20\
\x0c\x5d\xeb\x01\x3f\x18\xe1\xa9\xe6\xb8\x1e\x8e\x60\x86\x2f\x6c\
\x71\x5b\x73\x5c\x40\x48\xa5\xdd\x61\x81\x0d\x9e\x6b\x8e\xff\xfd\
\xcf\x3f\xcc\x31\xe9\x01\x1c\x00\x73\x52\x2d\x71\xe4\x4a\x1b\x69\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x3b\xdc\
\x3b\x0c\x9b\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\x73\x3e\x20\x0b\xa4\x08\x30\x32\x30\x20\x0b\xa6\
\x08\x30\x30\x30\x42\x98\x10\xc1\x14\x01\x14\x13\x50\xb5\xa3\x01\
\x00\xc6\xb9\x07\x90\x5d\x66\x1f\x83\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\xa5\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\x9c\x53\x34\xfc\x5d\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x0b\x02\x04\x6d\
\x98\x1b\x69\x00\x00\x00\x29\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\xff\xcf\xa3\x08\x18\x32\x32\x30\x20\x0b\x32\x1a\
\x32\x30\x30\x42\x98\x10\x41\x46\x43\x14\x13\x50\xb5\xa3\x01\x00\
\xd6\x10\x07\xd2\x2f\x48\xdf\x4a\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1f\x20\xb9\
\x8d\x77\xe9\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x06\xe6\x7c\x60\x60\x60\x42\x30\xa1\x1c\x08\x93\x81\x81\x09\xc1\
\x64\x60\x60\x62\x60\x48\x11\x40\xe2\x20\x73\x19\x90\x8d\x40\x02\
\x00\x23\xed\x08\xaf\x64\x9f\x0f\x15\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\x9e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x0f\xfd\
\x8f\xf8\x2e\x00\x00\x00\x22\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x0d\xfe\x9f\x87\xb1\x18\x91\x05\x18\x0d\xe1\x42\x48\x2a\x0c\x19\
\x18\x18\x91\x05\x10\x2a\xd1\x00\x00\xca\xb5\x07\xd2\x76\xbb\xb2\
\xc5\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x09\x00\x00\x00\x06\x08\x04\x00\x00\x00\xbb\xce\x7c\x4e\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x08\x15\x3b\xdc\
\x3b\x0c\x9b\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x00\x8c\x0c\x0c\x73\x3e\x20\x0b\xa4\x08\x30\x32\x30\x20\x0b\xa6\
\x08\x30\x30\x30\x42\x98\x10\xc1\x14\x01\x14\x13\x50\xb5\xa3\x01\
\x00\xc6\xb9\x07\x90\x5d\x66\x1f\x83\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1d\x00\xb0\
\xd5\x35\xa3\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x06\xfe\x9f\x67\x60\x60\x42\x30\xa1\x1c\x08\x93\x81\x81\x09\xc1\
\x64\x60\x60\x62\x60\x60\x34\x44\xe2\x20\x73\x19\x90\x8d\x40\x02\
\x00\x64\x40\x09\x75\x86\xb3\xad\x9c\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x00\xa6\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x06\x00\x00\x00\x09\x08\x04\x00\x00\x00\xbb\x93\x95\x16\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x02\x62\x4b\x47\x44\x00\xff\x87\x8f\xcc\xbf\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xdc\x08\x17\x14\x1d\x00\xb0\
\xd5\x35\xa3\x00\x00\x00\x2a\x49\x44\x41\x54\x08\xd7\x63\x60\xc0\
\x06\xfe\x9f\x67\x60\x60\x42\x30\xa1\x1c\x08\x93\x81\x81\x09\xc1\
\x64\x60\x60\x62\x60\x60\x34\x44\xe2\x20\x73\x19\x90\x8d\x40\x02\
\x00\x64\x40\x09\x75\x86\xb3\xad\x9c\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x07\x06\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x0a\x00\x00\x00\x07\x08\x06\x00\x00\x00\x31\xac\xdc\x63\
\x00\x00\x04\xb0\x69\x54\x58\x74\x58\x4d\x4c\x3a\x63\x6f\x6d\x2e\
\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\x00\x00\x00\x3c\x3f\
\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\x69\x6e\x3d\x22\xef\
\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\x30\x4d\x70\x43\x65\
\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\x7a\x6b\x63\x39\x64\
\x22\x3f\x3e\x0a\x3c\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x20\x78\
\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\x62\x65\x3a\x6e\x73\
\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\x6d\x70\x74\x6b\x3d\
\x22\x58\x4d\x50\x20\x43\x6f\x72\x65\x20\x35\x2e\x35\x2e\x30\x22\
\x3e\x0a\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\x78\x6d\x6c\x6e\
\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\
\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\
\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\
\x73\x23\x22\x3e\x0a\x20\x20\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\
\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\
\x74\x3d\x22\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x65\
\x78\x69\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\
\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x65\x78\x69\x66\x2f\x31\x2e\
\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x74\x69\
\x66\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x74\x69\x66\x66\x2f\x31\x2e\x30\
\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x70\x68\x6f\x74\
\x6f\x73\x68\x6f\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\
\x61\x70\x2f\x31\x2e\x30\x2f\x22\x0a\x20\x20\x20\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x0a\x20\x20\x20\x20\x78\
\x6d\x6c\x6e\x73\x3a\x73\x74\x45\x76\x74\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\
\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\
\x73\x6f\x75\x72\x63\x65\x45\x76\x65\x6e\x74\x23\x22\x0a\x20\x20\
\x20\x65\x78\x69\x66\x3a\x50\x69\x78\x65\x6c\x58\x44\x69\x6d\x65\
\x6e\x73\x69\x6f\x6e\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x65\x78\
\x69\x66\x3a\x50\x69\x78\x65\x6c\x59\x44\x69\x6d\x65\x6e\x73\x69\
\x6f\x6e\x3d\x22\x37\x22\x0a\x20\x20\x20\x65\x78\x69\x66\x3a\x43\
\x6f\x6c\x6f\x72\x53\x70\x61\x63\x65\x3d\x22\x31\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x49\x6d\x61\x67\x65\x57\x69\x64\x74\x68\
\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x49\x6d\
\x61\x67\x65\x4c\x65\x6e\x67\x74\x68\x3d\x22\x37\x22\x0a\x20\x20\
\x20\x74\x69\x66\x66\x3a\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\
\x55\x6e\x69\x74\x3d\x22\x32\x22\x0a\x20\x20\x20\x74\x69\x66\x66\
\x3a\x58\x52\x65\x73\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\
\x2e\x30\x22\x0a\x20\x20\x20\x74\x69\x66\x66\x3a\x59\x52\x65\x73\
\x6f\x6c\x75\x74\x69\x6f\x6e\x3d\x22\x37\x32\x2e\x30\x22\x0a\x20\
\x20\x20\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x3a\x43\x6f\x6c\x6f\
\x72\x4d\x6f\x64\x65\x3d\x22\x33\x22\x0a\x20\x20\x20\x70\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x3a\x49\x43\x43\x50\x72\x6f\x66\x69\x6c\
\x65\x3d\x22\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\x39\x36\x36\
\x2d\x32\x2e\x31\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x6f\x64\
\x69\x66\x79\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x33\x30\x3a\x31\x31\x2b\x30\x32\x3a\
\x30\x30\x22\x0a\x20\x20\x20\x78\x6d\x70\x3a\x4d\x65\x74\x61\x64\
\x61\x74\x61\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\
\x2d\x33\x31\x54\x31\x32\x3a\x33\x30\x3a\x31\x31\x2b\x30\x32\x3a\
\x30\x30\x22\x3e\x0a\x20\x20\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x48\
\x69\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\
\x3a\x53\x65\x71\x3e\x0a\x20\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\
\x6c\x69\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\x3a\x61\
\x63\x74\x69\x6f\x6e\x3d\x22\x70\x72\x6f\x64\x75\x63\x65\x64\x22\
\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\x3a\x73\x6f\x66\
\x74\x77\x61\x72\x65\x41\x67\x65\x6e\x74\x3d\x22\x41\x66\x66\x69\
\x6e\x69\x74\x79\x20\x44\x65\x73\x69\x67\x6e\x65\x72\x20\x31\x2e\
\x39\x2e\x32\x22\x0a\x20\x20\x20\x20\x20\x20\x73\x74\x45\x76\x74\
\x3a\x77\x68\x65\x6e\x3d\x22\x32\x30\x32\x31\x2d\x30\x35\x2d\x33\
\x31\x54\x31\x32\x3a\x33\x30\x3a\x31\x31\x2b\x30\x32\x3a\x30\x30\
\x22\x2f\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x53\x65\
\x71\x3e\x0a\x20\x20\x20\x3c\x2f\x78\x6d\x70\x4d\x4d\x3a\x48\x69\
\x73\x74\x6f\x72\x79\x3e\x0a\x20\x20\x3c\x2f\x72\x64\x66\x3a\x44\
\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x0a\x20\x3c\x2f\x72\
\x64\x66\x3a\x52\x44\x46\x3e\x0a\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x3e\x0a\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\
\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x85\x9d\x9f\x08\x00\x00\x01\x83\
\x69\x43\x43\x50\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\x39\x36\
\x36\x2d\x32\x2e\x31\x00\x00\x28\x91\x75\x91\xcf\x2b\x44\x51\x14\
\xc7\x3f\x66\x68\xfc\x18\x8d\x62\x61\x31\x65\x12\x16\x42\x83\x12\
\x1b\x8b\x99\x18\x0a\x8b\x99\x51\x7e\x6d\x66\x9e\x79\x33\x6a\xde\
\x78\xbd\x37\xd2\x64\xab\x6c\xa7\x28\xb1\xf1\x6b\xc1\x5f\xc0\x56\
\x59\x2b\x45\xa4\x64\xa7\xac\x89\x0d\x7a\xce\x9b\x51\x23\x99\x73\
\x3b\xf7\x7c\xee\xf7\xde\x73\xba\xf7\x5c\x70\x44\xd3\x8a\x66\x56\
\xfa\x41\xcb\x64\x8d\x70\x28\xe0\x9b\x99\x9d\xf3\xb9\x9e\xa8\xa2\
\x85\x1a\x3a\xf1\xc6\x14\x53\x9f\x8c\x8c\x46\x29\x6b\xef\xb7\x54\
\xd8\xf1\xba\xdb\xae\x55\xfe\xdc\xbf\x56\xb7\x98\x30\x15\xa8\xa8\
\x16\x1e\x56\x74\x23\x2b\x3c\x26\x3c\xb1\x9a\xd5\x6d\xde\x12\x6e\
\x52\x52\xb1\x45\xe1\x13\xe1\x2e\x43\x2e\x28\x7c\x63\xeb\xf1\x22\
\x3f\xdb\x9c\x2c\xf2\xa7\xcd\x46\x34\x1c\x04\x47\x83\xb0\x2f\xf9\
\x8b\xe3\xbf\x58\x49\x19\x9a\xb0\xbc\x9c\x36\x2d\xbd\xa2\xfc\xdc\
\xc7\x7e\x89\x3b\x91\x99\x8e\x48\x6c\x15\xf7\x62\x12\x26\x44\x00\
\x1f\xe3\x8c\x10\x64\x80\x5e\x86\x64\x1e\xa0\x9b\x3e\x7a\x64\x45\
\x99\x7c\x7f\x21\x7f\x8a\x65\xc9\x55\x64\xd6\xc9\x61\xb0\x44\x92\
\x14\x59\xba\x44\x5d\x91\xea\x09\x89\xaa\xe8\x09\x19\x69\x72\x76\
\xff\xff\xf6\xd5\x54\xfb\xfb\x8a\xd5\xdd\x01\xa8\x7a\xb4\xac\xd7\
\x76\x70\x6d\xc2\x57\xde\xb2\x3e\x0e\x2c\xeb\xeb\x10\x9c\x0f\x70\
\x9e\x29\xe5\x2f\xef\xc3\xe0\x9b\xe8\xf9\x92\xd6\xb6\x07\x9e\x75\
\x38\xbd\x28\x69\xf1\x6d\x38\xdb\x80\xe6\x7b\x3d\x66\xc4\x0a\x92\
\x53\xdc\xa1\xaa\xf0\x72\x0c\xf5\xb3\xd0\x78\x05\xb5\xf3\xc5\x9e\
\xfd\xec\x73\x74\x07\xd1\x35\xf9\xaa\x4b\xd8\xd9\x85\x0e\x39\xef\
\x59\xf8\x06\x8e\xfd\x67\xf8\xfd\x8a\x18\x97\x00\x00\x00\x09\x70\
\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\
\x00\x00\x00\x6d\x49\x44\x41\x54\x18\x95\x75\xcf\xc1\x09\xc2\x50\
\x10\x84\xe1\xd7\x85\x07\x9b\xd0\x43\x40\xd2\x82\x78\x14\x7b\x30\
\x57\x21\x8d\x84\x60\x3f\x62\x4b\x7a\x48\xcc\x97\x83\xfb\x30\x04\
\xdf\x9c\x86\x7f\x67\x99\xdd\x84\x0d\xaa\x54\x10\x6a\x6c\x13\x1e\
\xbe\xba\xfe\x09\x35\x31\x7b\xe6\x8d\x0f\x26\x1c\x17\xa1\x53\xb0\
\x11\x87\x0c\x2f\x01\x07\xec\xb0\x0f\x3f\xe1\xbc\xae\x69\xa3\xe6\
\x85\x77\xf8\x5b\xe9\xf0\xbb\x9f\xfa\xd2\x83\x39\xdc\xa3\x5b\xf3\
\x19\x2e\xa8\x89\xb5\x30\xf7\x43\xa0\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x08\
\x06\xc5\x8e\xa5\
\x00\x6f\
\x00\x70\x00\x65\x00\x6e\x00\x70\x00\x79\x00\x70\x00\x65\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x0f\
\x02\x9f\x05\x87\
\x00\x72\
\x00\x69\x00\x67\x00\x68\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x12\
\x05\x8f\x9d\x07\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x5f\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x1b\
\x03\x5a\x32\x27\
\x00\x63\
\x00\x6f\x00\x6d\x00\x62\x00\x6f\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\
\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x18\
\x03\x8e\xde\x67\
\x00\x72\
\x00\x69\x00\x67\x00\x68\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\
\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x0b\xda\x30\xa7\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x12\
\x03\x8d\x04\x47\
\x00\x72\
\x00\x69\x00\x67\x00\x68\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x15\
\x0f\xf3\xc0\x07\
\x00\x75\
\x00\x70\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\
\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x01\x73\x8b\x07\
\x00\x75\
\x00\x70\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0e\
\x04\xa2\xfc\xa7\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x12\
\x01\x2e\x03\x27\
\x00\x63\
\x00\x6f\x00\x6d\x00\x62\x00\x6f\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x14\
\x04\x5e\x2d\xa7\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x64\x00\x5f\x00\x6f\x00\x6e\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x17\
\x0c\xab\x51\x07\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\
\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x01\x1f\xc3\x87\
\x00\x64\
\x00\x6f\x00\x77\x00\x6e\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x17\
\x0c\x65\xce\x07\
\x00\x6c\
\x00\x65\x00\x66\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\
\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0c\
\x06\xe6\xe6\x67\
\x00\x75\
\x00\x70\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x15\
\x03\x27\x72\x67\
\x00\x63\
\x00\x6f\x00\x6d\x00\x62\x00\x6f\x00\x62\x00\x6f\x00\x78\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6f\x00\x6e\
\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x00\xb8\x8c\x07\
\x00\x6c\
\x00\x65\x00\x66\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x0e\
\x0e\xde\xfa\xc7\
\x00\x6c\
\x00\x65\x00\x66\x00\x74\x00\x5f\x00\x61\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x06\x53\x25\xa7\
\x00\x62\
\x00\x72\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x5f\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x16\x00\x02\x00\x00\x00\x13\x00\x00\x00\x03\
\x00\x00\x02\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x1f\x3c\
\x00\x00\x02\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x1c\x9d\
\x00\x00\x01\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x13\x68\
\x00\x00\x01\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x12\x1d\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x02\xb6\x00\x00\x00\x00\x00\x01\x00\x00\x1e\x92\
\x00\x00\x00\x76\x00\x00\x00\x00\x00\x01\x00\x00\x07\xd8\
\x00\x00\x01\x10\x00\x00\x00\x00\x00\x01\x00\x00\x10\xd6\
\x00\x00\x00\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x08\x81\
\x00\x00\x01\xda\x00\x00\x00\x00\x00\x01\x00\x00\x14\x12\
\x00\x00\x01\x8e\x00\x00\x00\x00\x00\x01\x00\x00\x12\xbf\
\x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x00\xa4\
\x00\x00\x03\x30\x00\x00\x00\x00\x00\x01\x00\x00\x20\x90\
\x00\x00\x02\x98\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xf0\
\x00\x00\x00\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x09\x25\
\x00\x00\x02\x64\x00\x00\x00\x00\x00\x01\x00\x00\x1d\x46\
\x00\x00\x02\x08\x00\x00\x00\x00\x00\x01\x00\x00\x1b\xf3\
\x00\x00\x03\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xe6\
\x00\x00\x01\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x11\x7a\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x16\x00\x02\x00\x00\x00\x13\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x02\xe6\x00\x00\x00\x00\x00\x01\x00\x00\x1f\x3c\
\x00\x00\x01\x76\x41\x9d\xa2\x35\
\x00\x00\x02\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x1c\x9d\
\x00\x00\x01\x76\x41\x9d\xa2\x35\
\x00\x00\x01\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x13\x68\
\x00\x00\x01\x79\xb4\x72\xcc\x9c\
\x00\x00\x01\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x12\x1d\
\x00\x00\x01\x76\x41\x9d\xa2\x39\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x76\x41\x9d\xa2\x37\
\x00\x00\x02\xb6\x00\x00\x00\x00\x00\x01\x00\x00\x1e\x92\
\x00\x00\x01\x79\xb4\x72\xcc\x9c\
\x00\x00\x00\x76\x00\x00\x00\x00\x00\x01\x00\x00\x07\xd8\
\x00\x00\x01\x79\xb4\x72\xcc\x9c\
\x00\x00\x01\x10\x00\x00\x00\x00\x00\x01\x00\x00\x10\xd6\
\x00\x00\x01\x76\x41\x9d\xa2\x37\
\x00\x00\x00\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x08\x81\
\x00\x00\x01\x76\x41\x9d\xa2\x37\
\x00\x00\x01\xda\x00\x00\x00\x00\x00\x01\x00\x00\x14\x12\
\x00\x00\x01\x79\xc2\x05\x2b\x60\
\x00\x00\x01\x8e\x00\x00\x00\x00\x00\x01\x00\x00\x12\xbf\
\x00\x00\x01\x76\x41\x9d\xa2\x35\
\x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x00\xa4\
\x00\x00\x01\x79\xc1\xfc\x16\x91\
\x00\x00\x03\x30\x00\x00\x00\x00\x00\x01\x00\x00\x20\x90\
\x00\x00\x01\x79\xc1\xf9\x4b\x78\
\x00\x00\x02\x98\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xf0\
\x00\x00\x01\x76\x41\x9d\xa2\x39\
\x00\x00\x00\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x09\x25\
\x00\x00\x01\x79\xc2\x05\x91\x2a\
\x00\x00\x02\x64\x00\x00\x00\x00\x00\x01\x00\x00\x1d\x46\
\x00\x00\x01\x76\x41\x9d\xa2\x35\
\x00\x00\x02\x08\x00\x00\x00\x00\x00\x01\x00\x00\x1b\xf3\
\x00\x00\x01\x76\x41\x9d\xa2\x35\
\x00\x00\x03\x0e\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xe6\
\x00\x00\x01\x76\x41\x9d\xa2\x35\
\x00\x00\x01\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x11\x7a\
\x00\x00\x01\x76\x41\x9d\xa2\x39\
"
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
20857,
2134,
2438,
198,
2,
198,
2,
15622,
416,
25,
383,
20857,
3082,
5329,
329,
9485,
48,
83,
20,
357,
48,
83,
410,
20,
13,
1314,
13,
17,
8,
198,
2,
198,... | 1.230209 | 40,107 |
import os
import filecmp
import argparse
from os.path import join, abspath, dirname, pardir
from regresspy import compare_utils, regression
ABS_ROOT = abspath(join(dirname(__file__), pardir, pardir))
REGR_DIR = join("tests", "regression")
TBTS = join("tests", "inputs", "tbt_files")
MODELS = join("tests", "inputs", "models")
OPTICS = join("tests", "inputs", "optics_files")
HARM_FILES = join("tests", "inputs", "harmonic_results")
GETLLM_FILES = join("tests", "inputs", "getllm_results")
TEST_CASES_HOLE_IN_ONE = (
regression.TestCase(
name="hole_in_one_test_flat_3dkick",
script="hole_in_one.py",
arguments=("--file={file} --model={model} --output={output} clean "
"harpy --tunex 0.27 --tuney 0.322 --tunez 4.5e-4 "
"--nattunex 0.28 --nattuney 0.31 --tolerance 0.005".format(
file=join(TBTS, "flat_beam1_3d.sdds"),
model=join(MODELS, "flat_beam1", "twiss.dat"),
output=join(REGR_DIR, "_out_hole_in_one_test_flat_3dkick"))),
output=join(REGR_DIR, "_out_hole_in_one_test_flat_3dkick"),
test_function=lambda d1, d2: compare_utils.compare_dirs(d1, d2, ignore=[r".*\.log"]),
pre_hook=lambda dir: os.makedirs(join(dir, REGR_DIR, "_out_hole_in_one_test_flat_3dkick")),
),
)
TEST_CASES_GETLLM = (
regression.TestCase(
name="getllm_test_flat_disp",
script=join("GetLLM", "GetLLM.py"),
arguments=("--accel=LHCB1 "
"--model={model} "
"--files={files_dir}/on_mom_file1.sdds,{files_dir}/on_mom_file2.sdds,{files_dir}/neg_mom_file1.sdds,{files_dir}/pos_mom_file1.sdds "
"--output={output} "
"--tbtana=SUSSIX --bpmu=mm --lhcphase=1 "
"--errordefs={errordefs}")
.format(model=join(MODELS, "flat_beam1", "twiss.dat"),
files_dir=join(HARM_FILES, "flat_60_15cm_b1"),
output=join(REGR_DIR, "_out_getllm_test_flat_disp"),
errordefs=join(MODELS, "flat_beam1", "error_deff.txt")),
output=join(REGR_DIR, "_out_getllm_test_flat_disp"),
test_function=lambda dir1, dir2:
compare_utils.compare_dirs_ignore_words(dir1, dir2, ["Command", "Date", "CWD"]),
pre_hook=lambda dir: None,
),
)
TEST_CASES_MODEL_CREATION = (
regression.TestCase(
name="model_creator_test_lhc",
script=join("model", "creator.py"),
arguments=("--type nominal --accel lhc --lhcmode lhc_runII_2017 "
"--beam 1 --nattunex 0.28 --nattuney 0.31 --acd "
"--drvtunex 0.27 --drvtuney 0.322 --dpp 0.0 "
"--optics {optics} "
"--output {output}").format(optics=join(OPTICS, "2017", "opticsfile.19"),
output=join(REGR_DIR, "_out_model_creator_test_lhc")),
output=join(REGR_DIR, "_out_model_creator_test_lhc"),
test_function=lambda dir1, dir2:
compare_utils.compare_dirs_ignore_words(dir1, dir2, ["ORIGIN", "DATE", "TIME"]),
pre_hook=lambda dir: os.makedirs(join(dir, REGR_DIR, "_out_model_creator_test_lhc")),
),
)
TEST_CASES_RESPONSE_CREATION = (
regression.TestCase(
name="response_creation_test_via_madx",
script=join("generate_fullresponse_pandas.py"),
arguments=" ".join([
"--accel lhc --lhcmode lhc_runII_2017 --beam 1",
"--model_dir {model_dir}",
"--optics_file {optics_file}",
"--creator madx",
"--outfile {response_out}",
"--variables MQY coupling_knobs",
"--deltak 2e-5",
]).format(
model_dir=join(REGR_DIR, "_out_create_response_test_madx", "model"),
optics_file=join(OPTICS, "2018", "opticsfile.24_ctpps2"),
response_out=join(REGR_DIR, "_out_create_response_test_madx", "fullresponse")
),
output=join(REGR_DIR, "_out_create_response_test_madx"),
test_function=lambda d1, d2: filecmp.cmp(
join(d1, "fullresponse"), join(d2, "fullresponse")
),
pre_hook=lambda dir: compare_utils.copy_item(
join(MODELS, "25cm_beam1"),
join(dir, REGR_DIR, "_out_create_response_test_madx", "model")
)
),
regression.TestCase(
name="response_creation_test_via_twiss",
script=join("generate_fullresponse_pandas.py"),
arguments=" ".join([
"--accel lhc --lhcmode lhc_runII_2017 --beam 1",
"--model_dir {model_dir}",
"--optics_file {optics_file}",
"--creator twiss",
"--outfile {response_out}",
"--variables MQY coupling_knobs",
"--optics_params MUX MUY Q DX DY BBX BBY BETX BETY F1001I F1001R F1010R F1010I",
]).format(
model_dir=join(REGR_DIR, "_out_create_response_test_twiss", "model"),
optics_file=join(OPTICS, "2018", "opticsfile.24_ctpps2"),
response_out=join(REGR_DIR, "_out_create_response_test_twiss", "fullresponse")
),
output=join(REGR_DIR, "_out_create_response_test_twiss"),
test_function=lambda d1, d2: filecmp.cmp(
join(d1, "fullresponse"), join(d2, "fullresponse")
),
pre_hook=lambda dir: compare_utils.copy_item(
join(MODELS, "25cm_beam1"),
join(dir, REGR_DIR, "_out_create_response_test_twiss", "model")
)
),
)
TEST_CASES_GLOBAL_CORRECTION = (
regression.TestCase(
name="correct_iterative_test",
script=join("global_correct_iterative.py"),
arguments=" ".join([
"--accel lhc --lhcmode lhc_runII_2017 --beam 1",
"--model_dir {model_dir}",
"--optics_file {optics_file}",
"--variables MQY",
"--optics_params MUX MUY BBX BBY Q",
"--weights 1 1 1 1 10",
"--meas_dir {meas_dir}",
"--output_dir {out_dir}",
"--max_iter 1",
]).format(
model_dir=join(MODELS, "25cm_beam1"),
meas_dir=join(GETLLM_FILES, "25cm_beam1"),
optics_file=join(OPTICS, "2018", "opticsfile.24_ctpps2"),
out_dir=join(REGR_DIR, "_out_correct_iterative_test"),
),
output=join(REGR_DIR, "_out_correct_iterative_test"),
test_function=lambda d1, d2: compare_utils.compare_dirs_ignore_words(
d1, d2,
ignore_files=[r".*\.log", "model"],
ignore_words=["DATE", "TIME"],
),
pre_hook=lambda dir: compare_utils.copy_item(
join(MODELS, "25cm_beam1"),
join(dir, REGR_DIR, "_out_correct_iterative_test", "model")
),
),
)
def run_tests(opts=None):
"""Run the test cases and raise RegressionTestFailed on failure.
"""
alltests = (
list(TEST_CASES_HOLE_IN_ONE) +
#list(TEST_CASES_GETLLM) +
list(TEST_CASES_MODEL_CREATION) +
list(TEST_CASES_RESPONSE_CREATION) +
list(TEST_CASES_GLOBAL_CORRECTION)
)
regression.launch_test_set(alltests, ABS_ROOT,
yaml_conf=join(ABS_ROOT, ".travis.yml"),
keep_fails=opts.keepfiles if opts else False)
if __name__ == "__main__":
_options = _parse_args()
run_tests(_options)
| [
11748,
28686,
198,
11748,
2393,
48991,
198,
11748,
1822,
29572,
198,
6738,
28686,
13,
6978,
1330,
4654,
11,
2352,
6978,
11,
26672,
3672,
11,
41746,
343,
198,
6738,
50252,
9078,
1330,
8996,
62,
26791,
11,
20683,
628,
198,
32,
4462,
62,
... | 1.91224 | 3,897 |
# coding: utf-8
import os
import sys
sys.path.append(os.pardir) # 親ディレクトリのファイルをインポートするための設定
import numpy as np
from dataset.mnist import load_mnist
from common.multi_layer_net_extend import MultiLayerNetExtend
# データの読み込み
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label=True)
network = MultiLayerNetExtend(input_size=784, hidden_size_list=[100, 100], output_size=10,
use_batchnorm=True)
x_batch = x_train[:1]
t_batch = t_train[:1]
grad_backprop = network.gradient(x_batch, t_batch)
grad_numerical = network.numerical_gradient(x_batch, t_batch)
for key in grad_numerical.keys():
diff = np.average(np.abs(grad_backprop[key] - grad_numerical[key]))
print(key + ":" + str(diff))
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
11748,
28686,
198,
11748,
25064,
198,
198,
17597,
13,
6978,
13,
33295,
7,
418,
13,
26037,
343,
8,
220,
1303,
5525,
99,
103,
40629,
24186,
14099,
13298,
12675,
5641,
41939,
11482,
9202,
31758,
11482... | 2.18314 | 344 |
from pathlib import Path
import argparse
from preprocess import process_files
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('index_file', help='index file')
parser.add_argument('audio_dir', help='input audio folder')
parser.add_argument('data_dir', help='output data file')
parser.add_argument('--feature-type', '-ft', default='musicnn-melspectrogram',
choices=[
'musicnn-melspectrogram',
'vggish-melspectrogram',
'musicnn',
'vggish',
'openl3',
'tempocnn',
'spleeter',
'effnet_b0',
'yamnet'
],
help='input feature type')
args = parser.parse_args()
index_file = args.index_file
audio_dir = Path(args.audio_dir)
data_dir = Path(args.data_dir)
feature_type = args.feature_type
# set audio representations folder
data_dir.mkdir(exist_ok=True, parents=True)
fw = open(data_dir / 'index.tsv', "w")
fw.write('')
fw.close()
# list audios to process: according to 'index_file'
files_to_convert = []
f = open(index_file)
for line in f.readlines():
id, audio_path = line.strip().split("\t")
audio_repr = Path(audio_path).with_suffix(".dat")
tgt = str(data_dir / audio_repr)
src = str(audio_dir / audio_path)
files_to_convert.append((id, src, tgt))
process_files(files_to_convert, data_dir, feature_type=feature_type)
| [
6738,
3108,
8019,
1330,
10644,
198,
11748,
1822,
29572,
198,
198,
6738,
662,
14681,
1330,
1429,
62,
16624,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
30751,
796,
1822,
29572,
13,
28100,
1713,... | 1.972061 | 859 |
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# United States Government Sponsorship acknowledged. This software is subject to
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
# (No [Export] License Required except when exporting to an embargoed country,
# end user, or in support of a prohibited end use). By downloading this software,
# the user agrees to comply with all applicable U.S. export laws and regulations.
# The user has the responsibility to obtain export licenses, or other export
# authority as may be required before exporting this software to any 'EAR99'
# embargoed foreign country or citizen of those countries.
#
# Author: Eric Belz
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""geo is for doing coordinates on Earth. Here are the modules:
euclid Scalar, Vector, Tensor objects in E3 -eucliden 3-space.
charts rotations in E3, aka: charts on SO(3).
affine rigid affine transformations in E3.
coordinates Coordinates on Earth
ellipsoid oblate ellipsoid of revolution (e.g, WGS84) with all the
bells and whistles.
Note: sub-package use __all__, so they are:
>>>from geo import *
safe.
See mainpage.txt for a complete dump of geo's philosophy-- otherwise,
use the docstrings.
"""
## \namespace geo Vector- and Affine-spaces, on Earth
__all__ = ['euclid', 'coordinates', 'ellipsoid', 'charts', 'affine', 'motion']
| [
2,
27156,
27156,
27156,
27156,
15116,
8728,
4907,
93,
198,
2,
15069,
2321,
3442,
5136,
286,
8987,
13,
11096,
371,
34874,
15731,
1137,
53,
1961,
13,
198,
2,
220,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
116... | 3.58124 | 597 |
from concurrent.futures.thread import ThreadPoolExecutor
from nbdler.handler import (
SpeedAdjuster,
AIOReaderWriter,
BlockSlicer,
FileTempData,
ClientWorker,
URIStatusManager,
GatherException,
h, Handlers)
from .client import get_policy, ClientPolicy
from .version import VERSION
from .utils import forever_loop_in_executor
from traceback import format_exc
import weakref
import warnings
import asyncio
import os
__all__ = (
'Downloader',
)
| [
198,
6738,
24580,
13,
69,
315,
942,
13,
16663,
1330,
14122,
27201,
23002,
38409,
198,
6738,
299,
17457,
1754,
13,
30281,
1330,
357,
198,
220,
220,
220,
8729,
39668,
263,
11,
198,
220,
220,
220,
9552,
1581,
1329,
263,
34379,
11,
198,
... | 2.95122 | 164 |
#! /usr/bin/env python
import os
travis_tag = os.getenv("TRAVIS_TAG")
addon = os.getenv("ADDON")
docker_build = "docker run --rm --privileged " \
"-v /var/run/docker.sock:/var/run/docker.sock " \
"-v ~/.docker:/root/.docker " \
"-v $(pwd):/docker " \
"hassioaddons/build-env:latest " \
"--target {addon} " \
"--login ${{DOCKER_USER}} " \
"--password ${{DOCKER_PASS}} " \
"--no-cache --all --tag-latest"
if travis_tag:
docker_build = docker_build + " --push"
run(docker_build.format(addon=addon))
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
198,
11748,
28686,
628,
198,
83,
16956,
62,
12985,
796,
28686,
13,
1136,
24330,
7203,
51,
3861,
29817,
62,
42197,
4943,
198,
48078,
796,
28686,
13,
1136,
24330,
7203,
29266,
1340,
4943,
1... | 1.977778 | 315 |
# Generated by Django 3.2.12 on 2022-02-25 11:08
import django.db.models.deletion
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
513,
13,
17,
13,
1065,
319,
33160,
12,
2999,
12,
1495,
1367,
25,
2919,
198,
198,
11748,
42625,
14208,
13,
9945,
13,
27530,
13,
2934,
1616,
295,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,... | 2.840909 | 44 |
from cached_property import cached_property
import rlp
from rlp.sedes import (
big_endian_int,
binary,
)
from eth_typing import (
Address
)
from eth_hash.auto import keccak
from eth_utils import (
ValidationError,
)
from eth.abc import (
BaseTransactionAPI,
ComputationAPI,
SignedTransactionAPI,
TransactionFieldsAPI,
UnsignedTransactionAPI,
)
from .sedes import address
BASE_TRANSACTION_FIELDS = [
('nonce', big_endian_int),
('gas_price', big_endian_int),
('gas', big_endian_int),
('to', address),
('value', big_endian_int),
('data', binary),
('v', big_endian_int),
('r', big_endian_int),
('s', big_endian_int),
]
| [
6738,
39986,
62,
26745,
1330,
39986,
62,
26745,
198,
11748,
374,
34431,
198,
6738,
374,
34431,
13,
36622,
274,
1330,
357,
198,
220,
220,
220,
1263,
62,
437,
666,
62,
600,
11,
198,
220,
220,
220,
13934,
11,
198,
8,
198,
198,
6738,
... | 2.462898 | 283 |
import sys
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import aiosmtplib
from jinja2 import Environment, PackageLoader, Template
from alarme import Action
| [
11748,
25064,
198,
6738,
3053,
13,
76,
524,
13,
16680,
541,
433,
1330,
337,
3955,
3620,
586,
541,
433,
198,
6738,
3053,
13,
76,
524,
13,
5239,
1330,
337,
3955,
2767,
2302,
198,
198,
11748,
257,
4267,
16762,
489,
571,
198,
6738,
474,... | 3.193548 | 62 |
# Generated by Django 2.0.4 on 2018-04-17 11:11
from django.db import migrations, models
import django.utils.timezone
| [
2,
2980,
515,
416,
37770,
362,
13,
15,
13,
19,
319,
2864,
12,
3023,
12,
1558,
1367,
25,
1157,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
198,
11748,
42625,
14208,
13,
26791,
13,
2435,
11340,
628
] | 2.926829 | 41 |
# Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2021 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class Config(TLObject): # type: ignore
"""This object is a constructor of the base type :obj:`~pyrogram.raw.base.Config`.
Details:
- Layer: ``126``
- ID: ``0x330b4067``
Parameters:
date: ``int`` ``32-bit``
expires: ``int`` ``32-bit``
test_mode: ``bool``
this_dc: ``int`` ``32-bit``
dc_options: List of :obj:`DcOption <pyrogram.raw.base.DcOption>`
dc_txt_domain_name: ``str``
chat_size_max: ``int`` ``32-bit``
megagroup_size_max: ``int`` ``32-bit``
forwarded_count_max: ``int`` ``32-bit``
online_update_period_ms: ``int`` ``32-bit``
offline_blur_timeout_ms: ``int`` ``32-bit``
offline_idle_timeout_ms: ``int`` ``32-bit``
online_cloud_timeout_ms: ``int`` ``32-bit``
notify_cloud_delay_ms: ``int`` ``32-bit``
notify_default_delay_ms: ``int`` ``32-bit``
push_chat_period_ms: ``int`` ``32-bit``
push_chat_limit: ``int`` ``32-bit``
saved_gifs_limit: ``int`` ``32-bit``
edit_time_limit: ``int`` ``32-bit``
revoke_time_limit: ``int`` ``32-bit``
revoke_pm_time_limit: ``int`` ``32-bit``
rating_e_decay: ``int`` ``32-bit``
stickers_recent_limit: ``int`` ``32-bit``
stickers_faved_limit: ``int`` ``32-bit``
channels_read_media_period: ``int`` ``32-bit``
pinned_dialogs_count_max: ``int`` ``32-bit``
pinned_infolder_count_max: ``int`` ``32-bit``
call_receive_timeout_ms: ``int`` ``32-bit``
call_ring_timeout_ms: ``int`` ``32-bit``
call_connect_timeout_ms: ``int`` ``32-bit``
call_packet_timeout_ms: ``int`` ``32-bit``
me_url_prefix: ``str``
caption_length_max: ``int`` ``32-bit``
message_length_max: ``int`` ``32-bit``
webfile_dc_id: ``int`` ``32-bit``
phonecalls_enabled (optional): ``bool``
default_p2p_contacts (optional): ``bool``
preload_featured_stickers (optional): ``bool``
ignore_phone_entities (optional): ``bool``
revoke_pm_inbox (optional): ``bool``
blocked_mode (optional): ``bool``
pfs_enabled (optional): ``bool``
tmp_sessions (optional): ``int`` ``32-bit``
autoupdate_url_prefix (optional): ``str``
gif_search_username (optional): ``str``
venue_search_username (optional): ``str``
img_search_username (optional): ``str``
static_maps_provider (optional): ``str``
suggested_lang_code (optional): ``str``
lang_pack_version (optional): ``int`` ``32-bit``
base_lang_pack_version (optional): ``int`` ``32-bit``
See Also:
This object can be returned by 1 method:
.. hlist::
:columns: 2
- :obj:`help.GetConfig <pyrogram.raw.functions.help.GetConfig>`
"""
__slots__: List[str] = ["date", "expires", "test_mode", "this_dc", "dc_options", "dc_txt_domain_name", "chat_size_max", "megagroup_size_max", "forwarded_count_max", "online_update_period_ms", "offline_blur_timeout_ms", "offline_idle_timeout_ms", "online_cloud_timeout_ms", "notify_cloud_delay_ms", "notify_default_delay_ms", "push_chat_period_ms", "push_chat_limit", "saved_gifs_limit", "edit_time_limit", "revoke_time_limit", "revoke_pm_time_limit", "rating_e_decay", "stickers_recent_limit", "stickers_faved_limit", "channels_read_media_period", "pinned_dialogs_count_max", "pinned_infolder_count_max", "call_receive_timeout_ms", "call_ring_timeout_ms", "call_connect_timeout_ms", "call_packet_timeout_ms", "me_url_prefix", "caption_length_max", "message_length_max", "webfile_dc_id", "phonecalls_enabled", "default_p2p_contacts", "preload_featured_stickers", "ignore_phone_entities", "revoke_pm_inbox", "blocked_mode", "pfs_enabled", "tmp_sessions", "autoupdate_url_prefix", "gif_search_username", "venue_search_username", "img_search_username", "static_maps_provider", "suggested_lang_code", "lang_pack_version", "base_lang_pack_version"]
ID = 0x330b4067
QUALNAME = "types.Config"
@staticmethod
| [
2,
220,
9485,
39529,
532,
50203,
19308,
2964,
1462,
7824,
20985,
10074,
329,
11361,
198,
2,
220,
15069,
357,
34,
8,
2177,
12,
1238,
2481,
6035,
1279,
5450,
1378,
12567,
13,
785,
14,
12381,
452,
8132,
29,
198,
2,
198,
2,
220,
770,
... | 2.438869 | 2,192 |
"""
Not yet implemented
added FMC v6.5.0
Appears to only be valid for Firepower 1010 devices.
"""
| [
37811,
198,
3673,
1865,
9177,
198,
29373,
376,
9655,
410,
21,
13,
20,
13,
15,
198,
4677,
4127,
284,
691,
307,
4938,
329,
3764,
6477,
8949,
15,
4410,
13,
198,
37811,
198
] | 3.0625 | 32 |
SIZEOF_BYTE = 8
SIZEOF_INT_16 = 16 // SIZEOF_BYTE
SIZEOF_INT_32 = 32 // SIZEOF_BYTE
| [
11584,
57,
4720,
37,
62,
17513,
9328,
796,
807,
198,
198,
11584,
57,
4720,
37,
62,
12394,
62,
1433,
796,
1467,
3373,
311,
14887,
4720,
37,
62,
17513,
9328,
198,
11584,
57,
4720,
37,
62,
12394,
62,
2624,
796,
3933,
3373,
311,
14887,
... | 1.734694 | 49 |
import setuptools
setuptools.setup(
name="deepspeech",
version="0.3.0",
description="train and evaluate a DeepSpeech or DeepSpeech2 network",
author="myrtle.ai",
author_email="mlperf@myrtle.ai",
packages=setuptools.find_packages('src'),
package_dir={'': 'src'},
python_requires='>=3.5',
entry_points={
'console_scripts': ['deepspeech=deepspeech.run:main']
}
)
| [
11748,
900,
37623,
10141,
198,
198,
2617,
37623,
10141,
13,
40406,
7,
198,
220,
220,
220,
1438,
2625,
22089,
45862,
1600,
198,
220,
220,
220,
2196,
2625,
15,
13,
18,
13,
15,
1600,
198,
220,
220,
220,
6764,
2625,
27432,
290,
13446,
2... | 2.449102 | 167 |
#!/usr/bin/env python
# -*- coding: latin-1 -*-
#
# Copyright 2016-2021 Blaise Frederick
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# $Author: frederic $
# $Date: 2016/04/07 21:46:54 $
# $Id: OrthoImageItem.py,v 1.13 2016/04/07 21:46:54 frederic Exp $
#
# -*- coding: utf-8 -*-
"""
A widget for orthographically displaying 3 and 4 dimensional data
"""
import os
import numpy as np
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
try:
from PIL import Image
PILexists = True
except ImportError:
PILexists = False
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3042,
259,
12,
16,
532,
9,
12,
198,
2,
198,
2,
220,
220,
15069,
1584,
12,
1238,
2481,
1086,
64,
786,
26113,
198,
2,
198,
2,
220,
220,
49962,
739,
262,... | 2.936986 | 365 |
# Copyright (c) 2009, 2012-2013, 2015-2020 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.params import *
from m5.options import *
from m5.SimObject import *
from m5.objects.Workload import KernelWorkload
| [
2,
15069,
357,
66,
8,
3717,
11,
2321,
12,
6390,
11,
1853,
12,
42334,
20359,
15302,
198,
2,
1439,
2489,
10395,
13,
198,
2,
198,
2,
383,
5964,
2174,
14582,
691,
284,
6634,
287,
262,
3788,
290,
2236,
198,
2,
407,
307,
30816,
355,
2... | 3.948307 | 561 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Median is the middle value in an ordered integer list. If the size of the list
is even, there is no middle value. So the median is the mean of the two middle
value.
Examples:
[2,3,4] , the median is 3
[2,3], the median is (2 + 3) / 2 = 2.5
Design a data structure that supports the following two operations:
void addNum(int num) - Add a integer number from the data stream to the data
structure.
double findMedian() - Return the median of all elements so far.
>>> finder = MedianFinder()
>>> finder.findMedian()
>>> finder.addNum(1)
>>> finder.findMedian()
1
>>> finder.addNum(2)
>>> finder.findMedian()
1.5
>>> finder.addNum(3)
>>> finder.findMedian()
2
>>> finder = MedianFinder()
>>> finder.findMedian()
>>> finder.addNum(2)
>>> finder.findMedian()
2
>>> finder.addNum(1)
>>> finder.findMedian()
1.5
>>> finder.addNum(3)
>>> finder.findMedian()
2
"""
import bisect
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
9921,
666,
318,
262,
3504,
1988,
287,
281,
6149,
18253,
1351,
13,
1002,
262,
2546,
286,
262,
1351,
198,
2... | 2.547739 | 398 |
from .client import Tiingo | [
6738,
764,
16366,
1330,
16953,
32735
] | 4.333333 | 6 |
"""
#
# Sina Farhadi
#
# @2019 All Rights Served
#
# This Is Logger Code
#
# This Module is Use To Print Debugging Process
#
"""
# Colors
debug = True
# Error Log
# Info Log | [
37811,
198,
2,
198,
2,
220,
220,
28743,
6755,
71,
9189,
198,
2,
220,
220,
220,
198,
2,
220,
220,
2488,
23344,
1439,
6923,
3116,
276,
198,
2,
198,
2,
220,
220,
770,
1148,
5972,
1362,
6127,
198,
2,
198,
2,
220,
220,
770,
19937,
... | 2.506667 | 75 |
"""
A set of tests for using the QCEngine project
"""
import copy
import numpy as np
import tempfile
import logging
import math
from geometric.molecule import bohr2ang
logger = logging.getLogger(__name__)
from . import addons
import geometric.optimize as gt
from geometric.internal import CartesianCoordinates,\
PrimitiveInternalCoordinates, DelocalizedInternalCoordinates
from geometric.nifty import ang2bohr
localizer = addons.in_folder
test_logger = addons.test_logger
_base_schema = {
"schema_version": 1,
"molecule": {
"geometry": [
0.0, 0.0, -0.1294769411935893,
0.0, -1.494187339479985, 1.0274465079245698,
0.0, 1.494187339479985, 1.0274465079245698
],
"symbols": ["O", "H", "H"],
"connectivity": [[0, 1, 1], [0, 2, 1]]
},
"driver": "gradient",
"model": {
"method": "UFF",
"basis": None
},
"keywords": {},
"program": "rdkit"
} # yapf: disable
_geo2 = [0.0139, -0.4830, 0.2848,
0.0628, -0.2860, 0.7675,
0.0953, -1.0031, 0.4339]
@addons.using_qcengine
@addons.using_rdkit
class BatchOptimizer(object):
""" Demo BatchOptmizer for runnig pytest test """
def _initOptimizer(self, schemas):
""" initilize all OptObjects for the schmas passed.
Arguements
----------
schemas: list of schemas for qcengine
return
------
list of OptOject's for each schema
"""
#=========================================#
#| Set up the internal coordinate system |#
#=========================================#
# First item in tuple: The class to be initialized
# Second item in tuple: Whether to connect non-bonded fragments
# Third item in tuple: Whether to throw in all Cartesian (no effect if second item is True)
CoordSysDict = {'cart':(CartesianCoordinates, False, False),
'prim':(PrimitiveInternalCoordinates, True, False),
'dlc':(DelocalizedInternalCoordinates, True, False),
'hdlc':(DelocalizedInternalCoordinates, False, True),
'tric':(DelocalizedInternalCoordinates, False, False)}
coordsys = self.kwargs.get('coordsys', 'tric')
CoordClass, connect, addcart = CoordSysDict[coordsys.lower()]
optimizers = []
for schema in schemas:
M, engine = gt.get_molecule_engine(engine='qcengine', qcschema=schema, **self.kwargs)
coords = M.xyzs[0].flatten() * ang2bohr
# Read in the constraints
constraints = self.kwargs.get('constraints', None) #Constraint input file (optional)
if constraints is not None:
Cons, CVals = gt.ParseConstraints(M, open(constraints).read())
else:
Cons = None
CVals = None
IC = CoordClass(M, build=True, connect=connect, addcart=addcart, constraints=Cons,
cvals=CVals[0] if CVals is not None else None)
tmpDir = tempfile.mkdtemp(".tmp", "batchOpt")
optimizer = gt.Optimizer(coords, M, IC, engine, tmpDir, self.params)
optimizer.calcEnergyForce()
optimizer.prepareFirstStep()
logger.debug("[AU]: e=%.5f bl=%.5f,%.5f g=%.4f" % (
optimizer.E, optimizer.X[0],optimizer.X[3], optimizer.gradx[0]))
optimizers.append(optimizer)
return optimizers
def _batchComputeEnergyAndForces(self, optimizers):
""" This just an mockup. if this was NNP this would work in one batch
on the GPU.
"""
for optimizer in optimizers:
if optimizer.state == gt.OPT_STATE.NEEDS_EVALUATION:
optimizer.calcEnergyForce()
logger.debug("[AU]: e=%.5f bl=%.5f,%.5f g=%.4f" % (
optimizer.E, optimizer.X[0],optimizer.X[3], optimizer.gradx[0]))
def optimizeMols(self, schemas):
""" Optmize all molecules as represented by the schemas.
return
------
list of optimized Molecule's
"""
optimizers = self._initOptimizer(schemas)
res = []
# Optimization Loop, while not all have completed optimization
while len(optimizers) > 0:
nextOptObjs = []
# take one step, energy and gradient must have been stored in optObj
for optimizer in optimizers:
optimizer.step()
self._batchComputeEnergyAndForces(optimizers)
# evaluate step
for optimizer in optimizers:
if optimizer.state == gt.OPT_STATE.NEEDS_EVALUATION:
optimizer.evaluateStep()
if optimizer.state in [gt.OPT_STATE.CONVERGED, gt.OPT_STATE.FAILED]:
logger.info("Optmization convereged!")
res.append(optimizer.progress)
continue
nextOptObjs.append(optimizer)
if len(nextOptObjs) == 0: break ######## All Done
# step and evaluation completed, next step for remaining conformations
optimizers = nextOptObjs
return res
@addons.using_qcengine
@addons.using_rdkit
_N2_schema = {
"schema_version": 1,
"molecule": {
"geometry": [
0.0, 0., 0.,
1.9, 0., 0.
],
"symbols": ["N", "N"],
"connectivity": [[0, 1, 3]]
},
"driver": "gradient",
"model": {
"method": "UFF",
"basis": None
},
"keywords": {},
"program": "rdkit"
} # yapf: disable
_N2_geo2 = [0.0, 0., 0.,
0.6, 0., 0.,]
@addons.using_qcengine
@addons.using_rdkit
| [
37811,
198,
32,
900,
286,
5254,
329,
1262,
262,
1195,
5222,
782,
500,
1628,
198,
37811,
198,
198,
11748,
4866,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
20218,
7753,
198,
11748,
18931,
198,
11748,
10688,
198,
6738,
38445,
13,
76,
... | 1.966015 | 3,119 |
import inspect
class RpcConnectionError(Exception):
"""Raised (in the caller) if the connection to the broker is lost while
waiting for an rpc reply"""
class ContainerBeingKilled(Exception):
"""Raised by :meth:`Container.spawn_worker` if it has started a ``kill``
sequence.
Entrypoint providers should catch this and react as if they hadn't been
available in the first place, e.g. an rpc consumer should probably requeue
the message.
We need this because eventlet may yield during the execution of
:meth:`Container.kill`, giving entrypoints a chance to fire before
they themselves have been killed.
"""
registry = {}
def get_module_path(exc_type):
""" Return the dotted module path of `exc_type`, including the class name.
e.g.::
>>> get_module_path(MethodNotFound)
>>> "nameko.exceptions.MethodNotFound"
"""
module = inspect.getmodule(exc_type)
return "{}.{}".format(module.__name__, exc_type.__name__)
class RemoteError(Exception):
""" Exception to raise at the caller if an exception occured in the
remote worker.
"""
def serialize(exc):
""" Serialize `self.exc` into a data dictionary representing it.
"""
try:
value = unicode(exc)
except Exception:
value = '[__unicode__ failed]'
return {
'exc_type': type(exc).__name__,
'exc_path': get_module_path(type(exc)),
'exc_args': exc.args,
'value': value,
}
def deserialize(data):
""" Deserialize `data` to an exception instance.
If the `exc_path` value matches an exception registered as
``deserializable``, return an instance of that exception type.
Otherwise, return a `RemoteError` instance describing the exception
that occured.
"""
key = data.get('exc_path')
if key in registry:
exc_args = data.get('exc_args', ())
return registry[key](*exc_args)
exc_type = data.get('exc_type')
value = data.get('value')
return RemoteError(exc_type=exc_type, value=value)
def deserialize_to_instance(exc_type):
""" Decorator that registers `exc_type` as deserializable back into an
instance, rather than a :class:`RemoteError`. See :func:`deserialize`.
"""
key = get_module_path(exc_type)
registry[key] = exc_type
return exc_type
@deserialize_to_instance
@deserialize_to_instance
@deserialize_to_instance
| [
11748,
10104,
628,
628,
198,
4871,
371,
14751,
32048,
12331,
7,
16922,
2599,
198,
220,
220,
220,
37227,
21762,
1417,
357,
259,
262,
24955,
8,
611,
262,
4637,
284,
262,
20426,
318,
2626,
981,
198,
220,
220,
220,
4953,
329,
281,
374,
... | 2.832944 | 856 |
from .sync_result import sync_table # noqa
| [
6738,
764,
27261,
62,
20274,
1330,
17510,
62,
11487,
1303,
645,
20402,
198
] | 3.307692 | 13 |
from absl import app
from absl import flags
from collections import OrderedDict
import itertools
import pandas as pd
from pathlib import Path
import pickle
from sklearn.model_selection import ParameterGrid
from proteingnn.model import get_default_trainer
from proteingnn.example.data import DefaultDatamodule, read_DeepSequence_csv
from proteingnn.example.model import FastGCNModel
FLAGS = flags.FLAGS
flags.DEFINE_boolean('debug', False, 'Run one batch on CPU before training.')
flags.DEFINE_multi_string('datasets', None, 'Dataset(s) use`d in training.')
flags.DEFINE_multi_integer('hidden_channels', 16, 'Hidden layer size.')
flags.DEFINE_boolean('verbose', False, 'Verbose mode')
flags.DEFINE_integer('batch_size', 16, 'Batch size')
flags.DEFINE_string('reboot_pkl', None, 'Restart from parameter scanning.')
flags.DEFINE_integer('model_repeat', 1, 'Number of training ensembles for each parameter.')
flags.DEFINE_multi_string('gnn_name', ['GCNConv'], 'GNN name in torch_geometric.nn')
flags.DEFINE_integer('patience', 50, 'Patience in training.')
flags.DEFINE_multi_float('weight_decay', 0, 'Weight decay in Adam.')
flags.DEFINE_bool('regression', True, 'regression/classification')
flags.DEFINE_string('dataset_name', None, 'Dataset name.')
flags.DEFINE_string('embedding_radius', 'esm-6', '{Embedding name}-{radius}')
flags.mark_flags_as_required(['dataset_name'])
if __name__ == '__main__':
app.run(main)
| [
6738,
2352,
75,
1330,
598,
198,
6738,
2352,
75,
1330,
9701,
198,
6738,
17268,
1330,
14230,
1068,
35,
713,
198,
11748,
340,
861,
10141,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
3108,
8019,
1330,
10644,
198,
11748,
2298,
293,
19... | 3.006342 | 473 |
n = str(input('Qual seu nome? ')).strip()
nome = n.split()
print('O seu primeiro nome é {} '.format(nome[0]))
print('O seu último nome é {}'.format(nome[len(nome)-1]))
| [
198,
77,
796,
965,
7,
15414,
10786,
46181,
384,
84,
299,
462,
30,
705,
29720,
36311,
3419,
198,
77,
462,
796,
299,
13,
35312,
3419,
198,
4798,
10786,
46,
384,
84,
6994,
7058,
299,
462,
38251,
23884,
45302,
18982,
7,
77,
462,
58,
1... | 2.253333 | 75 |
import math
import numpy
import torch
from torch import nn
class MultiHeadedAttention(nn.Module):
"""Multi-Head Attention layer
:param int n_head: the number of head s
:param int n_feat: the number of features
:param float dropout_rate: dropout rate
"""
def forward(self, query, key, value, mask, rmvalue):
"""Compute 'Scaled Dot Product Attention'
:param torch.Tensor query: (batch, time1, size)
:param torch.Tensor key: (batch, time2, size)
:param torch.Tensor value: (batch, time2, size)
:param torch.Tensor mask: (batch, time1, time2)
:param torch.nn.Dropout dropout:
:return torch.Tensor: attentined and transformed `value` (batch, time1, d_model)
weighted by the query dot key attention (batch, head, time1, time2)
"""
n_batch = query.size(0)
q = self.linear_q(query).view(n_batch, -1, self.h, self.d_k)
k = self.linear_k(key).view(n_batch, -1, self.h, self.d_k)
v = self.linear_v(value).view(n_batch, -1, self.h, self.d_k)
rmv = self.linear_rmv(rmvalue).view(n_batch, -1, self.h, self.d_k)
q = q.transpose(1, 2) # (batch, head, time1, d_k)
k = k.transpose(1, 2) # (batch, head, time2, d_k)
v = v.transpose(1, 2) # (batch, head, time2, d_k)
rmv = rmv.transpose(1, 2) # (batch, head, time2, d_k)
scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(
self.d_k
) # (batch, head, time1, time2)
if mask is not None:
mask = mask.unsqueeze(1).eq(0) # (batch, 1, time1, time2)
min_value = float(
numpy.finfo(torch.tensor(0, dtype=scores.dtype).numpy().dtype).min
)
scores = scores.masked_fill(mask, min_value)
self.attn = torch.softmax(scores, dim=-1).masked_fill(
mask, 0.0
) # (batch, head, time1, time2)
else:
self.attn = torch.softmax(scores, dim=-1) # (batch, head, time1, time2)
p_attn = self.dropout(self.attn)
x = torch.matmul(p_attn, v) # (batch, head, time1, d_k)
x = (
x.transpose(1, 2).contiguous().view(n_batch, -1, self.h * self.d_k)
) # (batch, time1, d_model)
rmx = torch.matmul(p_attn, rmv) # (batch, head, time1, d_k)
rmx = (
rmx.transpose(1, 2).contiguous().view(n_batch, -1, self.h * self.d_k)
) # (batch, time1, d_model)
return self.linear_out(x), self.linear_rmout(rmx) # (batch, time1, d_model)
| [
11748,
10688,
198,
198,
11748,
299,
32152,
198,
11748,
28034,
198,
6738,
28034,
1330,
299,
77,
628,
198,
4871,
15237,
13847,
276,
8086,
1463,
7,
20471,
13,
26796,
2599,
198,
220,
220,
220,
37227,
29800,
12,
13847,
47406,
7679,
628,
220,... | 2.011774 | 1,274 |
from enum import Enum
from pydantic import BaseModel
class Direction(str, Enum):
"""
The text direction of the paragraph/section. If unset, default to `LEFT_TO_RIGHT` since paragraph/section
direction is not inherited.
"""
UNSPECIFIED = "CONTENT_DIRECTION_UNSPECIFIED"
LEFT_TO_RIGHT = "LEFT_TO_RIGHT"
RIGHT_TO_LEFT = "RIGHT_TO_LEFT"
class Dimension(BaseModel):
"""A magnitude in a single direction in the specified units"""
magnitude: float = 0
unit: "Unit"
class Unit(str, Enum):
"""The units for magnitude"""
UNSPECIFIED = "UNIT_UNSPECIFIED"
POINT = "PT"
Dimension.update_forward_refs()
| [
6738,
33829,
1330,
2039,
388,
198,
6738,
279,
5173,
5109,
1330,
7308,
17633,
628,
198,
4871,
41837,
7,
2536,
11,
2039,
388,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
383,
2420,
4571,
286,
262,
7322,
14,
5458,
13,
1002,
55... | 2.762712 | 236 |
import os
os.chdir(r'C:\Temp\Wolfenstein')
print(os.getcwd())
CHAR8 = 1
INT16 = 2
INT32 = 4
f = open('MAPHEAD-1.2.WL1', 'rb')
h = f.read()
f.close()
magic = h[0:0+INT16] # uint16le = 2x8
print(magic == b'\xcd\xab')
ptr = h[2:INT32 * 100 + 2]
levels = []
for i in range(100):
levels.append(int.from_bytes(ptr[i*INT32:(i+1)*INT32],
byteorder='little', signed=True))
print('Level 0 pointer:', levels[0]) # 0x08C3 = 2250 debut header 1er niveau
f = open('GAMEMAPS-1.2.WL1', 'rb')
g = f.read()
f.close()
header_ted5 = g[0:8]
print('Header:', header_ted5)
print(chr(g[2272])) # 2272 : (+22) Debut de W donc du nom du niveau
# Hardcoded 64x64 in Wolfenstein 3D
width = int.from_bytes(g[levels[0] + 18:INT16], byteorder='little',
signed=True)
height = int.from_bytes(g[levels[0] + 20:INT16], byteorder='little',
signed=True)
name = ''
for b in g[levels[0] + 22:levels[0] + 22 + CHAR8 * 16]:
if b != '\0':
name += chr(b)
print(width, height, name)
# '!ID!'
print(g[levels[0] + 22 + CHAR8 * 16:levels[0] + 22 + CHAR8 * 16 + CHAR8 * 4])
start = levels[0] + 22 + CHAR8 * 16 + CHAR8 * 4
lvl = g[start:levels[1]]
print('Taille compressee :', len(lvl), 'vs', 'Taille normale:', 64 * 64)
res = []
length = len(lvl)
while i < length:
near = False
far = False
if lvl[i] != 0x00 and i+1 < length:
near = (lvl[i+1] == 0xA7)
far = (lvl[i+1] == 0xA8)
if near:
nb = lvl[i]
start_ref = lvl[i+2]
for j in range(i - start_ref, i - start_ref + nb):
res.append(lvl[j])
i += 2
if far:
nb = lvl[i]
start_ref = int.from_bytes(lvl[i+2:i+4], byteorder='little', signed=False)
for j in range(i - start_ref, i - start_ref + nb):
res.append(lvl[j])
else:
res.append(lvl[i])
i += 1
print('Taille uncamarck:', len(res))
unrlew = []
print('Level 1 pointer:', levels[1])
| [
11748,
28686,
198,
418,
13,
354,
15908,
7,
81,
6,
34,
7479,
30782,
59,
32069,
37975,
11537,
198,
4798,
7,
418,
13,
1136,
66,
16993,
28955,
198,
198,
38019,
23,
796,
352,
198,
12394,
1433,
796,
362,
198,
12394,
2624,
796,
604,
198,
... | 2.013279 | 979 |
# Run as:
# python3 part1.py [input|sample]
import sys
if __name__ == '__main__':
main()
| [
2,
5660,
355,
25,
198,
2,
21015,
18,
636,
16,
13,
9078,
685,
15414,
91,
39873,
60,
198,
198,
11748,
25064,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
1388,
3419,
198
] | 2.435897 | 39 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Contains base class for computing the log likeihood and gradient for a single
pulsar, including an outlier parameter to detect outlying TOAs. Any coordinate
transformations to be applied build off of this base class.
Class methods include computing white noise vectors (N and J), red noise
vectors (Phi), outlier parameters, updating deterministic signals, and
computing the log likelihood and gradient.
This class is a near copy of the ptaLikelihood class found in piccard
(https://github.com/vhaasteren/piccard), and the methods have only been
updated as necessary to work with enterprise Pulsar objects instead of the
data structures in piccard.
Requirements:
numpy
"""
import numpy as np
from .jitterext import cython_Uj
from .pulsar import OutlierPulsar
import enterprise_outliers.utils as ut
class ptaLikelihood(OutlierPulsar):
"""This class serves as a base class for computing the log likelihood and
gradient for a single pulsar. It contains methods to initialize and compute
any auxilliary quantities needed for likelihood calculation, and includes
an additional hyperparameter, corresponding to an outlier 'signal', to
include with the noise parameters. When sampled using HMC, NUTS, or other
gradient-based Monte Carlo samplers, this additional parameter can be used
to detect outlying TOAs in the dataset.
:param enterprise_pintpulsar: `enterprise.PintPulsar` object (with drop_pintpsr=False)
"""
def __init__(self, enterprise_pintpulsar):
"""Constructor method
"""
super(ptaLikelihood, self).__init__(enterprise_pintpulsar)
self.basepmin = None
self.basepmax = None
self.basepstart = None
self.outlier_prob = None
self.detresiduals = None
self.outlier_sig_dict = dict()
self.d_Pb_ind = None
self.d_L_d_b = None
self.d_Pr_d_b = None
self.initBounds()
def initBounds(self):
"""Set parameter vector minimum, maximum, and start values by building
from the :class: `OutlierPulsar` signal dictionary
"""
pmin = []
pmax = []
pstart = []
for _, sig in self.signals.items():
pmin.extend(sig['pmin'])
pmax.extend(sig['pmax'])
pstart.extend(sig['pstart'])
self.basepmin = np.array(pmin)
self.basepmax = np.array(pmax)
self.basepstart = np.array(pstart)
def updateParams(self, parameters):
"""Update parameter name:value dictionary with new values
:param parameters: Vector of signal parameters
"""
for key, value in self.ptadict.items():
self.ptaparams[key] = parameters[value]
def setWhiteNoise(self, calc_gradient=True):
"""Compute white noise vectors for EFAC, EQUAD, and ECORR signals, and
optionally calculate their derivatives.
:param calc_gradient: Include gradient calculation, default is True
"""
self.Nvec[:] = 0
self.Jvec[:] = 0
ef = self.efac_sig
eq = self.equad_sig
ec = self.ecorr_sig
self.Nvec[:] = ef.get_ndiag(self.ptaparams) + eq.get_ndiag(self.ptaparams)
if ec:
for param in ec.param_names:
pequadsqr = 10**(2*self.ptaparams[param])
self.Jvec += self.signals[param]['Jvec'] * pequadsqr
if calc_gradient:
if ef:
for param in ef.param_names:
self.d_Nvec_d_param[self.ptadict[param]] = 2 * \
self.signals[param]['Nvec'] * \
self.ptaparams[param]
if eq:
for param in eq.param_names:
self.d_Nvec_d_param[self.ptadict[param]] = self.signals[param]['Nvec'] * \
2 * np.log(10) * \
10**(2*self.ptaparams[param])
if ec:
for param in ec.param_names:
self.d_Jvec_d_param[self.ptadict[param]] = self.signals[param]['Jvec'] * \
2 * np.log(10) * \
10**(2*self.ptaparams[param])
def setPhi(self, calc_gradient=True):
"""Compute red noise Phi matrix for log10Amp and spectral index
parameters, and optionally calculate their derivatives.
:param calc_gradient: Include gradient calculation, default is True
"""
self.Phivec[:] = 0
rn = self.rn_sig
log10A = self.ptaparams[self.pname + '_rn_log10_A']
gamma = self.ptaparams[self.pname + '_rn_gamma']
sTmax = self.psr.toas.max() - self.psr.toas.min()
self.Phivec[:] = rn.get_phi(self.ptaparams)
if calc_gradient:
d_mat = ut.d_powerlaw(log10A, gamma, sTmax, self.Ffreqs)
for key, _ in self.ptaparams.items():
if key.endswith('log10_A'):
self.d_Phivec_d_param[self.ptadict[key]] = d_mat[:, 0]
elif key.endswith('gamma'):
self.d_Phivec_d_param[self.ptadict[key]] = d_mat[:, 1]
def setOutliers(self):
"""Set outlier probability parameter and its corresponding index in the
parameter vector.
"""
for key, param in self.ptaparams.items():
if key.endswith('outlierprob'):
self.outlier_prob = param
self.d_Pb_ind = [self.ptadict[key]]
def setDetSources(self, parameters, calc_gradient=True):
"""Update the deterministic signals given a parameter vector, and
optionally calculate their derivatives.
:param parameters: Vector of signal parameters
:param calc_gradient: Include gradient calculation, default is True
"""
d_L_d_b = np.zeros_like(parameters)
d_Pr_d_b = np.zeros_like(parameters)
self.outlier_sig_dict = dict()
self.detresiduals = self.psr.residuals.copy()
for _, sig in self.signals.items():
sparams = parameters[sig['msk']]
if sig['type'] == 'bwm':
pass
elif sig['type'] == 'timingmodel':
self.detresiduals -= np.dot(self.Mmat_g, sparams)
elif sig['type'] == 'fouriermode':
self.detresiduals -= np.dot(self.Fmat, sparams)
elif sig['type'] == 'jittermode':
self.detresiduals -= cython_Uj(sparams, self.Uindslc, len(self.detresiduals))
if calc_gradient:
pulsarind = 0
if pulsarind not in self.outlier_sig_dict:
self.outlier_sig_dict[pulsarind] = []
for _, sig in self.signals.items():
parslice = sig['msk']
sparams = parameters[parslice]
if sig['type'] == 'bwm':
pass
elif sig['type'] == 'timingmodel':
d_L_d_xi = np.zeros(self.Mmat_g.shape[1])
d_L_d_b_o = self.Mmat_g.T * (self.detresiduals / self.Nvec)[None, :]
self.outlier_sig_dict[pulsarind].append((parslice, d_L_d_b_o))
d_L_d_b[parslice] = d_L_d_xi
elif sig['type'] == 'fouriermode':
d_L_d_xi = np.zeros(self.Fmat.shape[1])
phivec = self.Phivec.copy()
d_L_d_b_o = self.Fmat.T * (self.detresiduals / self.Nvec)[None, :]
self.outlier_sig_dict[pulsarind].append((parslice, d_L_d_b_o))
d_Pr_d_xi = -sparams / phivec
d_L_d_b[parslice] = d_L_d_xi
d_Pr_d_b[parslice] = d_Pr_d_xi
elif sig['type'] == 'jittermode':
d_L_d_xi = np.zeros(self.Umat.shape[1])
d_L_d_b_o = self.Umat.T * (self.detresiduals / self.Nvec)[None, :]
self.outlier_sig_dict[pulsarind].append((parslice, d_L_d_b_o))
d_Pr_d_xi = -sparams / self.Jvec
d_L_d_b[parslice] = d_L_d_xi
d_Pr_d_b[parslice] = d_Pr_d_xi
self.d_L_d_b = d_L_d_b
self.d_Pr_d_b = d_Pr_d_b
    def set_hyperparameters(self, parameters, calc_gradient=True):
        """Wrapper function to update all hyperparameters (white/red noise,
        outliers) and their associated vector and matrix quantities.

        The call order matters: ``updateParams`` must run first so the
        per-signal parameter values are current before the Phi / white-noise
        / outlier quantities are rebuilt from them.

        :param parameters: Vector of signal parameters
        :param calc_gradient: Include gradient calculation, default is True
        """
        self.updateParams(parameters)
        self.setPhi(calc_gradient=calc_gradient)
        self.setWhiteNoise(calc_gradient=calc_gradient)
        self.setOutliers()
    def base_loglikelihood_grad(self, parameters, set_hyper_params=True, calc_gradient=True):
        """Return the log likelihood and gradient for the original,
        non-transformed coordinates.

        The per-observation likelihood is a two-component mixture: with
        probability ``1 - Pb`` a Gaussian with variance ``Nvec`` evaluated
        at the deterministic-model residuals, and with probability ``Pb``
        a flat outlier density ``1 / P0``.  Gaussian priors on the Fourier
        and jitter amplitudes contribute the ``bBb``/``ldB`` terms.

        :param parameters: Vector of signal parameters
        :param set_hyper_params: Update hyperparameter values before computing
            log likelihood, default is True
        :param calc_gradient: Include gradient calculation, default is True
        :return: Log likelihood and gradient
        """
        if set_hyper_params:
            self.set_hyperparameters(parameters, calc_gradient=calc_gradient)
            self.setDetSources(parameters, calc_gradient=calc_gradient)
        d_L_d_b, d_Pr_d_b = self.d_L_d_b, self.d_Pr_d_b
        gradient = np.zeros_like(d_L_d_b)
        # Scalar accumulators (0-d float arrays so += works uniformly).
        bBb = np.zeros_like(0, dtype=float)
        ldB = np.zeros_like(0, dtype=float)
        logl_outlier = np.zeros_like(0, dtype=float)
        P0 = self.P0
        Pb = self.outlier_prob
        # Per-observation Gaussian log-density: -0.5 r^2/N - 0.5 log(2 pi N).
        lln = self.detresiduals**2 / self.Nvec
        lld = np.log(self.Nvec) + np.log(2*np.pi)
        logL0 = -0.5*lln -0.5*lld
        # Mixture likelihood per observation: (1-Pb) N(r; 0, N) + Pb / P0.
        bigL0 = (1. - Pb) * np.exp(logL0)
        bigL = bigL0 + Pb/P0
        logl_outlier += np.sum(np.log(bigL))
        # Deterministic-signal gradients, weighted by the posterior
        # probability bigL0/bigL that each observation is an inlier.
        for pslc, d_L_d_b_o in self.outlier_sig_dict[0]:
            gradient[pslc] += np.sum(d_L_d_b_o * bigL0[None, :]/bigL[None, :], axis=1)
        # Gradient with respect to the outlier probability parameter(s).
        for pbind in self.d_Pb_ind:
            gradient[pbind] += np.sum((-np.exp(logL0)+1.0/P0)/bigL)
        # White-noise hyperparameter gradients (chain rule through Nvec).
        for key, d_Nvec_d_p in self.d_Nvec_d_param.items():
            d_L_d_b_o = 0.5*(self.detresiduals**2 * d_Nvec_d_p / \
                    self.Nvec**2 - d_Nvec_d_p / self.Nvec)
            gradient[key] += np.sum(d_L_d_b_o * bigL0/bigL)
        if 'fouriermode' in self.ptaparams.keys():
            pslc = self.signals['fouriermode']['msk']
            bsqr = parameters[pslc]**2
            phivec = self.Phivec  # + Svec[fslc]
            # Gaussian prior term for the Fourier amplitudes: b^T Phi^-1 b
            # and log|Phi|, plus its hyperparameter gradients.
            bBb += np.sum(bsqr / phivec)
            ldB += np.sum(np.log(phivec))
            gradient[pslc] += d_Pr_d_b[pslc]
            for key, d_Phivec_d_p in self.d_Phivec_d_param.items():
                gradient[key] += 0.5 * np.sum(bsqr * d_Phivec_d_p / phivec**2)
                gradient[key] -= 0.5 * np.sum(d_Phivec_d_p / phivec)
        if 'dmfouriermode' in self.ptaparams.keys():
            # DM-variation Fourier modes: not implemented in this model.
            pass
        if 'jittermode' in self.ptaparams.keys():
            pslc = self.signals['jittermode']['msk']
            bsqr = parameters[pslc]**2
            jvec = self.Jvec
            # Same prior structure as the Fourier modes, with Jvec variances.
            bBb += np.sum(bsqr / jvec)
            ldB += np.sum(np.log(jvec))
            gradient[pslc] += d_Pr_d_b[pslc]
            for key, d_Jvec_d_p in self.d_Jvec_d_param.items():
                gradient[key] += 0.5 * np.sum(bsqr * d_Jvec_d_p / jvec**2)
                gradient[key] -= 0.5 * np.sum(d_Jvec_d_p / jvec)
        ll = np.sum(logl_outlier) - 0.5*np.sum(bBb) - 0.5*np.sum(ldB)
        return ll, gradient
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
4264,
1299,
2779,
1398,
329,
14492,
262,
2604,
588,
72,
2894,
290,
31312,
329,
257,
2060,
198,
79,
57... | 1.946688 | 6,115 |
"""A module that checks docstrings in Python files.
"""
import inspect
import os
import sys
from typing import Any, Dict, List, Tuple
from numdoclint import helper
# Verbosity levels accepted by the ``verbose`` argument of the check
# functions: 0 suppresses stdout output, 1 prints check results.
VERBOSE_DISABLED: int = 0
VERBOSE_ENABLED: int = 1
def check_python_module(
        py_module_path: str, verbose: int = 1,
        ignore_func_name_prefix_list: List[str] = ['test_'],
        ignore_info_id_list: List[int] = [],
        enable_default_or_optional_doc_check: bool = False,
        skip_decorator_name_list: List[str] = ['Appender']) -> List[dict]:
    """
    Check docstring of single Python module.

    Parameters
    ----------
    py_module_path : str
        Path of target module.
    verbose : int, default 1
        Log settings of stdout. 0 -> no output, 1 -> output check result.
    ignore_func_name_prefix_list : list of str, default ['test_']
        A prefix list of function name conditions to ignore.
    ignore_info_id_list : list of int, default []
        List of IDs to ignore lint checking. A constant with a
        prefix of `INFO_ID_` can be specified.
    enable_default_or_optional_doc_check : bool, default False
        If True specified, the `default` and `optional` strings in
        docstring will be checked, i.e., an argument with a default
        value must be documented as `default ...` or `optional`.
    skip_decorator_name_list : list, default ['Appender']
        If a decorator name in this list is set to function, that
        function will not be checked. Specify if necessary for
        docstring-related decorators (`Appender` is used by Pandas).

    Returns
    -------
    info_list : list of dicts
        A list containing information on check results. Each dict has
        the keys `module_path`, `func_name`, `info_id` and `info`.
        If all checks pass, an empty list will be returned.

    Raises
    ------
    IOError
        If the target module can not be found.

    Notes
    -----
    If there are multiple functions with the same name in the module,
    only the first function will be checked.
    """
    _check_module_exists(py_module_path=py_module_path)
    code_str: str = helper.read_file_str(file_path=py_module_path)
    target_func_names: List[str] = helper.get_func_name_list(
        code_str=code_str)
    if not target_func_names:
        return []
    result_info_list: List[dict] = []
    for target_func_name in target_func_names:
        # Skip e.g. test functions via their name prefix.
        if is_func_name_to_ignore(
                func_name=target_func_name,
                ignore_func_name_prefix_list=ignore_func_name_prefix_list):
            continue
        result_info_list.extend(get_single_func_info_list(
            path=py_module_path,
            code_str=code_str,
            func_name=target_func_name,
            enable_default_or_optional_doc_check=(
                enable_default_or_optional_doc_check),
            skip_decorator_name_list=skip_decorator_name_list,
            ignore_info_id_list=ignore_info_id_list))
    _print_info_list(info_list=result_info_list, verbose=verbose)
    return result_info_list
def check_python_module_recursively(
        dir_path: str, verbose: int = 1,
        ignore_func_name_prefix_list: List[str] = ['test_'],
        ignore_info_id_list: List[int] = [],
        enable_default_or_optional_doc_check: bool = False,
        skip_decorator_name_list: List[str] = ['Appender']) -> List[dict]:
    """
    Check Python module docstring recursively.

    Parameters
    ----------
    dir_path : str
        Target directory path.
    verbose : int, default 1
        Log settings of stdout. 0 -> no output, 1 -> output check result.
    ignore_func_name_prefix_list : list of str, default ['test_']
        A prefix list of function name conditions to ignore.
    ignore_info_id_list : list of int, default []
        List of IDs to ignore lint checking. A constant with a
        prefix of `INFO_ID_` can be specified.
    enable_default_or_optional_doc_check : bool, default False
        If True specified, the `default` and `optional` strings in
        docstring will be checked.
    skip_decorator_name_list : list, default ['Appender']
        If a decorator name in this list is set to function, that
        function will not be checked (`Appender` is used by Pandas).

    Returns
    -------
    info_list : list of dicts
        A list containing information on check results. Each dict has
        the keys `module_path`, `func_name`, `info_id` and `info`.
    """
    # Delegate to the private helper that threads the accumulator list
    # through the directory recursion.
    return _check_python_module_recursively(
        dir_path=dir_path, info_list=[], verbose=verbose,
        ignore_func_name_prefix_list=ignore_func_name_prefix_list,
        ignore_info_id_list=ignore_info_id_list,
        enable_default_or_optional_doc_check=(
            enable_default_or_optional_doc_check),
        skip_decorator_name_list=skip_decorator_name_list)
def is_func_name_to_ignore(
        func_name: str,
        ignore_func_name_prefix_list: List[str]) -> bool:
    """
    Get boolean value of function name which should be ignored.

    Parameters
    ----------
    func_name : str
        Target function name.
    ignore_func_name_prefix_list : list of str
        A prefix list of function name conditions to ignore.

    Returns
    -------
    result_bool : bool
        The boolean value of function name which should be ignored.
    """
    # str.startswith accepts a tuple of prefixes, replacing the manual
    # loop; an empty tuple correctly yields False.
    return func_name.startswith(tuple(ignore_func_name_prefix_list))
def _print_info_list(info_list: List[dict], verbose: int) -> str:
    """
    Print check result.

    Parameters
    ----------
    info_list : list of dicts
        A list containing information on check results. Each dict needs
        the keys `module_path`, `func_name`, `info_id` and `info`.
    verbose : int
        Log settings of stdout.

    Returns
    -------
    printed_str : str
        Printed string ('' when nothing was printed).
    """
    if not info_list or verbose != VERBOSE_ENABLED:
        return ''
    entry_list: List[str] = [
        '{module_path}::{func_name}\n{info}\n'.format(
            module_path=info_dict[INFO_KEY_MODULE_PATH],
            func_name=info_dict[INFO_KEY_FUNC_NAME],
            info=info_dict[INFO_KEY_INFO])
        for info_dict in info_list]
    printed_str: str = '\n'.join(entry_list)
    print(printed_str)
    return printed_str
def _check_python_module_recursively(
        dir_path: str, info_list: List[dict], verbose: int = 1,
        ignore_func_name_prefix_list: List[str] = ['test_'],
        ignore_info_id_list: List[int] = [],
        enable_default_or_optional_doc_check: bool = False,
        skip_decorator_name_list: List[str] = ['Appender']) -> List[dict]:
    """
    Check Python module docstring recursively.

    Parameters
    ----------
    dir_path : str
        Target directory path.
    info_list : list of dicts
        List to add check results to.
    verbose : int, default 1
        Log settings of stdout. 0 -> no output, 1 -> output check result.
    ignore_func_name_prefix_list : list of str, default ['test_']
        A prefix list of function name conditions to ignore.
    ignore_info_id_list : list of int, default []
        List of IDs to ignore lint checking. A constant with a
        prefix of `INFO_ID_` can be specified.
    enable_default_or_optional_doc_check : bool, default False
        If True specified, the `default` and `optional` strings in
        docstring will be checked.
    skip_decorator_name_list : list, default ['Appender']
        If a decorator name in this list is set to function, that
        function will not be checked.

    Returns
    -------
    info_list : list of dicts
        A list containing information on check results. Each dict has
        the keys `module_path`, `func_name`, `info_id` and `info`.
    """
    child_name_list: List[str] = os.listdir(dir_path)
    if not child_name_list:
        return info_list
    for child_name in child_name_list:
        child_path: str = os.path.join(dir_path, child_name)
        child_path = child_path.replace('\\', '/')
        if os.path.isdir(child_path):
            # Recurse into sub-directories, threading the accumulator.
            info_list = _check_python_module_recursively(
                dir_path=child_path, info_list=info_list, verbose=verbose,
                ignore_func_name_prefix_list=ignore_func_name_prefix_list,
                ignore_info_id_list=ignore_info_id_list,
                enable_default_or_optional_doc_check=(
                    enable_default_or_optional_doc_check),
                skip_decorator_name_list=skip_decorator_name_list)
        elif child_path.endswith('.py'):
            info_list.extend(check_python_module(
                py_module_path=child_path, verbose=verbose,
                ignore_func_name_prefix_list=ignore_func_name_prefix_list,
                ignore_info_id_list=ignore_info_id_list,
                enable_default_or_optional_doc_check=(
                    enable_default_or_optional_doc_check),
                skip_decorator_name_list=skip_decorator_name_list))
    return info_list
# Identification numbers for each lint check. These IDs can be passed
# via ``ignore_info_id_list`` to suppress individual checks.
INFO_ID_LACKED_ARGUMENT: int = 1
INFO_ID_LACKED_DOCSTRING_PARAM: int = 2
INFO_ID_LACKED_DOCSTRING_PARAM_TYPE: int = 3
INFO_ID_LACKED_DOCSTRING_PARAM_DESCRIPTION: int = 4
INFO_ID_DIFFERENT_PARAM_ORDER: int = 5
INFO_ID_LACKED_FUNC_DESCRIPTION: int = 6
INFO_ID_LACKED_ARG_DEFAULT_VALUE: int = 7
INFO_ID_LACKED_DOC_DEFAULT_VALUE: int = 8
INFO_ID_LACKED_DOCSTRING_RETURN: int = 9
INFO_ID_LACKED_DOCSTRING_RETURN_TYPE: int = 10
INFO_ID_LACKED_DOCSTRING_RETURN_DESCRIPTION: int = 11
INFO_ID_LACKED_RETURN_VAL: int = 12

# Dictionary keys used in every check-result dict built by
# ``_make_info_dict``.
INFO_KEY_MODULE_PATH: str = 'module_path'
INFO_KEY_FUNC_NAME: str = 'func_name'
INFO_KEY_INFO_ID: str = 'info_id'
INFO_KEY_INFO: str = 'info'
def get_info_id_list() -> List[int]:
    """
    Get a list of information IDs.

    Returns
    -------
    info_id_list : list of int
        A list of information IDs (module-level `INFO_ID_` constants).
    """
    current_module = sys.modules[__name__]
    member_list: List[Tuple[str, Any]] = inspect.getmembers(current_module)
    return [
        member_obj for member_name, member_obj in member_list
        if member_name.startswith('INFO_ID_')
        and isinstance(member_obj, int)]
def get_single_func_info_list(
        path: str, code_str: str, func_name: str,
        enable_default_or_optional_doc_check: bool,
        skip_decorator_name_list: List[str],
        ignore_info_id_list: List[int]) -> List[dict]:
    """
    Get a list that stores the check result information for
    one function.

    Parameters
    ----------
    path : str
        Path of target module file.
    code_str : str
        String of target Python code.
    func_name : str
        Target function name.
    enable_default_or_optional_doc_check : bool
        If True specified, the `default` and `optional` strings in
        docstring will be checked.
    skip_decorator_name_list : list
        If a decorator name in this list is set to function, that
        function will not be checked.
    ignore_info_id_list : list of int
        List of IDs to ignore lint checking. A constant with a
        prefix of `INFO_ID_` can be specified.

    Returns
    -------
    info_list : list of dict
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    # Gather everything the individual checks need from the source code.
    docstring: str = helper.get_func_overall_docstring(
        py_module_str=code_str, func_name=func_name)
    arg_name_list: List[str] = helper.get_arg_name_list(
        py_module_str=code_str, func_name=func_name)
    default_val_info_dict: Dict[str, str] = \
        helper.get_arg_default_val_info_dict(
            py_module_str=code_str, func_name=func_name)
    param_info_list: List[Dict[str, str]] = \
        helper.get_docstring_param_info_list(docstring=docstring)
    optional_arg_name_list: List[str] = helper.get_optional_arg_name_list(
        docstring=docstring)
    return_val_info_list: List[Dict[str, str]] = \
        helper.get_docstring_return_val_info_list(docstring=docstring)
    return_val_exists_in_func: bool = helper.return_val_exists_in_func(
        module_str=code_str, func_name=func_name)
    kwargs_exists: bool = helper.kwargs_exists(
        py_module_str=code_str, func_name=func_name)
    decorator_names: List[str] = helper.get_decorator_names(
        py_module_str=code_str, func_name=func_name)
    # Skip the function entirely if any of the skip decorators is set.
    joined_decorator_names: str = ' '.join(decorator_names)
    if any(
            skip_name in joined_decorator_names
            for skip_name in skip_decorator_name_list):
        return []
    info_list: List[dict] = []
    info_list.extend(_check_func_description(
        module_path=path, func_name=func_name, docstring=docstring))
    info_list.extend(_check_lacked_param(
        module_path=path, func_name=func_name,
        arg_name_list=arg_name_list, param_info_list=param_info_list,
        kwargs_exists=kwargs_exists))
    info_list.extend(_check_lacked_docstring_param_type(
        module_path=path, func_name=func_name,
        param_info_list=param_info_list))
    info_list.extend(_check_lacked_docstring_param_description(
        module_path=path, func_name=func_name,
        param_info_list=param_info_list))
    info_list.extend(_check_docstring_param_order(
        module_path=path, func_name=func_name,
        arg_name_list=arg_name_list, param_info_list=param_info_list))
    if enable_default_or_optional_doc_check:
        info_list.extend(_check_lacked_default_value(
            module_path=path, func_name=func_name,
            param_info_list=param_info_list,
            default_val_info_dict=default_val_info_dict,
            optional_arg_name_list=optional_arg_name_list))
    info_list.extend(_check_lacked_return(
        module_path=path, func_name=func_name,
        return_val_info_list=return_val_info_list,
        return_val_exists_in_func=return_val_exists_in_func))
    info_list.extend(_check_lacked_return_docstring_type(
        module_path=path, func_name=func_name,
        return_val_info_list=return_val_info_list))
    info_list.extend(_check_lacked_return_docstring_description(
        module_path=path, func_name=func_name,
        return_val_info_list=return_val_info_list))
    return _remove_info_to_ignore_by_id(
        info_list=info_list, ignore_info_id_list=ignore_info_id_list)
def _remove_info_to_ignore_by_id(
        info_list: List[dict], ignore_info_id_list: List[int]) -> List[dict]:
    """
    Remove information from list if specified to ignore.

    Parameters
    ----------
    info_list : list of dicts
        A list of check results. Each dict has the keys
        `module_path`, `func_name`, `info_id` and `info`.
    ignore_info_id_list : list of int
        List of IDs to ignore lint checking. A constant with a
        prefix of `INFO_ID_` can be specified.

    Returns
    -------
    after_info_list : list of dicts
        A list after removed information to ignore.
    """
    if not info_list:
        return info_list
    # Filter with a comprehension instead of a manual append loop.
    return [
        info_dict for info_dict in info_list
        if info_dict[INFO_KEY_INFO_ID] not in ignore_info_id_list]
def _check_lacked_return_docstring_description(
        module_path: str, func_name: str,
        return_val_info_list: List[dict]) -> List[dict]:
    """
    Check if the docstring description for the return value is lacked.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    return_val_info_list : list of dicts
        List containing return value information. Each dict holds the
        name, type name and description under the `DOC_RETURN_INFO_KEY_`
        constants of the helper module.

    Returns
    -------
    info_list : list
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    info_list: List[dict] = []
    for return_val_info_dict in return_val_info_list:
        description: str = return_val_info_dict[
            helper.DOC_RETURN_INFO_KEY_DESCRIPTION]
        if description != '':
            continue
        name: str = return_val_info_dict[helper.DOC_RETURN_INFO_KEY_NAME]
        type_name: str = return_val_info_dict[
            helper.DOC_RETURN_INFO_KEY_TYPE_NAME]
        info: str = (
            'Docstring description of return value is missing.'
            '\nReturn value name: %s'
            '\nReturn value type: %s' % (name, type_name))
        info_list.append(_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_LACKED_DOCSTRING_RETURN_DESCRIPTION,
            info=info))
    return info_list
def _check_lacked_docstring_param_description(
        module_path: str, func_name: str,
        param_info_list: List[dict]) -> List[dict]:
    """
    Check that the docstring argument description is not lacked.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    param_info_list : list of dicts
        A list containing argument information of docstring. Each dict
        holds the argument name, type name, default value and
        description under the `DOC_PARAM_INFO_KEY_` constants of the
        helper module.

    Returns
    -------
    info_list : list
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    info_list: List[dict] = []
    for param_info_dict in param_info_list:
        description: str = param_info_dict[
            helper.DOC_PARAM_INFO_KEY_DESCRIPTION]
        if description != '':
            continue
        arg_name: str = param_info_dict[helper.DOC_PARAM_INFO_KEY_ARG_NAME]
        info: str = (
            'Missing docstring argument information.'
            f'\nArgument name: {arg_name}')
        info_list.append(_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_LACKED_DOCSTRING_PARAM_DESCRIPTION,
            info=info))
    return info_list
def _check_lacked_return_docstring_type(
        module_path: str, func_name: str,
        return_val_info_list: List[dict]) -> List[dict]:
    """
    Check that the type specification is not lacked in the
    return value's docstring.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    return_val_info_list : list of dicts
        List containing return value information. Each dict holds the
        name, type name and description under the `DOC_RETURN_INFO_KEY_`
        constants of the helper module.

    Returns
    -------
    info_list : list of dicts
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    info_list: List[dict] = []
    for return_val_info_dict in return_val_info_list:
        type_name: str = return_val_info_dict[
            helper.DOC_RETURN_INFO_KEY_TYPE_NAME]
        if type_name != '':
            continue
        return_value_name: str = return_val_info_dict[
            helper.DOC_RETURN_INFO_KEY_NAME]
        info: str = (
            'Missing docstring type information, or maybe missing '
            'return value name (colon not exists).'
            f'\nReturn value name: {return_value_name}')
        info_list.append(_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_LACKED_DOCSTRING_RETURN_TYPE,
            info=info))
    return info_list
def _check_lacked_return(
        module_path: str, func_name: str,
        return_val_info_list: List[dict],
        return_val_exists_in_func: bool) -> List[dict]:
    """
    Check if the return value or docstring is lacked.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    return_val_info_list : list of dicts
        List containing return value information parsed from docstring.
    return_val_exists_in_func : bool
        Boolean value whether the return value exists in the function.

    Returns
    -------
    info_list : list of dicts
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    doc_return_exists: bool = bool(return_val_info_list)
    # Consistent cases (both present or both absent) pass the check.
    if return_val_exists_in_func == doc_return_exists:
        return []
    if return_val_exists_in_func:
        info: str = 'While the return value exists in the function, '\
            'the return value document does not exist in docstring.'
        info_id: int = INFO_ID_LACKED_DOCSTRING_RETURN
    else:
        info = 'While the return value document exists in docstring, '\
            'the return value does not exist in the function.'
        info_id = INFO_ID_LACKED_RETURN_VAL
    return [_make_info_dict(
        module_path=module_path, func_name=func_name,
        info_id=info_id, info=info)]
def _check_lacked_default_value(
        module_path: str, func_name: str, param_info_list: List[dict],
        default_val_info_dict: Dict[str, str],
        optional_arg_name_list: List[str]) -> List[dict]:
    """
    Check that the default value of the argument is not missing.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    param_info_list : list of dicts
        A list containing argument information of docstring. Each dict
        holds the argument name, type name, default value and
        description under the `DOC_PARAM_INFO_KEY_` constants of the
        helper module.
    default_val_info_dict : dict
        A dictionary that stores argument names in keys and default
        values in values.
    optional_arg_name_list : list of str
        A list of argument names specified as optional in docstring.

    Returns
    -------
    info_list : list of dicts
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    info_list: List[dict] = []
    for param_info_dict in param_info_list:
        param_info_arg_name: str = param_info_dict[
            helper.DOC_PARAM_INFO_KEY_ARG_NAME]
        param_info_default_val: str = param_info_dict[
            helper.DOC_PARAM_INFO_KEY_DEFAULT_VAL]
        if param_info_arg_name not in default_val_info_dict:
            continue
        # Documenting an argument as `optional` is accepted in place of
        # an explicit default value description.
        if param_info_arg_name in optional_arg_name_list:
            continue
        if param_info_default_val == '':
            if default_val_info_dict[param_info_arg_name] == '':
                continue
            info: str = 'While there is no description of default value'\
                ' in docstring, there is a default value on the'\
                ' argument side.'
            info += f'\nArgument name: {param_info_arg_name}'
            info += '\nArgument default value: %s' \
                % default_val_info_dict[param_info_arg_name]
            info_list.append(_make_info_dict(
                module_path=module_path,
                func_name=func_name,
                info_id=INFO_ID_LACKED_DOC_DEFAULT_VALUE,
                info=info))
            continue
        if default_val_info_dict[param_info_arg_name] != '':
            continue
        info = 'The default value described in docstring does not '\
            'exist in the actual argument.'
        # Typo fix: this message previously read 'Argment name'.
        info += f'\nArgument name: {param_info_arg_name}'
        info += f'\nDocstring default value: {param_info_default_val}'
        info_list.append(_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_LACKED_ARG_DEFAULT_VALUE,
            info=info))
    return info_list
def _check_func_description(
        module_path: str, func_name: str, docstring: str) -> List[dict]:
    """
    Check that the target docstring has a function description.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    docstring : str
        Docstring to be checked.

    Returns
    -------
    info_list : list of dict
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.

    Notes
    -----
    Test function will not be checked.
    """
    if func_name.startswith('test_'):
        return []
    func_description: str = helper.get_func_description_from_docstring(
        docstring=docstring)
    if func_description != '':
        return []
    return [_make_info_dict(
        module_path=module_path,
        func_name=func_name,
        info_id=INFO_ID_LACKED_FUNC_DESCRIPTION,
        info='The function description is not set to docstring.')]
def _check_docstring_param_order(
        module_path: str, func_name: str, arg_name_list: List[str],
        param_info_list: List[dict]) -> List[dict]:
    """
    Check that the order of arguments and docstring is the same.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    arg_name_list : list of str
        List of argument names.
    param_info_list : list of dicts
        A list containing argument information of docstring, keyed by
        the `DOC_PARAM_INFO_KEY_` constants of the helper module.

    Returns
    -------
    info_list : list of dict
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    # Different lengths are reported by the lacked-param check instead.
    if len(arg_name_list) != len(param_info_list):
        return []
    doc_arg_name_list: List[str] = [
        param_info_dict[helper.DOC_PARAM_INFO_KEY_ARG_NAME]
        for param_info_dict in param_info_list]
    for actual_name, doc_name in zip(arg_name_list, doc_arg_name_list):
        if actual_name == doc_name:
            continue
        info: str = (
            'The order of the argument and docstring is different.'
            f'\nOrder of arguments: {arg_name_list}'
            f'\nOrder of docstring parameters: {doc_arg_name_list}')
        # Report only the first mismatch.
        return [_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_DIFFERENT_PARAM_ORDER,
            info=info)]
    return []
def _check_lacked_docstring_param_type(
        module_path: str, func_name: str,
        param_info_list: List[dict]) -> List[dict]:
    """
    Check that the docstring argument type is not lacked.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    param_info_list : list of dicts
        A list containing argument information of docstring, keyed by
        the `DOC_PARAM_INFO_KEY_` constants of the helper module.

    Returns
    -------
    info_list : list of dict
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    info_list: List[dict] = []
    for param_info_dict in param_info_list:
        type_name: str = param_info_dict[helper.DOC_PARAM_INFO_KEY_TYPE_NAME]
        if type_name != '':
            continue
        arg_name: str = param_info_dict[helper.DOC_PARAM_INFO_KEY_ARG_NAME]
        # `*args` / `**kwargs` entries do not need a type specification.
        if helper.args_or_kwargs_str_in_param_name(
                param_arg_name=arg_name):
            continue
        info: str = (
            'Missing docstring argument type information.'
            f'\nTarget argument: {arg_name}')
        info_list.append(_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_LACKED_DOCSTRING_PARAM_TYPE,
            info=info))
    return info_list
def _check_lacked_param(
        module_path: str, func_name: str, arg_name_list: List[str],
        param_info_list: List[dict], kwargs_exists: bool) -> List[dict]:
    """
    Check for missing arguments between arguments and docstring.

    Parameters
    ----------
    module_path : str
        Path of target module.
    func_name : str
        Target function name.
    arg_name_list : list of str
        List of argument names.
    param_info_list : list of dicts
        A list containing argument information of docstring, keyed by
        the `DOC_PARAM_INFO_KEY_` constants of the helper module.
    kwargs_exists : bool
        A boolean value of whether `**kwargs` exists in the arguments.

    Returns
    -------
    info_list : list of dict
        A list of check results for one function. Each dict has the
        keys `module_path`, `func_name`, `info_id` and `info`.
    """
    info_list: List[dict] = []
    # Direction 1: documented parameters that the signature lacks.
    # Skipped entirely when **kwargs can absorb any documented name.
    if not kwargs_exists:
        for param_info_dict in param_info_list:
            param_arg_name: str = param_info_dict[
                helper.DOC_PARAM_INFO_KEY_ARG_NAME]
            if param_arg_name in arg_name_list:
                continue
            if helper.args_or_kwargs_str_in_param_name(
                    param_arg_name=param_arg_name):
                continue
            info: str = (
                'An argument exists in docstring does not exists in '
                'the actual argument.'
                f'\nLacked argument name: {param_arg_name}')
            info_list.append(_make_info_dict(
                module_path=module_path,
                func_name=func_name,
                info_id=INFO_ID_LACKED_ARGUMENT,
                info=info))
    # Direction 2: signature arguments that the docstring lacks.
    doc_arg_name_list: List[str] = [
        param_info_dict[helper.DOC_PARAM_INFO_KEY_ARG_NAME]
        for param_info_dict in param_info_list]
    for arg_name in arg_name_list:
        if arg_name in doc_arg_name_list:
            continue
        info = (
            'There is an argument whose explanation '
            'does not exist in docstring.'
            '\nTarget argument name: %s' % arg_name)
        info_list.append(_make_info_dict(
            module_path=module_path,
            func_name=func_name,
            info_id=INFO_ID_LACKED_DOCSTRING_PARAM,
            info=info))
    return info_list
def _make_info_dict(
        module_path: str, func_name: str, info_id: int,
        info: str) -> dict:
    """
    Build a single check-result dictionary.

    Parameters
    ----------
    module_path : str
        Path of the checked module.
    func_name : str
        Name of the checked function.
    info_id : int
        One of the ``INFO_ID_*`` constants defined in this module.
    info : str
        Human-readable description of the check result.

    Returns
    -------
    result : dict
        Dictionary keyed by the module-level constants
        ``INFO_KEY_MODULE_PATH``, ``INFO_KEY_FUNC_NAME``,
        ``INFO_KEY_INFO_ID`` and ``INFO_KEY_INFO``.
    """
    result: dict = {}
    result[INFO_KEY_MODULE_PATH] = module_path
    result[INFO_KEY_FUNC_NAME] = func_name
    result[INFO_KEY_INFO_ID] = info_id
    result[INFO_KEY_INFO] = info
    return result
def _check_module_exists(py_module_path: str) -> None:
    """
    Check that the target module exists on disk.

    Parameters
    ----------
    py_module_path : str
        Path of target module.

    Raises
    ------
    FileNotFoundError
        If the target module can not be found. ``FileNotFoundError`` is a
        subclass of ``OSError`` (a.k.a. ``IOError``), so existing callers
        that catch ``IOError`` keep working.
    """
    if os.path.exists(py_module_path):
        return
    err_msg: str = 'The target module could not be found.'
    err_msg += f'\npy_module_path: {py_module_path}'
    # More precise than the bare IOError previously raised, while staying
    # backward compatible for callers catching IOError/OSError.
    raise FileNotFoundError(err_msg)
| [
37811,
32,
8265,
326,
8794,
2205,
37336,
287,
11361,
3696,
13,
198,
37811,
198,
198,
11748,
10104,
198,
11748,
28686,
198,
11748,
25064,
198,
6738,
19720,
1330,
4377,
11,
360,
713,
11,
7343,
11,
309,
29291,
198,
198,
6738,
997,
15390,
... | 2.339404 | 16,034 |
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 19 16:29:03 2012
Rewritten on Tue July 28 13:09:00 2015
@author: Fesh0r, LexManos
@version: v7.0
"""
import sys
import os
import fnmatch
import shutil
import re
import zipfile
import time
from contextlib import closing
from optparse import OptionParser
from pprint import pprint
"""
This processes a FernFlower output file and fixes some of the common decompiler mistakes.
Making the output code cleaner and less erroneous.
This takes advantage of the reconstituted local variables and inner class attributes that are present
in MC release 1.8.2 and above.
Things that are cleaned:
    Consecutive empty lines are condensed:
Line 1
Line 2
------------------------------------
Line 1
Line 2
------------------------------------
Trailing whitespace is removed:
' HELLO '
' HELLO'
#Decompile differences between machines related to double and floats, by removing trailing zeros:
# 0.0010D => 0.001D
Unnessasary calls to super with zero arguments, this is implied by the compiler.
'super();' => ''
    Parameter names in abstract methods, since abstract methods have no LVT attribute, FF does not name them correctly.
' <T extends Object & Comparable<T>, V extends T> IBlockState func_177226_a(IProperty<T> var1, V var2);'
' <T extends Object & Comparable<T>, V extends T> IBlockState func_177226_a(IProperty<T> p_177226_1_, V p_177226_1_);'
Enum Members, Enums are majorly syntax sugar, FernFlower does a good job at decompiling most of it.
        However it still leaves the first two parameters in code. So we fix that:
'LOGIN("LOGIN", 0, 1)' => 'LOGIN(1)'
        #If an Enum's value is an anonymous inner class, the compiler adds a 'null' parameter to the initializer. Unsure why but we need to strip this out.
# 'STONEBRICK("STONEBRICK", 2, 2, "stone_brick", "brick", (BlockSilverfish.NamelessClass1508106186)null) {'
# 'STONEBRICK(2, "stone_brick", "brick") {'
It also leaves those two parameters in the constructor arguments:
'EnumSomething(String p_i123_1_, int p_i123_2_, int p_i123_3_)'
'EnumSomething(int p_i123_3_)'
Synthetic methods, To support generics Java creates synthetic methods that bounce to concrete methods.
We scan for these methods that do nothing more then bounce with potential typcasting. And remove them
if the target method has the same name. This heavily relies on the mapping data having the correct mappings
'// \$FF: synthetic method'
'public Object call() {
' return (Object)this.call();'
'}'
Fernflower does not properly add generic parameters to anonymous inner class declarations.
I can't think of a good way to fix this generically, so we fix it for the classes
used in Minecraft, Function, Predicate, and Comparator
'new Predicate() {' => 'new Predicate<String, ItemStack>() {'
"""
# Regex building blocks shared by the compiled patterns in _REGEXP below.
_JAVA_IDENTIFIER = r'[a-zA-Z_$][\w_$\.]*'
_MODIFIERS = r'public|protected|private|static|abstract|final|native|synchronized|transient|volatile|strictfp'
_MODIFIERS_INIT = r'public|protected|private'
# One parameter whose name is a FernFlower-generated 'varN' local.
_PARAMETERS_VAR = r'(?:(?P<type>(?:[^ ,])+(?:<.*>)?(?: \.\.\.)?) var(?P<id>\d+)(?P<end>,? )?)'
# One parameter with an arbitrary Java identifier as its name.
_PARAMETERS = r'(?:(?P<type>(?:[^ ,])+(?:<.*>)?(?: \.\.\.)?) (?P<name>' + _JAVA_IDENTIFIER + r')(?P<end>,? )?)'
# Compiled patterns used by the cleanup passes. The commented-out entries
# are disabled passes kept for reference.
_REGEXP = {
    # Typecast marker
    'typecast': re.compile(r'\([\w\.\[\]]+\)'),
    # Remove repeated blank lines
    'newlines': re.compile(r'^\n{2,}', re.MULTILINE),
    # Normalize line ending to unix style
    'normlines': re.compile(r'\r?\n', re.MULTILINE),
    # Remove trailing whitespace
    'trailing': re.compile(r'[ \t]+$'),
    # strip trailing 0 from doubles and floats to fix decompile differences on OSX
    # 0.0010D => 0.001D
    #'trailingzero': re.compile(r'(?P<value>[0-9]+\.[0-9]*[1-9])0+(?P<type>[DdFfEe])'),
    # Remove unnecessary calls to super()
    #'empty_super': re.compile(r'^ +super\(\);\n'),
    # Cleanup the argument names on abstract methods
    'abstract': re.compile(r' (?P<method>func_(?P<number>\d+)_[a-zA-Z_]+)\((?P<arguments>' + _PARAMETERS_VAR + r'+)\)(?: throws (?:[\w$.]+,? ?)+)?;$'),
    # Single parts of parameter lists
    'params_var': re.compile(_PARAMETERS_VAR),
    # Empty Enum Switch Helper class detection
    #'class_header': re.compile(r'static class ' + _JAVA_IDENTIFIER + ' {'),
    #'obfid_field': re.compile(r'private static final String __OBFID = \"CL_\d+\";'),
    # Cleanup enum syntax sugar not being removed properly
    #'enum_member': re.compile(r'^(?P<indent> +)(?P<name>' + _JAVA_IDENTIFIER + r')\("(?P=name)", \d+(?P<sep>[,\)] *)(?P<end>.+)'),
    #
    # Enum declarations, used to find constructors
    #'enum_class': re.compile(r' enum (?P<name>' + _JAVA_IDENTIFIER + r') '),
    #
    # Enum constructor with sugar arguments
    #'enum_init': re.compile(r'^(?P<indent> +)(?P<modifiers>(?:(?:public|protected|private) )*)(?P<name>' + _JAVA_IDENTIFIER + r')\(String p_(?P<id>i\d+)_1_, int p_i\d+_2_(?:, )*(?P<end>.+)'),
    #
    # Empty enum ending
    #'enum_empty': re.compile(r'\)\s*(?:throws (?:[\w$.]+,? ?)+)?\s*\{\s*\}\s*$'),
    #
    # Enum anon classes add a random 'null' argument at the end.. No clue where this comes from
    #'enum_anon': re.compile(r'(?:, )*(?:\([\w\.]+\))*null\) \{'),
    #
    # Enum $VALUES field
    #'enum_values': re.compile(r'^\s*private static final (?P<name>' + _JAVA_IDENTIFIER + r')\[\] \$VALUES = new (?P=name)\[\]\{.*?\};'),
    #
    # Fernflower nameless classes scattered all over the place no clue why....
    #'nameless': re.compile(r'(?:, )*\([\w\.]+(NamelessClass\d+|SwitchHelper)\)null\)'),
    # Synthetic markers
    #'syn_marker': re.compile(r'^\s*// \$FF: (synthetic|bridge) method$'),
    # Method definition
    #'method_def': re.compile(r'^\s*(?P<modifiers>(?:(?:' + _MODIFIERS + r') )*)(?P<return>.+?) (?P<method>.+?)\((?P<arguments>' + _PARAMETERS + r'*)\)\s*(?:throws (?:[\w$.]+,? ?)+)?\s*\{'),
    # Method call
    #'syn_call': re.compile(r'^\s*(?P<return>return )?(this|super)\.(?P<target>.+)\((?P<arguments>(?:(?:(?:\([\w\.\[\]]+\))?[a-zA-Z_$][\w_$]*)(?:, )*)*)\);'),
    # Function generic method
    #'apply_def': re.compile(r'^\s*public (?P<return>.+?) apply\((?P<type>[^ ,]+(?:<.*>)?) p_apply_1_\)'),
    #
    # Predicate generic method
    #'predicate_def': re.compile(r'^\s*public boolean apply\((?P<type>[^ ,]+(?:<.*>)?) p_apply_1_\)'),
    #
    # Comparator generic method
    #'compare_def': re.compile(r'^\s*public int compare\((?P<type>[^ ,]+(?:<.*>)?) p_compare_1_, '),
    #
    # TypeAdapter generic method
    #'write_def': re.compile(r'^\s*public void write\(JsonWriter p_write_1_, (?P<type>[^ ,]+(?:<.*>)?) p_write_2_\)'),
    #
    # SimpleChannelInboundHandler generic method
    #'channelRead0_def': re.compile(r'^\s*(public|protected) void channelRead0\(ChannelHandlerContext p_channelRead0_1_, (?P<type>[^ ,]+(?:<.*>)?) p_channelRead0_2_\)'),
    #
    # GenericFutureListener generic method
    #'operationComplete_def': re.compile(r'^\s*public void operationComplete\((?P<type>[^ ,]+(?:<.*>)?) p_operationComplete_1_\)'),
    #
    # FutureCallback generic method
    #'onSuccess_def': re.compile(r'^\s*public void onSuccess\((?P<type>[^ ,]+(?:<.*>)?) p_onSuccess_1_\)'),
    #
    # CacheLoader generic method
    #'load_def': re.compile(r'^\s*public (?P<return>.+?) load\((?P<type>[^ ,]+(?:<.*>)?) p_load_1_\)'),
}
# Script entry point; main() is expected to be defined elsewhere in this
# file (not visible in this chunk) — TODO confirm.
if __name__ == '__main__':
    main()
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
26223,
2365,
220,
678,
1467,
25,
1959,
25,
3070,
2321,
198,
30003,
9108,
319,
30030,
2901,
2579,
1511,
25,
2931,
25,
405,
1853,
198,
198,
31,
9... | 2.468812 | 3,014 |
r"""
A module defining some "nicer" fourier transform functions.
We define only two functions -- an arbitrary-dimension forward transform, and its inverse. In each case, the transform
is designed to replicate the continuous transform. That is, the transform is volume-normalised and obeys correct
Fourier conventions.
The actual FFT backend is provided by ``pyFFTW`` if it is installed, which provides a significant speedup, and
multi-threading.
Notes
-----
Conveniently, we allow for arbitrary Fourier convention, according to the scheme in
http://mathworld.wolfram.com/FourierTransform.html. That is, we define the forward and inverse *n*-dimensional
transforms respectively as
.. math:: F(k) = \sqrt{\frac{|b|}{(2\pi)^{1-a}}}^n \int f(r) e^{-i b\mathbf{k}\cdot\mathbf{r}} d^n\mathbf{r}
and
.. math:: f(r) = \sqrt{\frac{|b|}{(2\pi)^{1+a}}}^n \int F(k) e^{+i b\mathbf{k}\cdot\mathbf{r}} d^n \mathbf{k}.
In both transforms, the corresponding co-ordinates are returned so a completely consistent transform is simple to get.
This makes switching from standard frequency to angular frequency very simple.
We note that currently, only positive values for b are implemented (in fact, using negative b is consistent, but
one must be careful that the frequencies returned are descending, rather than ascending).
"""
import warnings
# Try importing the pyFFTW interface
try:
    # Prefer pyFFTW when available: it exposes a numpy-compatible FFT
    # interface and supports multi-threading.
    from multiprocessing import cpu_count
    # Number of worker threads for pyFFTW (one per CPU core).
    THREADS = cpu_count()
    from pyfftw.interfaces.numpy_fft import fftn as _fftn, ifftn as _ifftn, ifftshift as _ifftshift, fftshift as _fftshift, fftfreq as _fftfreq
    from pyfftw.interfaces.cache import enable, set_keepalive_time
    # enable()
    # set_keepalive_time(100.)
    HAVE_FFTW = True
except ImportError:
    # Fall back to plain numpy.  Note the fallback binds fftn/ifftn
    # without the leading underscore; presumably wrapper functions of the
    # same names are defined later in this module — TODO confirm.
    warnings.warn("You do not have pyFFTW installed. Installing it should give some speed increase.")
    HAVE_FFTW = False
    from numpy.fft import fftn, ifftn, ifftshift as _ifftshift, fftshift as _fftshift, fftfreq as _fftfreq
# NOTE: to avoid MKL-related bugs, numpy needs to be imported after pyfftw: see https://github.com/pyFFTW/pyFFTW/issues/40
import numpy as np
def fft(X, L=None, Lk=None, a=0, b=2*np.pi, axes=None, ret_cubegrid=False):
    r"""
    Forward n-dimensional Fourier transform with arbitrary conventions.

    Wraps ``fftn`` so that the result matches the continuous Fourier
    transform (volume-normalised and convention-aware).  With the default
    ``a``/``b``/``L`` values the output reduces to ``numpy.fft.fftn``.
    The zero frequency always sits at the centre of the returned array.

    Parameters
    ----------
    X : array
        Field to transform; the transform runs over ``axes``.
    L : float or array-like, optional
        Side length(s) of the real-space box along the transformed axes.
        A scalar is broadcast to every transformed axis.  The default
        returns the un-normalised DFT (same as numpy).
    Lk : float or array-like, optional
        Side length(s) of the fourier-space box.  Only consulted when
        ``L`` is not given; ``L`` takes precedence.
    a, b : float, optional
        Fourier convention parameters (see :mod:`powerbox.dft`).
    axes : sequence of ints, optional
        Axes to transform over; all axes by default.
    ret_cubegrid : bool, optional
        Whether to also return the grid of frequency magnitudes.

    Returns
    -------
    ft : array
        The DFT of X, normalised to be consistent with the continuous
        transform.
    freq : list of arrays
        Frequencies along each transformed dimension, consistent with
        the specified convention.
    grid : array
        Magnitude of the frequency at each point of the transform; only
        returned when ``ret_cubegrid`` is ``True``.
    """
    if axes is None:
        axes = list(range(len(X.shape)))

    n_cells = np.array([X.shape[axis] for axis in axes])

    # Resolve the real-space box lengths; L wins over Lk when both given.
    if L is None and Lk is None:
        L = n_cells
    elif L is not None:
        if np.isscalar(L):
            L = L * np.ones(len(axes))
    elif Lk is not None:
        if np.isscalar(Lk):
            Lk = Lk * np.ones(len(axes))
        L = n_cells * 2 * np.pi / (Lk * b)  # honour the Fourier convention

    box_volume = float(np.product(L))
    cell_volume = box_volume / np.product(n_cells)

    # Volume-normalise and apply the (a, b) convention factor per axis.
    norm = np.sqrt(np.abs(b) / (2 * np.pi) ** (1 - a)) ** len(axes)
    ft = cell_volume * fftshift(fftn(X, axes=axes), axes=axes) * norm

    spacing = np.array([float(side) / float(n) for side, n in zip(L, n_cells)])
    freq = np.array([fftfreq(n, d=d, b=b) for n, d in zip(n_cells, spacing)])

    if not ret_cubegrid:
        return ft, freq

    # |k| over the whole grid via successive outer sums of squared freqs.
    grid = freq[0] ** 2
    for axis_freq in freq[1:]:
        grid = np.add.outer(grid, axis_freq ** 2)
    return ft, freq, np.sqrt(grid)
def ifft(X, Lk=None, L=None, a=0, b=2*np.pi, axes=None, ret_cubegrid=False):
    r"""
    Arbitrary-dimension nice inverse Fourier Transform.

    This function wraps numpy's ``ifftn`` and applies some nice properties.
    Notably, the returned fourier transform is equivalent to what would be
    expected from a continuous inverse Fourier Transform (including
    normalisations etc.).  Arbitrary conventions are supported (see
    :mod:`powerbox.dft` for details).  Default parameters return exactly
    what ``numpy.fft.ifftn`` would return.

    Parameters
    ----------
    X : array
        Field to transform (zero-frequency at the centre). A
        lower-dimensional transform can be selected with ``axes``.
    Lk : float or array-like, optional
        Side length(s) of the fourier-space box which defines ``X``.  A
        scalar is broadcast to every transformed axis.  The default
        (``Lk=1``) returns the un-normalised DFT.
    L : float or array-like, optional
        Side length(s) of the real-space box (the dual of ``X``).  Only one
        of ``Lk``/``L`` is needed; ``L`` takes precedence when given.
    a, b : float, optional
        Fourier convention parameters (see :mod:`powerbox.dft`).
    axes : sequence of ints, optional
        The axes to take the transform over; all axes by default.
    ret_cubegrid : bool, optional
        Whether to return the grid of real-space co-ordinate magnitudes.

    Returns
    -------
    ft : array
        The IDFT of X, normalised to be consistent with the continuous
        transform.
    freq : list of arrays
        The real-space co-ordinate grid in each dimension.
    grid : array
        Magnitude of the real-space co-ordinates at each point; only
        returned when ``ret_cubegrid`` is ``True``.
    """
    if axes is None:
        axes = list(range(len(X.shape)))

    N = np.array([X.shape[axis] for axis in axes])

    # Get the fourier-space box volume if given the real-space box volume.
    if Lk is None and L is None:
        Lk = 1
    elif L is not None:
        if np.isscalar(L):
            L = np.array([L]*len(axes))
        dx = L/N
        Lk = 2*np.pi/(dx*b)

    # BUG FIX: broadcast a scalar Lk *after* the defaulting above. The
    # previous ``elif np.isscalar(Lk)`` was unreachable on the default path
    # (Lk=1), leaving a 0-d array that made ``zip(Lk, N)`` below raise.
    if np.isscalar(Lk):
        Lk = [Lk]*len(axes)

    Lk = np.array(Lk)
    V = np.product(Lk)
    dk = np.array([float(lk)/float(n) for lk, n in zip(Lk, N)])

    # Volume-normalise and apply the (a, b) convention factor per axis.
    ft = V*ifftn(ifftshift(X, axes=axes), axes=axes)*np.sqrt(np.abs(b)/(2*np.pi) ** (1 + a)) ** len(axes)
    freq = np.array([fftfreq(n, d=d, b=b) for n, d in zip(N, dk)])

    if not ret_cubegrid:
        return ft, freq

    grid = freq[0] ** 2
    for i in range(len(axes) - 1):
        # BUG FIX: add the *remaining* dimensions (freq[i+1]), matching
        # fft(); the old code re-added freq[0]**2 via ``freq[i]``.
        grid = np.add.outer(grid, freq[i + 1] ** 2)
    return ft, freq, np.sqrt(grid)
def fftfreq(N, d=1.0, b=2*np.pi):
    """
    Fourier-convention-aware sample frequencies for a box with N cells.

    Parameters
    ----------
    N : int
        The number of grid cells.
    d : float, optional
        The interval between cells.
    b : float, optional
        The fourier-convention of the frequency component (see
        :mod:`powerbox.dft` for details).

    Returns
    -------
    freq : array
        The N symmetric frequency components of the Fourier transform,
        always centred at 0.
    """
    standard = _fftfreq(N, d=d)
    return fftshift(standard) * (2 * np.pi / b)
| [
81,
37811,
198,
32,
8265,
16215,
617,
366,
6988,
263,
1,
46287,
5277,
6121,
5499,
13,
198,
198,
1135,
8160,
691,
734,
5499,
1377,
281,
14977,
12,
46156,
2651,
6121,
11,
290,
663,
34062,
13,
554,
1123,
1339,
11,
262,
6121,
198,
271,
... | 2.798686 | 3,502 |
from rest_framework.viewsets import ViewSet
from rest_framework.response import Response
from rest_framework.mixins import ListModelMixin
from rest_framework import status, permissions
from servers.models import Server
from .models import Hosts, Functions, Triggers
from products.models import Product
class ZabbixHostStatusViewset(ViewSet, ListModelMixin):
    """
    Read-only counters describing host and monitoring status in Zabbix.
    """
    permission_classes = (permissions.IsAuthenticated,)

    def get_total_host_num(self):
        """
        Return the total number of servers recorded in the CMDB.
        """
        try:
            total = Server.objects.all().count()
        except Exception:
            # Best effort: report 0 when the CMDB cannot be queried.
            total = 0
        return total

    def get_zabbix_total_host_num(self):
        """
        Return the total number of hosts registered in Zabbix.
        """
        try:
            total = Hosts.objects.filter(status__in=[0, 1]).filter(flags=0).using("zabbix").count()
        except Exception:
            total = 0
        return total

    def get_zabbix_monitor_host_num(self):
        """
        Return the number of hosts Zabbix is actively monitoring.
        """
        try:
            total = Hosts.objects.filter(status=0).filter(flags=0).using("zabbix").count()
        except Exception:
            total = 0
        return total

    def get_zabbix_not_monitor_host_num(self):
        """
        Return the number of hosts registered in Zabbix but not monitored.
        """
        try:
            total = Hosts.objects.filter(status=1).filter(flags=0).using("zabbix").count()
        except Exception:
            total = 0
        return total

    def get_zabbix_monitor_exception_host_num(self):
        """
        Return the number of monitored hosts in an abnormal state
        (available=2, i.e. the Zabbix agent is unreachable).
        """
        try:
            total = Hosts.objects.filter(status=0).filter(flags=0).filter(available=2).using("zabbix").count()
        except Exception:
            total = 0
        return total
class ProductHostStatusViewset(ViewSet, ListModelMixin):
"""
以业务线为维度查看主机状态
"""
permission_classes = (permissions.IsAuthenticated,)
| [
198,
6738,
1334,
62,
30604,
13,
1177,
28709,
1330,
3582,
7248,
198,
6738,
1334,
62,
30604,
13,
26209,
1330,
18261,
198,
6738,
1334,
62,
30604,
13,
19816,
1040,
1330,
7343,
17633,
35608,
259,
198,
6738,
1334,
62,
30604,
1330,
3722,
11,
... | 2.029412 | 918 |
# -*- coding: utf-8 -*-
import os.path
import platform
import sys
import shutil
import time
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
28686,
13,
6978,
198,
11748,
3859,
198,
11748,
25064,
198,
11748,
4423,
346,
198,
11748,
640,
628,
628,
198
] | 2.771429 | 35 |
import pytest # noqa: F401
| [
198,
11748,
12972,
9288,
220,
1303,
645,
20402,
25,
376,
21844,
628
] | 2.5 | 12 |
from selenium import webdriver
import time
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.webdriver.support.select import Select
from bs4 import BeautifulSoup
from urllib.request import urlopen
| [
6738,
384,
11925,
1505,
1330,
3992,
26230,
198,
11748,
640,
198,
6738,
384,
11925,
1505,
13,
11321,
13,
1069,
11755,
1330,
1400,
16678,
20180,
16922,
11,
520,
1000,
20180,
26687,
16922,
198,
6738,
384,
11925,
1505,
13,
12384,
26230,
13,
... | 3.923077 | 65 |
from pathlib import Path
from shu.kanunu import make_ebook
urls = """
http://www.kanunu8.com/files/chinese/201102/1777.html
http://www.kanunu8.com/files/chinese/201102/1778.html
http://www.kanunu8.com/files/chinese/201102/1766.html
http://www.kanunu8.com/files/chinese/201102/1779.html
http://www.kanunu8.com/files/chinese/201102/1780.html
http://www.kanunu8.com/files/chinese/201102/1767.html
http://www.kanunu8.com/files/chinese/201102/1781.html
""".strip().splitlines()
if __name__ == '__main__':
    for number, url in enumerate(urls, 1):
        # Index pages 3 and 6 use a table with cellpadding=7; the rest
        # use cellpadding=8.
        its = ('table[cellpadding="7"]'
               if number in (3, 6)
               else 'table[cellpadding="8"]')
        # BUG FIX: the helper imported above is ``make_ebook``; the
        # original called an undefined name ``make`` (NameError at runtime).
        make_ebook(index_url=url, number=number, index_table_selector=its)
| [
6738,
3108,
8019,
1330,
10644,
198,
6738,
427,
84,
13,
27541,
403,
84,
1330,
787,
62,
16497,
628,
198,
6371,
82,
796,
37227,
198,
4023,
1378,
2503,
13,
27541,
403,
84,
23,
13,
785,
14,
16624,
14,
354,
3762,
14,
1264,
15377,
14,
15... | 2.2875 | 320 |
import json
import tempfile
import uuid
import mock
import pytest
from decimal import Decimal
from tests.acceptance import query_json_file, query_parquet_file, download_and_decrypt
# Marks applied to every test collected from this module; the
# ``empty_jobs`` fixture is requested for each test as well.
pytestmark = [
    pytest.mark.acceptance,
    pytest.mark.jobs,
    pytest.mark.usefixtures("empty_jobs"),
]
@pytest.mark.auth
@pytest.mark.api
@pytest.mark.api
@pytest.mark.api
@pytest.mark.api
@pytest.mark.api
@pytest.mark.api
@pytest.mark.api
| [
11748,
33918,
198,
11748,
20218,
7753,
198,
11748,
334,
27112,
198,
198,
11748,
15290,
198,
11748,
12972,
9288,
198,
6738,
32465,
1330,
4280,
4402,
198,
198,
6738,
5254,
13,
13635,
590,
1330,
12405,
62,
17752,
62,
7753,
11,
12405,
62,
1... | 2.551913 | 183 |
import requests
from bs4 import BeautifulSoup
# User-facing message templates; the ``{}`` placeholders are filled in
# with str.format at the call sites.
NO_INFORMATION = 'Sorry, no info about the next episode of {} is available yet.'
SHOW_FINISHED = 'The show has finished streaming all its episodes. (No further information available)'
WRONG_SHOW_NAME = '**ERROR** The TV Show name you entered is not correct.'
# NOTE(review): "epsiode" typo below is user-visible runtime output; left
# unchanged here because fixing it would alter program behaviour.
DATES_FOUND = 'Brace yourself, the next epsiode(i.e. {}) will release on {}. CountDown: {}.'
| [
11748,
7007,
198,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
628,
198,
15285,
62,
1268,
35036,
796,
705,
14385,
11,
645,
7508,
546,
262,
1306,
4471,
286,
23884,
318,
1695,
1865,
2637,
198,
9693,
3913,
62,
20032,
18422,
1961,
796,... | 3.435897 | 117 |
pkgname = "perl"
pkgver = "5.34.0"
pkgrel = 0
_perl_cross_ver = "1.3.6"
build_style = "gnu_configure"
make_cmd = "gmake"
make_check_target = "test"
hostmakedepends = ["gmake", "less"]
makedepends = ["zlib-devel", "libbz2-devel"]
checkdepends = ["iana-etc", "perl-AnyEvent", "perl-Test-Pod", "procps-ng"]
depends = ["less"]
pkgdesc = "Practical Extraction and Report Language"
maintainer = "q66 <q66@chimera-linux.org>"
license = "Artistic-1.0-Perl OR GPL-1.0-or-later"
url = "https://www.perl.org"
source = [
f"https://www.cpan.org/src/5.0/perl-{pkgver}.tar.gz",
f"https://github.com/arsv/perl-cross/releases/download/{_perl_cross_ver}/perl-cross-{_perl_cross_ver}.tar.gz"
]
sha256 = [
"551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a",
"4010f41870d64e3957b4b8ce70ebba10a7c4a3e86c5551acb4099c3fcbb37ce5"
]
# prevent a massive log dump
tool_flags = {
"CFLAGS": [
"-Wno-compound-token-split-by-macro",
"-DNO_POSIX_2008_LOCALE",
"-D_GNU_SOURCE",
],
"LDFLAGS": ["-Wl,-z,stack-size=2097152", "-pthread"],
}
# check is cyclic: depends on perl modules
options = ["!check"]
| [
35339,
3672,
796,
366,
525,
75,
1,
198,
35339,
332,
796,
366,
20,
13,
2682,
13,
15,
1,
198,
35339,
2411,
796,
657,
198,
62,
525,
75,
62,
19692,
62,
332,
796,
366,
16,
13,
18,
13,
21,
1,
198,
11249,
62,
7635,
796,
366,
41791,
... | 2.113594 | 537 |
import random

print('''Which type of e-mail you want?
       --> G-mail(g)
             OR
       --> Ramdom email(r)''')
# Alphabet from which the random local-part characters are drawn.
value1 = "qwertyuiopasdfghjklzxcvbnm"
e1 = random.choice(value1)
e2 = random.choice(value1)
e3 = random.choice(value1)
e4 = random.choice(value1)
e5 = random.choice(value1)
a = input()
# BUG FIX: the original used two independent ``if`` statements with the
# ``else`` bound to the second one, so choosing 'r' printed the random
# address *and* "Wrong input!!!".  An if/elif/else chain runs one branch.
if a == 'r':
    print(e1+e2+e3+e4+e5+e1+e4+e2+"@"+e3+e4+e1+e5+e2+".com")
elif a == 'g':
    print(e1+e2+e3+e4+e5+e1+e4+e2+'@gmail.com')
else:
    print("Wrong input!!!")
11748,
4738,
201,
198,
4798,
7,
7061,
6,
13828,
2099,
286,
304,
12,
4529,
345,
765,
30,
201,
198,
220,
220,
220,
220,
220,
220,
220,
14610,
402,
12,
4529,
7,
70,
8,
201,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
2... | 1.711744 | 281 |
from django import forms
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
ReadOnlyPasswordHashField)
from django.db.models import Q
from .models import User, UserFaceImage
from .utils import base64_file
from accounts.models import Profile
from PIL import Image
# Admin form
class UserCreationForm(forms.ModelForm):
    """A form for creating new users. Includes all the required
    fields, plus a repeated password."""
    # Password entered twice; matching is presumably enforced in a
    # clean method not visible in this chunk — TODO confirm.
    password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
    password2 = forms.CharField(
        label='Password confirmation', widget=forms.PasswordInput)
# Admin form
class UserChangeForm(forms.ModelForm):
    """A form for updating users. Includes all the fields on
    the user, but replaces the password field with admin's
    password hash display field.
    """
    # Read-only hash display; prevents editing the raw password here.
    password = ReadOnlyPasswordHashField()
# User signup form
# User login form
# Authentication form
# Password change form
# Password reset request form
# class set password form
class SetPasswordForm(forms.Form):
    """
    A form that lets a user set their password without entering the old
    password.
    """
    # New password entered twice, using hidden (password) input widgets.
    new_password1 = forms.CharField(widget=forms.PasswordInput)
    new_password2 = forms.CharField(widget=forms.PasswordInput)
| [
6738,
42625,
14208,
1330,
5107,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
23914,
1330,
357,
47649,
3299,
8479,
11,
30275,
19400,
8479,
11,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
... | 3.274272 | 412 |
"""Author: Brandon Trabucco, Copyright 2019"""
import tensorflow as tf
from abc import ABC
from mineral.distributions.gaussians.gaussian import Gaussian
| [
37811,
13838,
25,
14328,
833,
397,
18863,
78,
11,
15069,
13130,
37811,
201,
198,
201,
198,
201,
198,
11748,
11192,
273,
11125,
355,
48700,
201,
198,
6738,
450,
66,
1330,
9738,
201,
198,
6738,
18352,
13,
17080,
2455,
507,
13,
4908,
104... | 3.075472 | 53 |
import pytest
import json
import sys
from subprocess import check_output
from tests import PROJECT_DIR, TESTDATA
# Path to the myth CLI entry point inside the project checkout.
MYTH = str(PROJECT_DIR / "myth")
# Each entry: (compiled contract fixture, expected tx/issue metadata,
# optional calldata hex string or None).
test_data = (
    (
        "flag_array.sol.o",
        {
            "TX_COUNT": 1,
            "TX_OUTPUT": 1,
            "MODULE": "EtherThief",
            "ISSUE_COUNT": 1,
            "ISSUE_NUMBER": 0,
        },
        "0xab12585800000000000000000000000000000000000000000000000000000000000004d2",
    ),
    (
        "exceptions_0.8.0.sol.o",
        {
            "TX_COUNT": 1,
            "TX_OUTPUT": 1,
            "MODULE": "Exceptions",
            "ISSUE_COUNT": 2,
            "ISSUE_NUMBER": 0,
        },
        None,
    ),
)
@pytest.mark.parametrize("file_name, tx_data, calldata", test_data)
| [
11748,
12972,
9288,
198,
11748,
33918,
198,
11748,
25064,
198,
198,
6738,
850,
14681,
1330,
2198,
62,
22915,
198,
6738,
5254,
1330,
21965,
23680,
62,
34720,
11,
43001,
26947,
198,
198,
26708,
4221,
796,
965,
7,
31190,
23680,
62,
34720,
... | 1.828571 | 420 |
from .Module import *
| [
6738,
764,
26796,
1330,
1635,
198
] | 3.666667 | 6 |
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
from monasca_api.common.rest import utils as rest_utils
| [
2,
15069,
1946,
30446,
15503,
12,
11869,
446,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
921,
... | 3.733333 | 180 |
'''
module for scraping Livetracklist Page and give an Iterable
'''
from typing import Iterable
from bs4 import BeautifulSoup
import requests
class Livetracklist:
    '''
    Scraper for livetracklist.com pages.
    '''

    def tracklist(self) -> Iterable:
        '''
        Extract the tracklist from the already-parsed page.

        Pairs up the ``span.title`` and ``span.artist`` elements found in
        ``self.soup`` (a BeautifulSoup document, presumably prepared by a
        constructor not visible here — TODO confirm).

        Returns
        -------
        Iterable
            yields tuples of length 2 with content (song_title, artist(s))
        '''
        page = self.soup
        titles = [node.text for node in page.findAll('span', class_="title")]
        artists = [node.text for node in page.findAll('span', class_="artist")]
        return zip(titles, artists)
| [
7061,
6,
198,
21412,
329,
46743,
7547,
11659,
4868,
7873,
290,
1577,
281,
40806,
540,
198,
7061,
6,
198,
6738,
19720,
1330,
40806,
540,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
198,
11748,
7007,
198,
198,
4871,
7547,
11659,
486... | 2.467914 | 374 |
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Public API of this package, re-exported from the client, dispatcher and
# server submodules via the star imports below.
__all__ = [
    'ClientSendError',
    'ExpectedException',
    'NoSuchMethod',
    'RPCClient',
    'RPCDispatcher',
    'RPCDispatcherError',
    'RPCVersionCapError',
    'RemoteError',
    'UnsupportedVersion',
    'expected_exceptions',
    'get_rpc_server',
]
from .client import *
from .dispatcher import *
from .server import *
| [
198,
2,
15069,
2211,
2297,
10983,
11,
3457,
13,
198,
2,
198,
2,
220,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
220,
220,
220,
407,
779,
428,
2393,
2845,
287,
... | 2.959248 | 319 |
#!/usr/bin/env python3
import unittest
import os
import glob
from python_utils import import_vars, set_env_var, print_input_args, \
print_info_msg, print_err_msg_exit, create_symlink_to_file, \
define_macos_utilities, check_var_valid_value, \
cd_vrfy, mkdir_vrfy, find_pattern_in_str
def link_fix(verbose, file_group):
    """Create symlinks in the FIXLAM directory to a group of fixed files.

    Depending on file_group, symlinks are created to the grid files (in
    GRID_DIR), the orography files (in OROG_DIR), or the surface-climatology
    files (in SFC_CLIMO_DIR).  The grid's C-resolution is extracted from the
    target file names and returned.

    Args:
        verbose: True or False
        file_group: one of ["grid", "orog", "sfc_climo"]
    Returns:
        a string: resolution
    """

    print_input_args(locals())

    valid_vals_file_group=["grid", "orog", "sfc_climo"]
    check_var_valid_value(file_group, valid_vals_file_group)

    #import all environment variables (GRID_DIR, NHW, NH0, RUN_TASK_*, etc.)
    import_vars()
    #
    #-----------------------------------------------------------------------
    #
    # Create symlinks in the FIXLAM directory pointing to the grid files.
    # These symlinks are needed by the make_orog, make_sfc_climo, make_ic,
    # make_lbc, and/or run_fcst tasks.
    #
    # Note that we check that each target file exists before attempting to
    # create symlinks.  This is because the "ln" command will create sym-
    # links to non-existent targets without returning with a nonzero exit
    # code.
    #
    # Summary of what each file group needs:
    #
    # * "grid": links to the mosaic and grid files on the wide ({NHW}),
    #   {NH3}- and {NH4}-cell halos.  The wide-halo mosaic file is read by
    #   the orography filtering executable, which extracts from it the name
    #   of the grid file to use; the {NH4}-halo mosaic/grid files are inputs
    #   to the surface-climatology executable, whose grid dimensions must
    #   match those in the filtered orography file on the same halo.
    #
    # * "orog": links to the (filtered) orography files on the {NH0}- and
    #   {NH4}-cell halos (plus small-scale/large-scale drag fields for the
    #   FV3_HRRR physics suite).
    #
    # * "sfc_climo": links to the surface-climatology files on the {NH0}-
    #   and {NH4}-cell halos; these are needed by the tasks involving
    #   chgres_cube (make_ics/make_lbcs).
    #
    #-----------------------------------------------------------------------
    #
    print_info_msg(f'Creating links in the FIXLAM directory to the grid files...',
                   verbose=verbose)
    #
    # Build the globbing patterns (fps) for the requested file group and
    # record whether the corresponding pre-processing task was run.
    #
    # BUG FIX: run_task used to be assigned f"{RUN_TASK_...}", i.e. the
    # string "True"/"False", which is non-empty and therefore always truthy;
    # keep the value itself so the "if run_task" test below actually
    # distinguishes the two cases (this matches how RUN_TASK_MAKE_SFC_CLIMO
    # is tested directly further down).
    #
    if file_group == "grid":
        fns=[
          f"C*{DOT_OR_USCORE}mosaic.halo{NHW}.nc",
          f"C*{DOT_OR_USCORE}mosaic.halo{NH4}.nc",
          f"C*{DOT_OR_USCORE}mosaic.halo{NH3}.nc",
          f"C*{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NHW}.nc",
          f"C*{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH3}.nc",
          f"C*{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc"
        ]
        fps=[ os.path.join(GRID_DIR,itm) for itm in fns]
        run_task=RUN_TASK_MAKE_GRID
    #
    elif file_group == "orog":
        fns=[
          f"C*{DOT_OR_USCORE}oro_data.tile{TILE_RGNL}.halo{NH0}.nc",
          f"C*{DOT_OR_USCORE}oro_data.tile{TILE_RGNL}.halo{NH4}.nc"
        ]
        # The HRRR suite additionally needs the small-scale ("ss") and
        # large-scale ("ls") orographic drag fields.
        if CCPP_PHYS_SUITE == "FV3_HRRR":
            fns+=[
              f"C*{DOT_OR_USCORE}oro_data_ss.tile{TILE_RGNL}.halo{NH0}.nc",
              f"C*{DOT_OR_USCORE}oro_data_ls.tile{TILE_RGNL}.halo{NH0}.nc",
            ]
        fps=[ os.path.join(OROG_DIR,itm) for itm in fns]
        run_task=RUN_TASK_MAKE_OROG
    #
    # The following symlinks (same names as their target files) are needed
    # by the make_ics and make_lbcs tasks (i.e. tasks involving chgres_cube).
    #
    elif file_group == "sfc_climo":
        num_fields=len(SFC_CLIMO_FIELDS)
        fns=[None] * (2 * num_fields)
        for i in range(num_fields):
            ii=2*i
            fns[ii]=f"C*.{SFC_CLIMO_FIELDS[i]}.tile{TILE_RGNL}.halo{NH0}.nc"
            fns[ii+1]=f"C*.{SFC_CLIMO_FIELDS[i]}.tile{TILE_RGNL}.halo{NH4}.nc"
        fps=[ os.path.join(SFC_CLIMO_DIR,itm) for itm in fns]
        run_task=RUN_TASK_MAKE_SFC_CLIMO
    #
    #-----------------------------------------------------------------------
    #
    # Find all files matching the globbing patterns and make sure that they
    # all have the same resolution (an integer) in their names.
    #
    #-----------------------------------------------------------------------
    #
    i=0
    res_prev=""
    res=""
    fp_prev=""

    for pattern in fps:
        files = glob.glob(pattern)
        for fp in files:

            fn = os.path.basename(fp)
            # Fixed-file names start with "C<resolution>", e.g. "C403...".
            regex_search = "^C([0-9]*).*"
            res = find_pattern_in_str(regex_search, fn)
            if res is None:
                print_err_msg_exit(f'''
                    The resolution could not be extracted from the current file's name.  The
                    full path to the file (fp) is:
                      fp = \"{fp}\"
                    This may be because fp contains the * globbing character, which would
                    imply that no files were found that match the globbing pattern specified
                    in fp.''')
            else:
                res = res[0]

            if ( i > 0 ) and ( res != res_prev ):
                print_err_msg_exit(f'''
                    The resolutions (as obtained from the file names) of the previous and
                    current file (fp_prev and fp, respectively) are different:
                      fp_prev = \"{fp_prev}\"
                      fp      = \"{fp}\"
                    Please ensure that all files have the same resolution.''')

            i=i+1
            fp_prev=f"{fp}"
            res_prev=res
    #
    #-----------------------------------------------------------------------
    #
    # Replace the * globbing character in the set of globbing patterns with
    # the resolution.  This yields a set of (full paths to) specific files.
    #
    #-----------------------------------------------------------------------
    #
    fps=[ itm.replace('*',res) for itm in fps]
    #
    # It is convenient to create the symlinks while inside the FIXLAM
    # directory; change back to the original directory at the end.
    #
    SAVE_DIR=os.getcwd()
    cd_vrfy(FIXLAM)
    #
    # If the relevant pre-processing task (MAKE_GRID_TN, MAKE_OROG_TN, or
    # MAKE_SFC_CLIMO_TN) was run, the target files live under the experiment
    # directory, so relative symlinks keep the experiment portable and the
    # links readable.  Otherwise pregenerated files in an arbitrary external
    # directory are used and absolute symlinks are created instead.
    #
    relative_link_flag = bool(run_task)

    for fp in fps:
        fn=os.path.basename(fp)
        create_symlink_to_file(fp,fn,relative_link_flag)
    #
    # Set the C-resolution based on the resolution appearing in the file
    # names.
    #
    cres=f"C{res}"
    #
    # For grid files, also create a symlink to the halo-4 grid file that
    # does not contain the halo size in its name.  This is needed by the
    # tasks that generate the initial and lateral boundary condition files.
    #
    if file_group == "grid":
        target=f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc"
        symlink=f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.nc"
        create_symlink_to_file(target,symlink,True)
        #
        # The surface climatology file generation code looks for a grid file
        # named "C${GFDLgrid_RES}_grid.tile7.halo4.nc" (i.e. the C-resolution
        # in this name is the number of grid points per horizontal direction
        # per tile, as in the global model).  Thus, when running the
        # MAKE_SFC_CLIMO_TN task on a GFDLgrid-type grid whose file names use
        # the equivalent global uniform grid resolution instead, create a
        # link whose name uses GFDLgrid_RES pointing to the link whose name
        # uses the equivalent resolution.
        #
        if RUN_TASK_MAKE_SFC_CLIMO and \
           GRID_GEN_METHOD == "GFDLgrid" and \
           not GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES:
            target=f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc"
            symlink=f"C{GFDLgrid_RES}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.nc"
            # BUG FIX: the third argument used to be the undefined name
            # "relative", which raised a NameError whenever this branch ran.
            create_symlink_to_file(target,symlink,relative_link_flag)
    #
    # For surface-climatology files, create symlinks that do not contain the
    # halo size in their names (needed by the task that generates the
    # initial condition files), and "tile1" symlinks pointing at the
    # "tile7.halo0" files so the file names can be specified in the forecast
    # model's namelist.
    #
    if file_group == "sfc_climo":
        tmp=[ f"{cres}.{itm}" for itm in SFC_CLIMO_FIELDS]
        fns_sfc_climo_with_halo_in_fn=[ f"{itm}.tile{TILE_RGNL}.halo{NH4}.nc" for itm in tmp]
        fns_sfc_climo_no_halo_in_fn=[ f"{itm}.tile{TILE_RGNL}.nc" for itm in tmp]

        for i in range(num_fields):
            target=f"{fns_sfc_climo_with_halo_in_fn[i]}"
            symlink=f"{fns_sfc_climo_no_halo_in_fn[i]}"
            create_symlink_to_file(target, symlink, True)

        fns_sfc_climo_tile7_halo0_in_fn=[ f"{itm}.tile{TILE_RGNL}.halo{NH0}.nc" for itm in tmp ]
        fns_sfc_climo_tile1_no_halo_in_fn=[ f"{itm}.tile1.nc" for itm in tmp ]

        for i in range(num_fields):
            target=f"{fns_sfc_climo_tile7_halo0_in_fn[i]}"
            symlink=f"{fns_sfc_climo_tile1_no_halo_in_fn[i]}"
            create_symlink_to_file(target,symlink,True)
    #
    # Change directory back to the original one.
    #
    cd_vrfy(SAVE_DIR)

    return res
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
11748,
555,
715,
395,
198,
11748,
28686,
198,
11748,
15095,
198,
198,
6738,
21015,
62,
26791,
1330,
1330,
62,
85,
945,
11,
900,
62,
24330,
62,
7785,
11,
3601,
62,
15414,
62,... | 2.453686 | 6,661 |
# Demo for the StarSquare class (defined elsewhere in this file): draw a 7x7
# star square, resize it to 5x5 and redraw it, then draw a separate 2x2 one.
# NOTE(review): change_n(5) presumably just updates the side length --
# confirm against the StarSquare definition.
my_first_square = StarSquare(7)
my_first_square.print()
my_first_square.change_n(5)
my_first_square.print()
my_second_square = StarSquare(2)
my_second_square.print() | [
198,
1820,
62,
11085,
62,
23415,
796,
2907,
48011,
7,
22,
8,
198,
1820,
62,
11085,
62,
23415,
13,
4798,
3419,
198,
1820,
62,
11085,
62,
23415,
13,
3803,
62,
77,
7,
20,
8,
198,
1820,
62,
11085,
62,
23415,
13,
4798,
3419,
198,
198... | 2.569231 | 65 |
# Threshold 'fog.jpg' with Otsu's method, clean up the dark-object mask,
# label its connected components, and draw red bounding boxes around the
# large ones.
# NOTE(review): this uses the pre-0.11 scikit-image module name
# `skimage.filter` (later renamed `skimage.filters`) and the
# list-of-properties form of regionprops, so it targets an old scikit-image
# release -- confirm the pinned version before upgrading.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mp
from skimage.filter.rank import entropy,otsu
from skimage.filter import threshold_otsu
from skimage.morphology import square,rectangle,label,closing,disk,binary_erosion,opening
from skimage.color import label2rgb,rgb2gray
from skimage.segmentation import clear_border
from skimage.measure import regionprops
from skimage import io
# Load, convert to grayscale, and binarize with a global Otsu threshold
# (True where brighter than the threshold).
i=rgb2gray(io.imread('fog.jpg'))
t=threshold_otsu(i)
i=i>t
# Invert so dark objects become True, then erode with a 3x3 square to drop
# speckle.
z=binary_erosion(1-i,square(3))
b=z.copy()
# Remove connected components that touch the image border (modifies b).
clear_border(b)
l=label(b)
# Pixels present in z but removed by clear_border get label -1 so label2rgb
# treats them as background.
x=np.logical_xor(z,b)
l[x]=-1
iol=label2rgb(l,image=i)
fig,ax=plt.subplots(ncols=1,nrows=1,figsize=(6,6))
ax.imshow(iol)
# Draw a red box around every labeled region with area >= 1500 pixels.
for region in regionprops(l,['Area','BoundingBox']):
    if region['Area']<1500:
        continue
    minr,minc,maxr,maxc=region['BoundingBox']
    rect=mp.Rectangle((minc,minr),maxc-minc,maxr-minr,fill=False,edgecolor='red',linewidth=1)
    ax.add_patch(rect)
plt.show()
| [
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
2603,
29487,
8019,
13,
8071,
2052,
355,
29034,
198,
6738,
1341,
9060,
13,
24455,
13,
43027,
1330,
40709,
11,
1747,
84,
198,
6738,
... | 2.386768 | 393 |
#
# def arista.avd.add_md_toc
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from jinja2.runtime import Undefined
from io import StringIO
import sys
import re
try:
import md_toc
HAS_MD_TOC = True
except ImportError:
HAS_MD_TOC = False
def add_md_toc(md_input, skip_lines=0, toc_levels=2, toc_marker='<!-- toc -->'):
    """
    add_md_toc will parse the input MarkDown and add a TOC between the toc_markers

    Example
    -------
    copy:
      content: "{{ lookup('template','eos-device-documentation.j2') | arista.avd.add_md_toc(skip_lines=3) }}"
      dest: "{{ devices_dir }}/{{ inventory_hostname }}.md"
      mode: 0664

    Parameters
    ----------
    md_input: str
        MarkDown which will be processed
    skip_lines: int, optional
        Skip first x lines when parsing MD file
        default: 0
    toc_levels: int, optional
        How many levels of headings will be included in the TOC (Default:2)
        default: 2
    toc_marker: str, optional
        TOC will be inserted or updated between two of these markers in the MD file
        default: '<!-- toc -->'

    Returns
    -------
    str
        MD with added TOC, or None when the input is undefined/None or the
        optional md_toc library is not installed
    """
    if isinstance(md_input, Undefined) or md_input is None or HAS_MD_TOC is False:
        # Nothing to do: missing input or optional dependency absent.
        return

    # md_toc.build_toc reads the markdown from stdin, so temporarily point
    # sys.stdin at an in-memory buffer holding md_input.  BUG FIX: the swap is
    # now wrapped in try/finally so an exception inside build_toc can no
    # longer leave sys.stdin pointing at a (soon-to-be-closed) StringIO --
    # the original restored stdin only on the success path.
    with StringIO(md_input) as md:
        saved_stdin = sys.stdin
        sys.stdin = md
        try:
            toc = md_toc.build_toc('-', keep_header_levels=toc_levels, skip_lines=skip_lines)
        finally:
            sys.stdin = saved_stdin

    # Replace everything between (and including) the first pair of markers
    # with the generated TOC.
    escaped_marker = re.escape(toc_marker)
    toc_pattern = re.compile(fr"{escaped_marker}[\S\s]*?{escaped_marker}")
    # BUG FIX: use a callable replacement so backslashes in the generated TOC
    # are inserted literally instead of being interpreted as regex escape
    # sequences by re.sub (which could raise or mangle the output).
    return toc_pattern.sub(lambda _m: toc, md_input, count=1)
| [
2,
198,
2,
825,
610,
12523,
13,
615,
67,
13,
2860,
62,
9132,
62,
40301,
198,
2,
198,
6738,
11593,
37443,
834,
1330,
357,
48546,
62,
11748,
11,
7297,
11,
3601,
62,
8818,
8,
198,
834,
4164,
330,
31172,
834,
796,
2099,
198,
198,
67... | 2.401628 | 737 |
import shutil
import sys
import os

# Usage: python <script> <num_copies> <dir> <pack_filename>
#
# Makes <num_copies> numbered copies of <dir>/<pack_filename>.bin, named
# <pack_filename>0.bin, <pack_filename>1.bin, ... (metadata preserved via
# shutil.copy2).
num_copies = int(sys.argv[1])
original_dir = sys.argv[2]
pack_filename = sys.argv[3]

# Build the source path once (loop-invariant).  Using os.path.join also fixes
# the original's requirement that original_dir end with a path separator
# (plain string concatenation silently produced a wrong path otherwise).
src = os.path.join(original_dir, pack_filename + '.bin')
for i in range(num_copies):
    shutil.copy2(src, os.path.join(original_dir, pack_filename + str(i) + '.bin'))
| [
11748,
4423,
346,
198,
11748,
25064,
198,
11748,
28686,
198,
198,
22510,
62,
22163,
444,
796,
493,
7,
17597,
13,
853,
85,
58,
16,
12962,
198,
14986,
62,
15908,
796,
25064,
13,
853,
85,
58,
17,
60,
198,
8002,
62,
34345,
796,
25064,
... | 2.480769 | 104 |
import time
from app import app, db, cache
from app.models import Machine
import app.utils as utils
@cache.cached(timeout=3)
# limit_time: only machines seen within the last limit_time interval are checked;
# offline_time: a machine that has not reported for longer than offline_time is considered lost/offline
| [
11748,
640,
198,
6738,
598,
1330,
598,
11,
20613,
11,
12940,
198,
6738,
598,
13,
27530,
1330,
10850,
198,
11748,
598,
13,
26791,
355,
3384,
4487,
198,
31,
23870,
13,
66,
2317,
7,
48678,
28,
18,
8,
198,
2,
4179,
62,
2435,
220,
1031... | 1.754545 | 110 |
#! /usr/bin/env python3
import numpy as np
from cv2 import aruco
# import matplotlib.pyplot as plt
import csv
from cv2 import cv2
# Script entry point; main() is defined elsewhere in this file (not visible
# in this excerpt).
if __name__ == "__main__":
    main()
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
269,
85,
17,
1330,
610,
84,
1073,
198,
2,
1330,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
269,
21370,
198,
6738,
... | 2.418919 | 74 |
import threading
from unittest import TestCase
import multiprocessing
import multiprocessing_utils
| [
11748,
4704,
278,
198,
6738,
555,
715,
395,
1330,
6208,
20448,
198,
198,
11748,
18540,
305,
919,
278,
198,
198,
11748,
18540,
305,
919,
278,
62,
26791,
628,
628
] | 3.586207 | 29 |
from social.backends.linkedin import LinkedinOAuth as LinkedinBackend, \
LinkedinOAuth2 as LinkedinOAuth2Backend
| [
6738,
1919,
13,
1891,
2412,
13,
25614,
259,
1330,
7502,
27152,
23621,
1071,
355,
7502,
27152,
7282,
437,
11,
3467,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
... | 2.112676 | 71 |
import paho.mqtt.client as mqtt

# Minimal paho-mqtt smoke test: connect to the public Eclipse Mosquitto
# broker, publish one message, then service the network loop briefly so the
# publish actually goes out on the wire.
broker_client = mqtt.Client("C1")
broker_client.connect("test.mosquitto.org", 1883)
broker_client.publish("topic/yourTopic", "Hello world")
broker_client.loop(2)
| [
11748,
279,
17108,
13,
76,
80,
926,
13,
16366,
355,
285,
80,
926,
198,
198,
76,
80,
926,
66,
796,
285,
80,
926,
13,
11792,
7203,
34,
16,
4943,
198,
76,
80,
926,
66,
13,
8443,
7203,
9288,
13,
16785,
421,
37606,
13,
2398,
1600,
... | 2.116883 | 77 |
#Script_json_maker_vulnerabilities_adder0.4
import json
# Placeholder coverage figures ("-" means not measured yet).
codecov="-"
Branchcov="-"
# Whether any vulnerability was found.  NOTE(review): the trailing comment
# says this should be read from a file, but no file read is visible in this
# excerpt -- confirm against the rest of the script.
Vulnerability_detected="No" #Read from file
Vulnerabilities_detected = []
#Vulnerabilities_detected = ",".join(Vulnerabilities_detected)
File_path= "vuln_json.json" | [
2,
7391,
62,
17752,
62,
10297,
62,
85,
5697,
5738,
62,
26676,
15,
13,
19,
198,
11748,
33918,
198,
198,
19815,
721,
709,
2625,
21215,
198,
33,
25642,
66,
709,
2625,
21215,
198,
53,
40920,
62,
15255,
11197,
2625,
2949,
1,
1303,
5569,
... | 2.775281 | 89 |
import unittest
from scrapers.journalscrapers import WileyScraper
| [
11748,
555,
715,
395,
198,
198,
6738,
15881,
364,
13,
73,
18408,
66,
2416,
364,
1330,
43424,
3351,
38545,
198
] | 3.35 | 20 |
import subprocess
from py_buildsystem.common import logger
| [
11748,
850,
14681,
201,
198,
201,
198,
6738,
12972,
62,
11249,
10057,
13,
11321,
1330,
49706,
201,
198,
201,
198
] | 3.25 | 20 |
import expand_task_dict
import make_tasks
def expand_ros_task(taskdict, nav, name = '', index = None):
    """
    Convert a task dict coming from params (or from tasks_from_wps) into a
    task object by delegating to make_tasks.expand_task.
    """
    return make_tasks.expand_task(taskdict, nav, name, index)
| [
11748,
4292,
62,
35943,
62,
11600,
198,
11748,
787,
62,
83,
6791,
628,
198,
4299,
4292,
62,
4951,
62,
35943,
7,
35943,
11600,
11,
6812,
11,
1438,
796,
705,
3256,
6376,
796,
6045,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
... | 2.701923 | 104 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import memory
import page_sets
from telemetry import benchmark
# NOTE(review): @benchmark.Disabled is stacked three times (the last with the
# crbug.com/371153 reference); the duplicates look redundant -- confirm the
# telemetry decorator semantics before cleaning them up.
@benchmark.Enabled('android')
@benchmark.Disabled
@benchmark.Disabled
@benchmark.Disabled # crbug.com/371153
@benchmark.Enabled('android', 'has tabs')
class TypicalMobileSites(benchmark.Benchmark):
  """Long memory test."""
  test = memory.Memory  # measurement class executed by the benchmark
  page_set = page_sets.TypicalMobileSitesPageSet  # pages to cycle through
  options = {'pageset_repeat': 15}  # repeat the page set 15x for a long run
| [
2,
15069,
2211,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
198,
2,
1043,
287,
262,
38559,
24290,
2393,
13,
198,
198,
6738,... | 3.367816 | 174 |
import torch
import torch.nn as nn
from torch.autograd import Function
import cupy
from string import Template
@cupy.util.memoize(for_each_device=True)
CUDA_NUM_THREADS = 1024
Stream = namedtuple('Stream', ['ptr'])
class RoI(Function):
"""
NOTE:only CUDA-compatible
"""
| [
11748,
28034,
198,
11748,
28034,
13,
20471,
355,
299,
77,
198,
6738,
28034,
13,
2306,
519,
6335,
1330,
15553,
198,
11748,
6508,
88,
198,
6738,
4731,
1330,
37350,
628,
198,
31,
25244,
88,
13,
22602,
13,
11883,
78,
1096,
7,
1640,
62,
... | 2.71028 | 107 |
'''
Directly manage the salt fileserver plugins
'''
# Import salt libs
import salt.fileserver
def update():
    '''
    Run an update across every configured fileserver backend.
    '''
    # Fileserver(__opts__) loads the backends enabled in the (salt-injected)
    # master opts; update() then refreshes each of them.
    salt.fileserver.Fileserver(__opts__).update()
| [
7061,
6,
198,
13470,
306,
6687,
262,
8268,
3696,
18497,
20652,
198,
7061,
6,
198,
198,
2,
17267,
8268,
9195,
82,
198,
11748,
8268,
13,
16624,
18497,
628,
198,
4299,
4296,
33529,
198,
220,
220,
220,
705,
7061,
198,
220,
220,
220,
839... | 3 | 91 |
import asyncio
from math import ceil
import discord
from discord import Colour
from discord.ext import commands
from classes.classic_solver import ClassicSolver
from classes.repeat_solver import RepeatSolver
from classes.detective_solver import DetectiveSolver
from classes.custom_solver import CustomSolver
from classes.stat_manager import StatManager
| [
11748,
30351,
952,
198,
6738,
10688,
1330,
2906,
346,
198,
198,
11748,
36446,
198,
6738,
36446,
1330,
38773,
198,
6738,
36446,
13,
2302,
1330,
9729,
198,
198,
6738,
6097,
13,
49421,
62,
82,
14375,
1330,
13449,
50,
14375,
198,
6738,
6097... | 4.103448 | 87 |
"maths director class that aggregates the course builder"
from course_factory.course_builder import CourseBuilder
class MathsDirector:
"maths director class"
@staticmethod | [
1,
11018,
82,
3437,
1398,
326,
13262,
689,
262,
1781,
27098,
1,
198,
198,
6738,
1781,
62,
69,
9548,
13,
17319,
62,
38272,
1330,
20537,
32875,
628,
198,
4871,
16320,
82,
28702,
25,
198,
220,
220,
220,
366,
11018,
82,
3437,
1398,
1,
... | 3.4 | 55 |
# -*- coding:utf8 -*-
# ==============================================================================
# Copyright 2017 Baidu.com, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
This module implements the core layer of Match-LSTM and BiDAF
"""
import tensorflow as tf
import tensorflow.contrib as tc
class MatchLSTMAttnCell(tc.rnn.LSTMCell):
    """
    Implements the Match-LSTM attention cell.

    NOTE(review): as shown in this excerpt the class body contains no
    overrides, so it behaves exactly like tc.rnn.LSTMCell; the attention
    logic presumably lives in methods not visible here -- confirm against
    the full file.
    """
class MatchLSTMLayer(object):
    """
    Implements the Match-LSTM layer, which attends to the question dynamically in a LSTM fashion.
    """
    def match(self, passage_encodes, question_encodes, p_length, q_length):
        """
        Match the passage_encodes with question_encodes using Match-LSTM algorithm
        """
        with tf.variable_scope('match_lstm'):
            # self.hidden_size is assumed to be set by an __init__ that is
            # not visible in this excerpt -- confirm against the full file.
            cell_fw = MatchLSTMAttnCell(self.hidden_size, question_encodes)
            cell_bw = MatchLSTMAttnCell(self.hidden_size, question_encodes)
            # Run the passage through a bidirectional attention LSTM, masked
            # to the true passage lengths via sequence_length.
            outputs, state = tf.nn.bidirectional_dynamic_rnn(cell_fw, cell_bw,
                                                             inputs=passage_encodes,
                                                             sequence_length=p_length,
                                                             dtype=tf.float32)
            # Concatenate forward/backward outputs along the feature axis.
            match_outputs = tf.concat(outputs, 2)
            # Final layer state: the forward and backward hidden states (h),
            # concatenated; the cell states (c) are discarded.
            state_fw, state_bw = state
            c_fw, h_fw = state_fw
            c_bw, h_bw = state_bw
            match_state = tf.concat([h_fw, h_bw], 1)
        return match_outputs, match_state
class AttentionFlowMatchLayer(object):
    """
    Implements the Attention Flow layer,
    which computes Context-to-question Attention and question-to-context Attention
    """
    def match(self, passage_encodes, question_encodes, p_length, q_length):
        """
        Match the passage_encodes with question_encodes using Attention Flow Match algorithm
        """
        # Note: p_length and q_length are unused here; the attention is
        # computed over the full padded sequences.
        with tf.variable_scope('bidaf'):
            # Similarity score between every passage and question position.
            sim_matrix = tf.matmul(passage_encodes, question_encodes, transpose_b=True)
            # Context-to-question: per passage position, a softmax-weighted
            # average of the question encodings.
            context2question_attn = tf.matmul(tf.nn.softmax(sim_matrix, -1), question_encodes)
            # Question-to-context: attend over passage positions using the
            # max similarity each passage position attains, then tile the
            # single attended vector across all passage positions.
            b = tf.nn.softmax(tf.expand_dims(tf.reduce_max(sim_matrix, 2), 1), -1)
            question2context_attn = tf.tile(tf.matmul(b, passage_encodes),
                                         [1, tf.shape(passage_encodes)[1], 1])
            # BiDAF-style fusion: raw encodings plus elementwise products
            # with both attention streams, concatenated on the feature axis.
            concat_outputs = tf.concat([passage_encodes, context2question_attn,
                                        passage_encodes * question2context_attn,
                                        passage_encodes * context2question_attn], -1)
            return concat_outputs, None
| [
2,
532,
9,
12,
19617,
25,
40477,
23,
532,
9,
12,
198,
2,
38093,
25609,
28,
198,
2,
15069,
2177,
347,
1698,
84,
13,
785,
11,
3457,
13,
1439,
6923,
33876,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
35... | 2.318571 | 1,400 |
#!/usr/bin/env python
"""
This module implements a version of pymatgen's Transmuter to generate
TransformedStructures from DB data sources. They enable the
high-throughput generation of new structures and input files.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 4, 2012"
import datetime
from pymatgen.alchemy.materials import TransformedStructure
from pymatgen.alchemy.transmuters import StandardTransmuter
class QeTransmuter(StandardTransmuter):
    """
    The QeTransmuter uses a QueryEngine to retrieve and generate new structures
    from a database.
    """

    def __init__(self, queryengine, criteria, transformations,
                 extend_collection=0, ncores=None):
        """Constructor.

        Args:
            queryengine:
                QueryEngine object for database access
            criteria:
                A criteria to search on, which is passed to queryengine's
                get_entries method.
            transformations:
                New transformations to be applied to all structures
            extend_collection:
                Whether to use more than one output structure from one-to-many
                transformations. extend_collection can be a number, which
                determines the maximum branching for each transformation.
            ncores:
                Number of cores to use for applying transformations.
                Uses multiprocessing.Pool
        """
        entries = queryengine.get_entries(criteria, inc_structure=True)
        # Cleanup: the original also built a "host:port/db/collection" source
        # string here, but it was never used; it has been removed.
        # Each entry's history is reconstructed by the module-level
        # get_history helper (defined elsewhere in this file).
        transformed_structures = [TransformedStructure(
            entry.structure, [], history=get_history(entry))
            for entry in entries]
        # Direct (non-super) base-class call kept for Python 2 compatibility,
        # consistent with this file's "from __future__ import division" era.
        StandardTransmuter.__init__(self, transformed_structures,
                                    transformations=transformations,
                                    extend_collection=extend_collection,
                                    ncores=ncores)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
37811,
198,
1212,
8265,
23986,
257,
2196,
286,
279,
4948,
265,
5235,
338,
3602,
76,
11894,
284,
7716,
198,
8291,
12214,
44909,
942,
422,
20137,
1366,
4237,
13,
1119,
7139,
262,
19... | 2.34898 | 980 |
from django import template
from blog.models import Post, Category, Comment
register = template.Library()
@register.simple_tag
@register.simple_tag(name='counter')
@register.simple_tag(name='comment_count')
@register.simple_tag
@register.filter
@register.inclusion_tag('blog/blog-latestposts.html')
@register.inclusion_tag('blog/blog-postcategory.html')
@register.inclusion_tag('web/index-latestposts.html')
| [
6738,
42625,
14208,
1330,
11055,
198,
6738,
4130,
13,
27530,
1330,
2947,
11,
21743,
11,
18957,
628,
198,
30238,
796,
11055,
13,
23377,
3419,
198,
198,
31,
30238,
13,
36439,
62,
12985,
628,
198,
31,
30238,
13,
36439,
62,
12985,
7,
3672... | 3.169118 | 136 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.891892 | 37 |
#
# cogs/navi/task/factory.py
#
# futaba - A Discord Mod bot for the Programming server
# Copyright (c) 2017-2020 Jake Richardson, Ammon Smith, jackylam5
#
# futaba is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
"""
A factory function to create appropriate Navi Task objects from
the values and attributes retrieved in the database.
"""
import logging
from collections import namedtuple
import discord
from futaba.enums import TaskType
from .change_roles import build_change_role_task
from .punish import build_punish_task
from .send_message import build_send_message_task
logger = logging.getLogger(__name__)
# build_navi_task is defined later in this file (not visible in this excerpt).
__all__ = ["build_navi_task"]

# Stand-in for a Discord user record; presumably used when the real
# discord.User cannot be fetched -- confirm against the builder functions.
FakeUser = namedtuple("FakeUser", ("id", "name", "discriminator"))

# Maps each stored TaskType to the builder that reconstructs the matching
# Navi task object from its database attributes.
TASK_BUILDERS = {
    TaskType.CHANGE_ROLES: build_change_role_task,
    TaskType.SEND_MESSAGE: build_send_message_task,
    TaskType.PUNISH: build_punish_task,
}
| [
2,
198,
2,
269,
18463,
14,
77,
15820,
14,
35943,
14,
69,
9548,
13,
9078,
198,
2,
198,
2,
13294,
15498,
532,
317,
39462,
3401,
10214,
329,
262,
30297,
4382,
198,
2,
15069,
357,
66,
8,
2177,
12,
42334,
14757,
21679,
11,
1703,
2144,
... | 3.184211 | 342 |
from django.http import HttpRequest, HttpResponse
from typing import List, Union
from zerver.decorator import has_request_variables, REQ
from zerver.lib.actions import check_send_typing_notification, \
extract_recipients
from zerver.lib.response import json_success
from zerver.models import UserProfile
@has_request_variables
| [
6738,
42625,
14208,
13,
4023,
1330,
367,
29281,
18453,
11,
367,
29281,
31077,
198,
6738,
19720,
1330,
7343,
11,
4479,
198,
198,
6738,
1976,
18497,
13,
12501,
273,
1352,
1330,
468,
62,
25927,
62,
25641,
2977,
11,
4526,
48,
198,
6738,
1... | 3.363636 | 99 |
import itertools
maxCount = 0
per = None
set = set()
for permutation in itertools.combinations([1,2,3,4,5,6,7,8,9], 4):
permutation = list(permutation)
set.clear()
generate([False, False, False, False, True, True, True], 0, "", permutation[:])
generate([False, False, False, True, False, True, True], 0, "", permutation[:])
generate([False, False, False, True, True, False, True], 0, "", permutation[:])
generate([False, False, True, False, False, True, True], 0, "", permutation[:])
generate([False, False, True, False, True, False, True], 0, "", permutation[:])
li = sorted(set)
try:
index = li.index(1)
except:
continue
count = 1
for s in range(index, len(li)):
if (li[s] % 1 != 0):
continue
if (count == li[s]):
count += 1
else:
count -= 1
break
if count > maxCount:
maxCount = count
per = permutation
print count
print 'Answer:', per
| [
11748,
340,
861,
10141,
628,
198,
9806,
12332,
796,
657,
198,
525,
796,
6045,
198,
2617,
796,
900,
3419,
198,
198,
1640,
9943,
7094,
287,
340,
861,
10141,
13,
24011,
7352,
26933,
16,
11,
17,
11,
18,
11,
19,
11,
20,
11,
21,
11,
2... | 2.266816 | 446 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File : dataloader.py
# Author : Chi Han, Jiayuan Mao
# Email : haanchi@gmail.com, maojiayuan@gmail.com
# Date : 23.07.2019
# Last Modified Date: 19.11.2019
# Last Modified By : Chi Han
#
# This file is part of the VCML codebase
# Distributed under MIT license
from torch.utils.data import DataLoader
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
9220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1058,
4818,
282,
1170,
263,
13,
9078,... | 2.278409 | 176 |
"""Copyright 2020 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""Trainer.
Usage:
Training:
trainer --data_folder ~/workspace/seq2graph/spider \
--epochs 500 --save_path ~/workspace/seq2graph/seq2seq_savedmodel
Predicting:
trainer --save_model_path ~/workspace/seq2graph/seq2seq_savedmodel \
--predict ~/workspace/seq2graph/spider/train.record \
--predict_output ~/tmp/seq2seq_train.txt
"""
from absl import app
from absl import flags
from absl import logging
import os
import json
import sys
import shutil
import time
import tensorflow as tf
import numpy as np
from vocabulary import Vocabulary
from dataset import build_dataset
from graph_transformer import GraphTransformer
from graph_utils import contains_tree
from graph_utils import reconstruct_tree
from graph_utils import precompute_children_combinations
from graph_utils import retrieve_trees
from training_utils import NoamSchedule
from training_utils import EdgeLoss
from training_utils import SequenceLoss
from fscore import FScore
flags.DEFINE_string("data_spec", None, "Path to training data spec.")
flags.DEFINE_integer("batch_size", 32, "Batch size.")
flags.DEFINE_integer("model_dim", 128, "Model dim.")
flags.DEFINE_integer("epochs", 10, "Num of epochs.")
flags.DEFINE_float("dropout", 0.2, "Dropout rate.")
flags.DEFINE_bool("biaffine", True, "Use Biaffine in edge prediction.")
flags.DEFINE_string("save_model_path", None, "Save model path.")
flags.DEFINE_string(
"predict", None,
"Init model from save_model_path and run prediction on the data set,")
flags.DEFINE_string("predict_output", None, "Prediction output.")
flags.DEFINE_bool("eager_run", False, "Run in eager mode for debugging.")
flags.DEFINE_string("ref_derivs", None, "Reference dev derivations.")
FLAGS = flags.FLAGS
@tf.function
if __name__ == "__main__":
app.run(main)
| [
37811,
15269,
12131,
3012,
11419,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
198,
1639,
743,
7330,
257,
4866,
286,
26... | 3.318634 | 703 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
Unit Tests for senlin.rpc.client
'''
import copy
import mock
from oslo_messaging._drivers import common as rpc_common
from senlin.common import exception
from senlin.common import messaging
from senlin.rpc import client as rpc_client
from senlin.tests.common import base
from senlin.tests.common import utils
| [
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
345,
743,
198,
2,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
921,
743,
7330,
198,
2,
257,
4866,
286,
262,
13789,
379,
198,
2,... | 3.654008 | 237 |
import pyexasol
import time
from influxdb import InfluxDBClient
# Settings section
###############################################################################
# InfluxDB connection
influx_db = {
'host': '127.0.0.1',
'port': '8086',
'user': 'root',
'pass': 'root',
'db' : 'pyexamon'
}
# Exasol DB(s) connection
exasol_dbs = {
0:{'alias' : 'ExaDb_1', 'dsn' : '127.0.0.1:8563', 'user' : 'SYS', 'pass' : 'exasol'},
# You can add more than one DB
1:{'alias' : 'ExaDb_2', 'dsn' : '192.168.56.2:8563', 'user' : 'SYS', 'pass' : 'exasol'},
# 2:{'host' : '192.168.1.11', 'port' : '8563', 'user' : 'SYS', 'pass' : 'exasol'},
# ...
# xxx:{'host' : '192.168.1.xxx', 'port' : '8563', 'user' : 'SYS', 'pass' : 'exasol'}
}
# Loop after xxx seconds
time_to_sleep = 30
# Subroutines section
###############################################################################
# Main code section
###############################################################################
# In case of loop_mode do this code inifinitelly
while True:
# How many records we should get.
records_to_get = time_to_sleep/60+2
# InfluxDB connection
iconn = influx_connect(influx_db["host"], influx_db["port"], influx_db["user"], influx_db["pass"], influx_db["db"])
# For every Exasol DB
for db_index in exasol_dbs:
# Ecasol connection
econn = exa_connect(exasol_dbs[db_index]["dsn"], exasol_dbs[db_index]["user"], exasol_dbs[db_index]["pass"])
# If connection exists
if (econn != None):
# Loading records to InfluxDb
records = get_exa_monitor_last_day(econn, records_to_get)
load_records_into_influxdb(records, iconn, exasol_dbs[db_index]["alias"])
# Closing Exa connection
econn.close()
# Wait a time then repeat
time.sleep(time_to_sleep)
# Closing InfluxDB connection
iconn.close()
| [
11748,
12972,
1069,
292,
349,
201,
198,
11748,
640,
201,
198,
6738,
25065,
9945,
1330,
4806,
22564,
11012,
11792,
201,
198,
201,
198,
2,
16163,
2665,
201,
198,
29113,
29113,
7804,
4242,
21017,
201,
198,
201,
198,
2,
4806,
22564,
11012,
... | 2.3031 | 871 |
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="nyumytimecli",
version="0.0.4",
author="Matteo Sandrin",
long_description=long_description,
long_description_content_type="text/markdown",
description="A command-line interface for NYU's employee time-tracking website",
url="https://github.com/matteosandrin/nyu-mytime-cli",
packages=find_packages(),
install_requires=[
'click',
'selenium'
],
entry_points='''
[console_scripts]
nyu-mytime=nyumytimecli.nyumytimecli:cli
''',
setup_requires=["pytest-runner"],
tests_require=["pytest"],
include_package_data=True,
) | [
6738,
900,
37623,
10141,
1330,
9058,
11,
1064,
62,
43789,
628,
198,
4480,
1280,
7203,
15675,
11682,
13,
9132,
1600,
366,
81,
4943,
355,
277,
71,
25,
198,
220,
220,
220,
890,
62,
11213,
796,
277,
71,
13,
961,
3419,
198,
198,
40406,
... | 2.656126 | 253 |
# Standard Imports
# 3rd Party Imports
import discord
from discord.ext import commands
# Custom Imports
from database import Database
from finance import Finances
| [
2,
8997,
1846,
3742,
198,
198,
2,
513,
4372,
3615,
1846,
3742,
198,
11748,
36446,
198,
6738,
36446,
13,
2302,
1330,
9729,
198,
198,
2,
8562,
1846,
3742,
198,
6738,
6831,
1330,
24047,
198,
6738,
9604,
1330,
4463,
1817,
198
] | 4.125 | 40 |